Dataset schema (one row per repository snapshot):

| column | dtype | range / cardinality |
| --- | --- | --- |
| repo_name | string | length 5–114 |
| repo_url | string | length 24–133 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| directory_id | string | length 40 |
| branch_name | string (categorical) | 209 distinct values |
| visit_date | timestamp[ns] | |
| revision_date | timestamp[ns] | |
| committer_date | timestamp[ns] | |
| github_id | int64 | 9.83k–683M |
| star_events_count | int64 | 0–22.6k |
| fork_events_count | int64 | 0–4.15k |
| gha_license_id | string (categorical) | 17 distinct values |
| gha_created_at | timestamp[ns] | |
| gha_updated_at | timestamp[ns] | |
| gha_pushed_at | timestamp[ns] | |
| gha_language | string (categorical) | 115 distinct values |
| files | list | length 1–13.2k |
| num_files | int64 | 1–13.2k |
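For readers who want to poke at rows with this schema, a minimal sketch is shown below. It assumes the rows have been exported as JSON Lines; the file name `rows.jsonl` is a placeholder, not part of the dataset.

```python
# Minimal sketch for inspecting rows with the schema above, assuming a
# JSON Lines export; "rows.jsonl" is a placeholder path.
import json

with open("rows.jsonl", encoding="utf-8") as f:
    for line in f:
        row = json.loads(line)
        # Each row carries repo-level metadata plus a `files` list whose
        # length should match the `num_files` column.
        assert len(row["files"]) == row["num_files"]
        print(row["repo_name"], row["branch_name"],
              row["star_events_count"], row["num_files"])
```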
Row 1: CoorFun/RPi-SPAM

- repo_name: CoorFun/RPi-SPAM
- repo_url: https://github.com/CoorFun/RPi-SPAM
- snapshot_id: 659f2475b0731c840a69e44a21a4a761c0d142b1
- revision_id: 8729677703703e92084694f5615d57792a525644
- directory_id: 99ac88ad6c1939a1ca78cfe43c3daa239f3c4cb7
- branch_name: refs/heads/master
- visit_date: 2018-01-07T22:44:27.502745
- revision_date: 2017-12-20T10:22:45
- committer_date: 2017-12-20T10:22:45
- github_id: 55935979
- star_events_count: 0
- fork_events_count: 0
- gha_license_id: null · gha_created_at: null · gha_updated_at: null · gha_pushed_at: null · gha_language: null
[ { "alpha_fraction": 0.6634615659713745, "alphanum_fraction": 0.7163461446762085, "avg_line_length": 16.33333396911621, "blob_id": "cfee0da7fcdaadccb38b01b1f351edc0d17ade97", "content_id": "880aa88f576a9926fdf39373219032b651eda239", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 208, "license_type": "no_license", "max_line_length": 40, "num_lines": 12, "path": "/SAKS/beep.py", "repo_name": "CoorFun/RPi-SPAM", "src_encoding": "UTF-8", "text": "from sakshat import SAKSHAT\nimport time,random\n\nSAKS = SAKSHAT()\n\ndelay = 0\nwhile True:\n delay = random.randrange(50,2000)/1000\n SAKS.buzzer.on()\n time.sleep(delay)\n SAKS.buzzer.off()\n time.sleep(delay)\n" }, { "alpha_fraction": 0.7924528121948242, "alphanum_fraction": 0.8032345175743103, "avg_line_length": 91.75, "blob_id": "907cd7fd712b47e1002552bc33cb9585ea5ffbbc", "content_id": "fab21628618634f94378c64f6a15fdd868f059b2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 371, "license_type": "no_license", "max_line_length": 262, "num_lines": 4, "path": "/README.md", "repo_name": "CoorFun/RPi-SPAM", "src_encoding": "UTF-8", "text": "# RPi-SPAM\nThis respository is used to share some basic codes of Raspberry Pi project. For example, some basic driver code of peripheral IC like 74HCxx or segments display driver TM16xx is going to be shared here. Also I will put some basic tutorial with codes of RPi here.\n\nFeel free to prensent your question and modification if I made any mistake or unclear comments.\n" }, { "alpha_fraction": 0.640625, "alphanum_fraction": 0.68359375, "avg_line_length": 15.340425491333008, "blob_id": "dc99ca3cf0f8e12703e00aa129b2f22e36fd0461", "content_id": "c16aa47a255ac260ee888a3d9c33b014eb45e1aa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 768, "license_type": "no_license", "max_line_length": 60, "num_lines": 47, "path": "/RPI-GPIO/leds.py", "repo_name": "CoorFun/RPi-SPAM", "src_encoding": "UTF-8", "text": "import RPi.GPIO as GPIO\nimport time\n\nGPIO.setmode(GPIO.BCM)\n\nDS = 6\nSHCP = 19\nSTCP = 13\n\ndef init():\n\tGPIO.setup(DS, GPIO.OUT)\n\tGPIO.setup(SHCP, GPIO.OUT)\n\tGPIO.setup(STCP, GPIO.OUT)\n\n\tGPIO.output(DS, GPIO.LOW)\n\tGPIO.output(SHCP, GPIO.LOW)\n\tGPIO.output(STCP, GPIO.LOW)\n\ndef writeBit(data):\n\tGPIO.output(DS, data)\n\n\tGPIO.output(SHCP, GPIO.LOW)\n\tGPIO.output(SHCP, GPIO.HIGH)\n\ndef writeByte(data):\n\tfor i in range (0, 8):\n\t\twriteBit((data>>i) & 0x01)\n\n\tGPIO.output(STCP, GPIO.HIGH)\n\tGPIO.output(STCP, GPIO.LOW)\n\ntry:\n\tinit()\n\twhile True:\n\t\tfor i in [0xfe, 0xfd, 0xfb, 0xf7, 0xef, 0xdf, 0xbf, 0x7f]:\n\t\t\twriteByte(i)\n\t\t\ttime.sleep(0.2)\n\t\twriteByte(0xff)\n\t\ttime.sleep(0.1)\n\t\twriteByte(0x00)\n\t\ttime.sleep(0.1)\n\nexcept KeyboardInterrupt:\n\tpass\n\nwriteByte(0x00)\nGPIO.cleanup()\n" }, { "alpha_fraction": 0.6633465886116028, "alphanum_fraction": 0.7065073251724243, "avg_line_length": 15.921348571777344, "blob_id": "1a83413c906c30bef1bd336db20be2d60cd024a3", "content_id": "5371e4db063228e24b04cb84dd8793e2b7704317", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1506, "license_type": "no_license", "max_line_length": 74, "num_lines": 89, "path": "/RPI-GPIO/digts.py", "repo_name": "CoorFun/RPi-SPAM", "src_encoding": "UTF-8", "text": "import RPi.GPIO as GPIO\nimport time\n\nGPIO.setmode(GPIO.BCM)\n\nSCL = 5\nSDA = 
25\n\nGPIO.setup(SCL, GPIO.OUT)\nGPIO.setup(SDA, GPIO.OUT)\n\nGPIO.output(SCL, GPIO.LOW)\nGPIO.output(SDA, GPIO.LOW)\n\nnumbs = [0x3f, 0x06, 0x5b, 0x4f, 0x66, 0x6d, 0x7d, 0x07, 0x7f, 0x6f, 0x77]\n\ndef busDelay():\n\ttime.sleep(0.001)\n\ndef startBus():\n\tGPIO.output(SCL, GPIO.HIGH)\n\tGPIO.output(SDA, GPIO.HIGH)\n\tbusDelay()\n\tGPIO.output(SDA, GPIO.LOW)\n\tbusDelay()\n\tGPIO.output(SCL, GPIO.LOW)\n\tbusDelay()\n\ndef stopBus():\n\tGPIO.output(SCL, GPIO.LOW)\n\tbusDelay()\n\tGPIO.output(SDA, GPIO.LOW)\n\tbusDelay()\n\tGPIO.output(SCL, GPIO.HIGH)\n\tbusDelay()\n\tGPIO.output(SDA, GPIO.HIGH)\n\tbusDelay()\n\ndef writeBit(data):\n\tGPIO.output(SCL, GPIO.LOW)\n\tbusDelay()\n\tGPIO.output(SDA, data)\n\tbusDelay()\n\tGPIO.output(SCL, GPIO.HIGH)\n\tbusDelay()\n\ndef writeByte(pattern):\n\tfor i in range (0,8):\n\t\twriteBit((pattern>>i) & 0x01)\n\n\tGPIO.output(SCL, GPIO.LOW)\n\tbusDelay()\n\n\tGPIO.output(SDA, GPIO.HIGH)\n\tbusDelay()\n\n\tGPIO.output(SCL, GPIO.HIGH)\n\tbusDelay()\n\ndef writeCommand(command):\n\tstartBus()\n\twriteByte(command)\n\tstartBus()\n\ndef writeData(address, data):\n\tstartBus()\n\twriteByte(address)\n\twriteByte(data)\n\tstartBus()\n\ndef timeDisplay(time):\n\twriteCommand(0x44)\n\twriteData(0xc0, numbs[time.tm_min/10])\n\twriteData(0xc1, (numbs[time.tm_min%10])|0x80)\n\twriteData(0xc2, numbs[time.tm_sec/10])\n\twriteData(0xc3, numbs[time.tm_sec%10])\n\twriteCommand(0x8f)\n\ntry:\n\twhile True:\n\n\t\tt = time.gmtime()\n\t\ttimeDisplay(t)\n\nexcept KeyboardInterrupt:\n\tpass\n\nwriteCommand(0x80)\nGPIO.cleanup()\n" }, { "alpha_fraction": 0.617353081703186, "alphanum_fraction": 0.6765294075012207, "avg_line_length": 28.081396102905273, "blob_id": "9aaaaedab434ac545d34eea3dd743b39b08111a2", "content_id": "9c2c5d937dfe9eac485b18056e3ec8d41bf8b1f7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2501, "license_type": "no_license", "max_line_length": 67, "num_lines": 86, "path": "/SAKS/fre.py", "repo_name": "CoorFun/RPi-SPAM", "src_encoding": "UTF-8", "text": "from sakshat import SAKSHAT\nimport alsaaudio as aa\nimport wave\nfrom struct import unpack\nimport numpy as np\nimport time\n\nsaks = SAKS = SAKSHAT()\nsaks.ledrow.off()\nsaks.digital_display.off()\n\nspectrum = [1,1,1,3,3,3,2,2]\nmatrix = [0,0,0,0,0,0,0,0]\npower = []\nweighting = [2,8,8,16,16,32,32,64] \n\n# Audio setup\nwavfile = wave.open('/home/pi/rpi-programs/Only.wav','r')\nsample_rate = wavfile.getframerate()\nno_channels = wavfile.getnchannels()\nchunk = 4096 # Use a multiple of 8\n\n# ALSA\noutput = aa.PCM(aa.PCM_PLAYBACK, aa.PCM_NORMAL)\noutput.setchannels(no_channels)\noutput.setrate(sample_rate)\noutput.setformat(aa.PCM_FORMAT_S16_LE)\noutput.setperiodsize(chunk)\n\ndef piff(val):\n return int(2*chunk*val/sample_rate)\n \ndef calculate_levels(data, chunk,sample_rate):\n global matrix\n\n # Convert raw data (ASCII string) to numpy array\n data = unpack(\"%dh\"%(len(data)/2),data)\n data = np.array(data, dtype='h')\n\n # Apply FFT - real data\n fourier=np.fft.rfft(data)\n # Remove last element in array to make it the same size as chunk\n fourier=np.delete(fourier,len(fourier)-1)\n # Find average 'amplitude' for specific frequency ranges in Hz\n power = np.abs(fourier) \n matrix[0]= int(np.mean(power[piff(0) :piff(156):1]))\n matrix[1]= int(np.mean(power[piff(156) :piff(313):1]))\n matrix[2]= int(np.mean(power[piff(313) :piff(625):1]))\n matrix[3]= int(np.mean(power[piff(625) :piff(1250):1]))\n matrix[4]= int(np.mean(power[piff(1250) 
:piff(2500):1]))\n matrix[5]= int(np.mean(power[piff(2500) :piff(5000):1]))\n matrix[6]= int(np.mean(power[piff(5000) :piff(10000):1]))\n matrix[7]= int(np.mean(power[piff(10000):piff(20000):1]))\n\n # Tidy up column values for the LED matrix\n matrix=np.divide(np.multiply(matrix,weighting),1000000)\n # Set floor at 0 and ceiling at 8 for LED matrix\n matrix=matrix.clip(0,8)\n return matrix\n\n#count = 0\ndata = wavfile.readframes(chunk)\n# Loop while audio data present\nwhile data!='':\n<<<<<<< HEAD\n\toutput.write(data) \n\tmatrix=calculate_levels(data, chunk,sample_rate)\n\tsaks.ledrow.off()\n\tsaks.ledrow.off()\n\n\tfor x in range(0, matrix[2]):\n\t\tsaks.ledrow.on_for_index(x)\n\t\n\t#count = count + 1\n\t#print count\n\tdata = wavfile.readframes(chunk)\n=======\n output.write(data) \n matrix=calculate_levels(data, chunk,sample_rate)\n saks.ledrow.off()\n \n for x in range(0, matrix[2]):\n saks.ledrow.on_for_index(x)\n \n data = wavfile.readframes(chunk)\n>>>>>>> d47bad266decf14bd2bcf3c8bd283a5c36e9ffb5\n" } ]
- num_files: 5
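The per-file `alpha_fraction` and `alphanum_fraction` statistics above are not defined anywhere in this dump. A plausible reconstruction, assuming they are simply the share of alphabetic (respectively alphanumeric) characters in the file text, would be:

```python
# Plausible reconstruction of the per-file character statistics; the exact
# definitions used by the dataset are an assumption, not documented here.
def alpha_fraction(text: str) -> float:
    return sum(c.isalpha() for c in text) / len(text) if text else 0.0

def alphanum_fraction(text: str) -> float:
    return sum(c.isalnum() for c in text) / len(text) if text else 0.0

print(alpha_fraction("abc123"))     # 0.5
print(alphanum_fraction("abc123"))  # 1.0
```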
Row 2: rice-trc/project2.2

- repo_name: rice-trc/project2.2
- repo_url: https://github.com/rice-trc/project2.2
- snapshot_id: 6c430cf253675fa9916833f2a486b7df2d680c70
- revision_id: 0940a48881e76b60ba4525ff10ba45e7583011f6
- directory_id: 308989d0612461007303c11e237d9145b209fa75
- branch_name: refs/heads/master
- visit_date: 2022-03-11T04:13:20.731030
- revision_date: 2019-11-15T17:26:26
- committer_date: 2019-11-15T17:26:26
- github_id: 193583476
- star_events_count: 0
- fork_events_count: 0
- gha_license_id: null · gha_created_at: null · gha_updated_at: null · gha_pushed_at: null · gha_language: null
[ { "alpha_fraction": 0.5835064649581909, "alphanum_fraction": 0.6164935231208801, "avg_line_length": 27.518518447875977, "blob_id": "1c47c7bbca70e9cce69c8fbcd8ccf212011286b6", "content_id": "636dae5727c33c88331a47e486f5c4f9f8ce67e6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7710, "license_type": "no_license", "max_line_length": 79, "num_lines": 270, "path": "/benchmark4_reg/discrete_time.py", "repo_name": "rice-trc/project2.2", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\nfrom functools import partial\n\nimport matplotlib.pyplot as plt\nimport numpy as np\nfrom scipy import signal\nfrom scipy.io import loadmat\n\nfrom pyvib.common import db, dsample\nfrom pyvib.forcing import multisine, multisine_time\nfrom pyvib.modal import mkc2ss\nfrom pyvib.newmark import Newmark\nfrom pyvib.nlss import NLSS, nlsim2\nfrom pyvib.nonlinear_elements import NLS, Polynomial, Tanhdryfriction\nfrom pyvib.nonlinear_elements_newmark import NLS as nmNLS\nfrom pyvib.nonlinear_elements_newmark import Polynomial as nmPolynomial\nfrom pyvib.nonlinear_elements_newmark import \\\n Tanhdryfriction as nmTanhdryfriction\n\n\"\"\"This script simulates a cantilever beam with attached slider\n\nThe slider is attached to the end; In order to know the slider velocity, needed\nfor output-based identification, the slider is modeled as a small extra mass\nattached to the tip with a spring. The nonlinear damping is then found from the\nextra mass' velocity using a regulized tanh function, ie\n\nfnl = μ*tanh(ẏ/ε)\n\nTo determine the right multisine amplitude, we make a with scan with increasing\namplitudes for one period and one realization. By looking at the first modes\nresonance peak in the FRF, we can roughly correlate the amplitude to stick or\nslip condition. We know the natural frequencies for each extreme from linear\nmodal analysis, ie. either fully stuck or fully sliding.\n\nωₙ = 19.59, 122.17, 143.11 # free\nωₙ = 21.44, 123.34, 344.78 # stuck\n\nWe need at least 2**13(8192) points per period for good identification of the\nlinear system. 
Even if the system is sampled with more points\n\"\"\"\n\nscan = False\nbenchmark = 4\n\n# define multisine\nf1 = 5\nf2 = 100\nnpp = 2**14\nfs = 700\nif scan:\n R = 1\n P = 2\n Avec = [10, 30, 50, 70, 80, 100, 120]\n Avec = [0.1, 1, 5, 10, 15, 30, 40, 50, 70, 80, 100, 120, 150]\n #Avec = [0.1, 50, 1500, 3000]\n Avec = np.round(np.logspace(1, 4, 20)).astype(int)\n upsamp = 70\n fname = 'scan'\nelse:\n R = 2\n P = 6\n Avec = [700]\n Avec = [700]\n upsamp = 70 #70\n fname = 'ms'\n fname = 'pol'\n\nns = npp*R*P\nt = np.arange(ns)/fs\nfsint = upsamp * fs\nnppint = upsamp * npp\n# add extra period which will be removed due to edge effects\nPfilter = 1\nif upsamp > 1:\n P = Pfilter + P\nnsint = nppint*P*R\ndt = 1/fsint\nNtr = 1\n\n# load system defined in matlab\ndata = loadmat('data/system.mat')\nM = data['M']\nC = data['C']\nK = data['K']\nmuN = data['muN'].item()\neps = data['eps_reg'].item()\nT_tip = data['T_tip'].squeeze().astype(int)\nFex1 = data['Fex1'].squeeze().astype(int)\nw = data['w'].squeeze().astype(int)\nfdof = np.argwhere(Fex1).item()\nnldof = np.argwhere(w).item()\nndof = M.shape[0]\n# Fixed contact and free natural frequencies (rad/s).\nom_fixed = data['om_fixed'].squeeze()\nom_free = data['om_free'].squeeze()\n\neps = 0.1\nwd = [0,0,0,0,0,1]\nnlx = NLS(Tanhdryfriction(eps=eps, w=wd))\n\nmuN = 1e9\nexponent = 3\nwd = [0,0,1,0,0,0]\nwd = w\nnlx = NLS(Polynomial(exponent=exponent, w=wd))\n# nlx = None\nnly = None\nepsf = f'{eps}'.replace('.', '')\n\n# cont time\na, b, c, d = mkc2ss(M, K, C)\nfact = 1\n# include velocity in output\nif len(wd) == 6:\n c = np.vstack((c ,np.hstack((np.zeros((3,3)), np.eye(3))) ))\n d = np.vstack((d, np.zeros((3,3))))\n fact = 2\ncsys = signal.StateSpace(a, b, c, d)\nEc = np.zeros((2*ndof, 1))\nFc = np.zeros((fact*ndof, 0))\nEc[ndof+nldof] = -muN\n\ncmodel = NLSS(csys.A, csys.B, csys.C, csys.D, Ec, Fc)\ncmodel.add_nl(nlx=nlx, nly=nly)\n\n\ndef fex_cont(A, u, t):\n t = np.atleast_1d(t)\n fex = np.zeros((len(t), ndof))\n fex[:, fdof] = A*u(t)\n return fex\n\n\ndef simulate_cont(sys, A, t):\n nt = len(t)\n y = np.empty((R, nt, sys.outputs))\n x = np.empty((R, nt, len(sys.A)))\n u = np.empty((R, nt))\n for r in range(R):\n np.random.seed(r)\n ufunc, lines = multisine_time(f1, f2, N=nhar)\n fexc = partial(fex_cont, A, ufunc)\n\n _, yr, xr = nlsim2(sys, fexc, t=tc)\n y[r] = yr\n x[r] = xr\n u[r] = ufunc(t)\n\n return y.reshape((R*nt, -1)), x.reshape((R*nt, -1)), u, lines\n\n\nnhar = 1000\nf0 = (f2-f1) / nhar\nt2 = P/f0\ntc = np.linspace(0, t2, nppint*P, endpoint=False)\nfsc = f0*nppint\nfreqc = np.arange(nppint)/nppint * fsc\n\n# convert to discrete time\ndsys = csys.to_discrete(dt=dt, method='foh') # tustin\nEd = np.zeros((2*ndof, 1))\nFd = np.zeros((fact*ndof, 0))\n# euler discretization\nEd[ndof+nldof] = -muN*dt\n\ndmodel = NLSS(dsys.A, dsys.B, dsys.C, dsys.D, Ed, Fd, dt=dsys.dt)\ndmodel.add_nl(nlx=nlx, nly=nly)\n\n# newmark\nnls = nmNLS(nmTanhdryfriction(eps=eps, w=w, kt=muN))\nnls = nmNLS(nmPolynomial(exp=exponent, w=w, k=muN))\n\n# nls = None\nsys = Newmark(M, C, K, nls)\nnm = False\n\nnp.random.seed(0)\nud, linesd, freqd = multisine(f1, f2, N=nppint, fs=fsint, R=R, P=P)\nfext = np.zeros((nsint, ndof))\n\nfor A in Avec:\n print(f'Discrete started with ns: {nsint}, A: {A}, R: {R}, P: {P}, '\n f'upsamp: {upsamp}, eps:{eps}')\n # Transient: Add periods before the start of each realization. 
To generate\n # steady state data.\n T1 = np.r_[npp*Ntr, np.r_[0:(R-1)*P*nppint+1:P*nppint]]\n fext[:, fdof] = A*ud.ravel()\n _, yd, xd = dmodel.simulate(fext, T1=T1)\n yc, xc, uc, linesc = simulate_cont(cmodel, A, tc)\n\n try:\n ynm, ydnm, yddnm = sys.integrate(fext, dt, x0=None, v0=None,\n sensitivity=False)\n Ynm = np.fft.fft(ynm[-nppint:, [fdof, nldof]], axis=0)\n nm = True\n except ValueError as e:\n print(f'Discrete stepping failed with error {e}. For A: {A}')\n\n #if scan:\n # plot frf for forcing and tanh node\n Yd = np.fft.fft(yd[-nppint:, [fdof, nldof]], axis=0)\n Yc = np.fft.fft(yc[-nppint:, [fdof, nldof]], axis=0)\n nfd = Yd.shape[0]//2\n plt.figure()\n plt.plot(freqd[:nfd], db(np.abs(Yd[:nfd])))\n if nm:\n plt.plot(freqd[:nfd], db(np.abs(Ynm[:nfd])))\n nm = False\n plt.plot(freqc[:nfd], db(np.abs(Yc[:nfd])))\n plt.xlim([0, 50])\n plt.ylim(bottom=-150)\n plt.xlabel('Frequency (Hz)')\n plt.ylabel('Amplitude (dB)')\n plt.legend(['d: Force dof', 'd: nl dof', 'nm: Force dof', 'nm: nl dof',\n 'c: Force dof', 'c: nl dof'])\n plt.title(f'A: {A}')\n plt.minorticks_on()\n plt.grid(which='both')\n plt.savefig(f'fig/dc_b{benchmark}_A{A}_eps{epsf}_fft_comp_n{fdof}.png')\n\n# We need to reshape into (npp,m,R,P)\nif len(wd) != 6:\n yd = np.hstack((yd,yd))\n yc = np.hstack((yc,yc))\nys = [ynm, ydnm, yddnm, yd[:,:3], yd[:,3:], yc[:,:3], yc[:,3:]]\nys = [y.reshape(R, P, nppint, ndof).transpose(2, 3, 0, 1) for y in ys]\n\nxs = [xd, xc]\nxs = [x.reshape(R, P, nppint, 2*ndof).transpose(2, 3, 0, 1) for x in xs]\n\nus = [A*ud, uc]\nus = [u.reshape(R, P, nppint, 1).transpose(2, 3, 0, 1) for u in us]\n\nif upsamp: # > 1:\n ys = [dsample(y, upsamp, zero_phase=True) for y in ys]\n xs = [dsample(y, upsamp, zero_phase=True) for y in xs]\n us = [u[::upsamp, :, :, :] for u in us]\n\nfname = f'data/{fname}_A{A}_upsamp{upsamp}_fs{fs}_eps{epsf}.npz'\nnp.savez(fname,\n ynm=ys[0], ydotnm=ys[1], yddotnm=ys[2],\n yd=ys[3], ydotd=ys[4], xd=xs[0], ud=us[0], linesd=linesd,\n yc=ys[5], ydotc=ys[6], xc=xs[1], uc=us[1], linesc=linesc,\n fs=fs, A=A, fsc=f0*npp)\nprint(f'data saved as {fname}')\n\n# plt.figure()\n#plt.plot(t, x, '-k', label=r'$x_1$')\n##plt.plot(t, x, '-r', label=r'$x_2$')\n#plt.xlabel('Time (t)')\n#plt.ylabel('Displacement (m)')\n#plt.title('Force type: {}, periods:{:d}')\n# plt.legend()\n#\n# plt.figure()\n# plt.plot(np.abs(np.fft.fft(x[6*1024:7*1024,0])))\n#\n#\n# x = ufunc(tc)\n# X = np.fft.fft(x)\n# nfd = X.shape[0]//2\n# plt.figure()\n# plt.plot(freq[:nfd], db(np.abs(X[:nfd])))\n# plt.xlabel('Frequency (Hz)')\n# plt.ylabel('Amplitude (dB)')\n\n\n# plt.show()\n" }, { "alpha_fraction": 0.5563710331916809, "alphanum_fraction": 0.6203194260597229, "avg_line_length": 34.89024353027344, "blob_id": "a600deb506423b1bcee3a8f99abe034eaa21c5bc", "content_id": "0af9112956a59a56bb2895a58a67e7a4f84dae44", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 14718, "license_type": "no_license", "max_line_length": 82, "num_lines": 410, "path": "/benchmark4_reg/test2.py", "repo_name": "rice-trc/project2.2", "src_encoding": "UTF-8", "text": "from collections import namedtuple\nfrom copy import deepcopy\n\nimport matplotlib.pyplot as plt\nimport numpy as np\nfrom scipy.linalg import norm\n\nfrom pyvib.common import db\nfrom pyvib.fnsi import FNSI\nfrom pyvib.forcing import multisine\nfrom pyvib.frf import covariance\nfrom pyvib.nlss import NLSS\nfrom pyvib.nonlinear_elements import Nonlinear_Element, Pnl, Tanhdryfriction\nfrom pyvib.signal import Signal\nfrom 
pyvib.subspace import Subspace\n\n\"\"\"Identify coefficient of tanh nonlinearity\nfnl = k*tanh(ẏ/eps)\nwhere eps is the regularization parameter specified a priory\n\nDepending on the value of eps, we get a good or bad identification. If we look\nat the BLA plot, we see that the noise level is high for the values of eps that\nresult in poor estimation. For 'good' eps the noise floor is zero.\n\nSo somehow certain values of eps result in data that seems to be disturbed by\nnoise, which I think is a sign of unsteady data.\nThe data is generated as noise-free\n\nfnsi good:\neps = 0.1, 0.0001\n\nfnsi bad:\neps = 0.01\n\"\"\"\n\n# data containers\nData = namedtuple('Data', ['sig', 'uest', 'yest', 'uval', 'yval', 'utest',\n 'ytest', 'um', 'ym', 'covY', 'freq', 'lines',\n 'npp', 'Ntr'])\nResult = namedtuple('Result', ['est_err', 'val_err', 'test_err', 'noise',\n 'errvec', 'descrip'])\np = 2\nweight = False\nadd_noise = False\n## Generate data from true model ##\n# Construct model to estimate\nA = np.array([[0.73915535, -0.62433133], [0.6247377, 0.7364469]])\nB = np.array([[0.79287245], [-0.34515159]])\nC = np.array([[0.71165154, 0.34917771]])\nD = np.array([[0.04498052]])\nif p == 2:\n C = np.vstack((C, C))\n D = np.vstack((D, 0.1563532))\n\nFfull = np.array([\n [-0.00867042, -0.00636662, 0.00197873, -0.00090865, -0.00088879,\n -0.02759694, -0.01817546, -0.10299409, 0.00648549, 0.08990175,\n 0.21129849, 0.00030216, 0.03299013, 0.02058325, -0.09202439,\n -0.0380775],\n [-0.17323214, -0.08738017, -0.11346953, -0.08077963, -0.05496476,\n 0.01874564, -0.02946581, -0.01869213, -0.07492472, 0.06868484,\n -0.02770704, 0.19900055, -0.089364, -0.00410125, 0.13002691,\n -0.11460958]])\nEfull = np.array([\n [1.88130305e-01, -2.70291900e-01, 9.12423046e-03, -5.78088500e-01,\n 9.54588221e-03, 5.08576019e-04, -1.33890850e+00, -2.02171960e+00,\n -4.05918956e-01, -1.37744223e+00, 1.21206232e-01, -9.26349423e-02,\n -5.38072197e-01, 2.34134460e-03, 4.94334690e-02, -1.88329572e-02],\n [-5.35196110e-01, -3.66250013e-01, 2.34622651e-02, 1.43228677e-01,\n -1.35959331e-02, 1.32052696e-02, 7.98717915e-01, 1.35344901e+00,\n -5.29440815e-02, 4.88513652e-01, 7.81285093e-01, -3.41019453e-01,\n 2.27692972e-01, 7.70150211e-02, -1.25046731e-02, -1.62456154e-02]])\n\n# excitation signal\nRMSu = 0.05 # Root mean square value for the input signal\nR = 4 # Number of phase realizations (one for validation and one\n# for testing)\nP = 3 # Number of periods\nkind = 'Full' # 'Full','Odd','SpecialOdd', or 'RandomOdd': kind of multisine\nm = D.shape[1] # number of inputs\np = C.shape[0] # number of outputs\nfs = 1 # normalized sampling rate\n\n\ndef simulate(true_model, npp=1024, Ntr=1, add_noise=False):\n print()\n print(f'Nonlinear parameters:',\n f'{len(true_model.nlx.active) + len(true_model.nly.active)}')\n print(f'Parameters to estimate: {true_model.npar}')\n # set non-active coefficients to zero. Note order of input matters\n idx = np.setdiff1d(np.arange(true_model.E.size), true_model.nlx.active)\n idy = np.setdiff1d(np.arange(true_model.F.size), true_model.nly.active)\n true_model.E.flat[idx] = 0\n true_model.F.flat[idy] = 0\n\n # get predictable random numbers. https://dilbert.com/strip/2001-10-25\n np.random.seed(10)\n # shape of u from multisine: (R,P*npp)\n u, lines, freq = multisine(N=npp, P=P, R=R, lines=kind, rms=RMSu)\n\n # Transient: Add Ntr periods before the start of each realization. 
To\n # generate steady state data.\n T1 = np.r_[npp*Ntr, np.r_[0:(R-1)*P*npp+1:P*npp]]\n _, yorig, _ = true_model.simulate(u.ravel(), T1=T1)\n u = u.reshape((R, P, npp)).transpose((2, 0, 1))[:, None] # (npp,m,R,P)\n y = yorig.reshape((R, P, npp, p)).transpose((2, 3, 0, 1))\n\n # Add colored noise to the output. randn generate white noise\n if add_noise:\n np.random.seed(10)\n noise = 1e-3*np.std(y[:, -1, -1]) * np.random.randn(*y.shape)\n # Do some filtering to get colored noise\n noise[1:-2] += noise[2:-1]\n y += noise\n\n return {'y': y, 'u': u, 'lines': lines, 'freq': freq}\n\n\ndef partion_data(data, Rest=2, Ntr=1):\n y = data['y']\n u = data['u']\n lines = data['lines']\n freq = data['freq']\n npp, p, R, P = y.shape\n # partitioning the data. Use last period of two last realizations.\n # test for performance testing and val for model selection\n utest = u[:, :, -1, -1]\n ytest = y[:, :, -1, -1]\n uval = u[:, :, -2, -1]\n yval = y[:, :, -2, -1]\n # all other realizations are used for estimation\n uest = u[..., :Rest, :]\n yest = y[..., :Rest, :]\n # noise estimate over periods. This sets the performace limit for the\n # estimated model\n covY = covariance(yest)\n\n # create signal object\n sig = Signal(uest, yest, fs=fs)\n sig.lines = lines\n # plot periodicity for one realization to verify data is steady state\n # sig.periodicity()\n # Calculate BLA, total- and noise distortion. Used for subspace\n # identification\n sig.bla()\n # average signal over periods. Used for training of PNLSS model\n um, ym = sig.average()\n\n return Data(sig, uest, yest, uval, yval, utest, ytest, um, ym, covY,\n freq, lines, npp, Ntr)\n\n\ndef identify_nlss(data, linmodel, nlx, nly, nmax=25, info=2):\n Rest = data.yest.shape[2]\n T1 = np.r_[data.npp*data.Ntr, np.r_[0:(Rest-1)*data.npp+1:data.npp]]\n model = NLSS(linmodel)\n # model._cost_normalize = 1\n model.add_nl(nlx=nlx, nly=nly)\n model.set_signal(data.sig)\n model.transient(T1)\n model.optimize(lamb=100, weight=weight, nmax=nmax, info=info)\n # get best model on validation data. 
Change Transient settings, as there is\n # only one realization\n nl_errvec = model.extract_model(data.yval, data.uval, T1=data.npp*data.Ntr,\n info=info)\n\n return model, nl_errvec\n\n\ndef identify_fnsi(data, nlx, nly, n, r, nmax=25, optimize=True, info=2):\n fnsi_errvec = []\n # FNSI can only use 1 realization\n sig = deepcopy(data.sig)\n # This is stupid, but unfortunately nessecary\n sig.y = sig.y[:, :, 0][:, :, None]\n sig.u = sig.u[:, :, 0][:, :, None]\n sig.R = 1\n sig.average()\n fnsi1 = FNSI()\n fnsi1.set_signal(sig)\n fnsi1.add_nl(nlx=nlx)\n fnsi1.estimate(n=n, r=r, weight=weight)\n fnsi1.transient(T1=data.npp*data.Ntr)\n if optimize:\n try:\n fnsi1.optimize(lamb=100, weight=weight, nmax=nmax, info=info)\n fnsi_errvec = fnsi1.extract_model(data.yval, data.uval,\n T1=data.npp*data.Ntr, info=info)\n except ValueError as e:\n print(f'FNSI optimization failed with {e}')\n return fnsi1, fnsi_errvec\n\n\ndef identify_linear(data, n, r, subscan=True, info=2):\n lin_errvec = []\n linmodel = Subspace(data.sig)\n linmodel._cost_normalize = 1\n if subscan:\n linmodel.scan(nvec=[2, 3, 4, 5, 6, 7, 8], maxr=20,\n optimize=True, weight=False, info=info)\n lin_errvec = linmodel.extract_model(data.yval, data.uval)\n print(f\"Best subspace model, n, r: {linmodel.n}, {linmodel.r}\")\n\n #linmodel.estimate(n=n, r=r, weight=weight)\n #linmodel.optimize(weight=weight, info=info)\n else:\n linmodel.estimate(n=n, r=r, weight=weight)\n linmodel.optimize(weight=weight, info=info)\n return linmodel, lin_errvec\n\n\ndef evaluate_models(data, models, errvec, info=2):\n\n descrip = tuple(models.keys()) # convert to tuple for legend concatenation\n models = list(models.values())\n Rest = data.yest.shape[2]\n T1 = np.r_[data.npp*data.Ntr, np.r_[0:(Rest-1)*data.npp+1:data.npp]]\n # simulation error\n val = np.empty((*data.yval.shape, len(models)))\n est = np.empty((*data.ym.shape, len(models)))\n test = np.empty((*data.ytest.shape, len(models)))\n for i, model in enumerate(models):\n test[..., i] = model.simulate(data.utest, T1=data.npp*data.Ntr)[1]\n val[..., i] = model.simulate(data.uval, T1=data.npp*data.Ntr)[1]\n est[..., i] = model.simulate(data.um, T1=T1)[1]\n\n Pest = data.yest.shape[3]\n # convenience inline functions\n\n def stack(ydata, ymodel): return \\\n np.concatenate(\n (ydata[..., None], (ydata[..., None] - ymodel)), axis=2)\n\n def rms(y): return np.sqrt(np.mean(y**2, axis=0))\n est_err = stack(data.ym, est) # (npp*R,p,nmodels)\n val_err = stack(data.yval, val)\n test_err = stack(data.ytest, test)\n noise = np.abs(np.sqrt(Pest*data.covY.squeeze()))\n\n if info:\n print()\n print(f\"err for models: signal, {descrip}\")\n # print(f'rms error noise:\\n{rms(noise)} \\ndb: \\n{db(rms(noise))} ')\n # only print error for p = 0. 
Almost equal to p = 1\n print(f'rms error est (db): \\n{db(rms(est_err[:,0]))}')\n print(f'rms error val (db): \\n{db(rms(val_err[:,0]))}')\n # print(f'rms error test: \\n{rms(test_err)} \\ndb: \\n{db(rms(test_err))}')\n return Result(est_err, val_err, test_err, noise, errvec, descrip)\n\n\ndef plot_val(res, data, p):\n figs = {}\n lines = data.lines\n freq = data.freq\n Pest = data.yest.shape[3]\n\n # result on validation data\n N = len(data.yval)\n freq = np.arange(N)/N*fs\n plottime = res.val_err\n plotfreq = np.fft.fft(plottime, axis=0)/np.sqrt(N)\n plt.figure()\n plt.plot(freq[lines], db(plotfreq[lines, p]), '.')\n plt.plot(freq[lines], db(np.sqrt(Pest*data.covY[lines, p, p].squeeze() / N)),\n '.')\n plt.xlabel('Frequency')\n plt.ylabel('Output (errors) (dB)')\n plt.legend(('Output',) + res.descrip + ('Noise',))\n plt.title(f'Validation results p:{p}')\n figs['val_data'] = (plt.gcf(), plt.gca())\n\n return figs\n\n\ndef plot_path(res, data, p):\n figs = {}\n # optimization path for NLSS\n plt.figure()\n for desc, err in res.errvec.items():\n if len(err) == 0:\n continue\n # optimization path for NLSS\n plt.plot(db(err), label=desc)\n imin = np.argmin(err)\n plt.scatter(imin, db(err[imin]))\n plt.xlabel('Successful iteration number')\n plt.ylabel('Validation error [dB]')\n plt.title('Selection of the best model on a separate data set')\n plt.legend()\n figs['pnlss_path'] = (plt.gcf(), plt.gca())\n return figs\n\n\ndef plot_time(res, data, p):\n figs = {}\n plt.figure()\n plt.plot(res.est_err[:, p])\n plt.xlabel('Time index')\n plt.ylabel('Output (errors)')\n plt.legend(('Output',) + res.descrip)\n plt.title(f'Estimation results p:{p}')\n figs['estimation_error'] = (plt.gcf(), plt.gca())\n return figs\n\n\ndef plot_bla(res, data, p):\n figs = {}\n lines = data.lines\n freq = data.freq\n\n # BLA plot. 
We can estimate nonlinear distortion\n # total and noise distortion averaged over P periods and M realizations\n # total distortion level includes nonlinear and noise distortion\n plt.figure()\n # When comparing distortion(variance, proportional to power) with\n # G(propertional to amplitude(field)), there is two definations for dB:\n # dB for power: Lp = 10 log10(P).\n # dB for field quantity: Lf = 10 log10(F²)\n # Alternative calc: bla_noise = db(np.abs(sig.covGn[:,pp,pp])*R, 'power')\n # if the signal is noise-free, fix noise so we see it in plot\n bla_noise = db(np.sqrt(np.abs(data.sig.covGn[:, p, p])*R))\n bla_noise[bla_noise < -150] = -150\n bla_tot = db(np.sqrt(np.abs(data.sig.covG[:, p, p])*R))\n bla_tot[bla_tot < -150] = -150\n\n plt.plot(freq[lines], db(np.abs(data.sig.G[:, p, 0])))\n plt.plot(freq[lines], bla_noise, 's')\n plt.plot(freq[lines], bla_tot, '*')\n plt.xlabel('Frequency (Hz)')\n plt.ylabel('magnitude (dB)')\n plt.title(f'Estimated BLA and nonlinear distortion p: {p}')\n plt.legend(('BLA FRF', 'Noise Distortion', 'Total Distortion'))\n plt.gca().set_ylim(bottom=-150)\n figs['bla'] = (plt.gcf(), plt.gca())\n return figs\n\n\ndef savefig(fname, figs):\n for k, fig in figs.items():\n fig = fig if isinstance(fig, list) else [fig]\n for i, f in enumerate(fig):\n f[0].tight_layout()\n f[0].savefig(f\"{fname}{k}{i}.png\")\n\n\ndef identify(data, nlx, nly, n, r, subscan=True):\n errvec = {}\n models = {}\n models['lin'], _ = identify_linear(\n data, n=n, r=r, subscan=subscan, info=info)\n models['fnsi'], _ = identify_fnsi(\n data, nlx, nly, n=n, r=r, nmax=nmax, optimize=False, info=info)\n models['fnsi optim'], errvec['fnsi'] = identify_fnsi(\n data, nlx, nly, n=n, r=r, nmax=nmax, optimize=True, info=info)\n models['nlss'], errvec['nlss'] = identify_nlss(\n data, models['lin'], nlx, nly, nmax=nmax, info=info)\n\n nly_pnl = [Pnl(degree=[2, 3, 5], structure='statesonly')]\n nlx_pnl = [Pnl(degree=[2, 3, 5], structure='statesonly')]\n # models['nlss_pnl'], errvec['nlss_pnl'] = identify_nlss(\n # data, models['lin'], nlx_pnl, nly_pnl, nmax=nmax, info=info)\n res = evaluate_models(data, models, errvec, info=info)\n return models, res\n\n\ndef disp_plot(data, res, nldof):\n f1 = plot_bla(res, data, nldof)\n f2 = plot_val(res, data, nldof)\n f3 = plot_path(res, data, nldof)\n f4 = plot_time(res, data, nldof)\n figs = {**f1, **f2, **f3, **f4}\n return figs\n\n# parameters\nnmax = 100\ninfo = 1\nsubscan = False\nnldof = 1\n\ntahn1 = Tanhdryfriction(eps=0.0001, w=[0, 1])\nnlx = [tahn1]\nF = np.array([])\nnly = None\n\n# We get good identification using BLA\nE = 1e-1*Efull[:, :len(nlx)]\n\ntrue_model = NLSS(A, B, C, D, E, F)\ntrue_model.add_nl(nlx=nlx, nly=nly)\nraw_data3 = simulate(true_model, npp=2048, Ntr=2)\n#raw_data3 = np.load('data/test.npz')\n\n# Ntr: how many transient periods in T1 for identification\ndata3 = partion_data(raw_data3, Ntr=1)\n# plot bla to see nonlinear distortion. 
Check noise level!\nplot_bla([], data3, nldof)\n\nmodels, res3 = identify(data3, nlx, nly, n=2, r=5, subscan=subscan)\nfigs = disp_plot(data3, res3, nldof)\n\n# subspace plots\nlinmodel = models['lin']\nfigs['subspace_models'] = linmodel.plot_models()\nif subscan:\n figs['subspace_optim'] = linmodel.plot_info()\n\n# plot periodicity for one realization to verify data is steady state\nfigs['per'] = data3.sig.periodicity(dof=nldof)\n\nsavefig('fig/test_', figs)\n" }, { "alpha_fraction": 0.5803869366645813, "alphanum_fraction": 0.6186346411705017, "avg_line_length": 28.585525512695312, "blob_id": "9c22a4c7832f3491805e5fa7dab2b1fdf371d529", "content_id": "f91fb8bfe592b73b7dd956b8d26a46b64a9f01d0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4507, "license_type": "no_license", "max_line_length": 79, "num_lines": 152, "path": "/benchmark4_reg/newmark_multisine.py", "repo_name": "rice-trc/project2.2", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\nimport matplotlib.pyplot as plt\nimport numpy as np\nfrom scipy.io import loadmat\n\nfrom pyvib.common import db, dsample\nfrom pyvib.forcing import multisine\nfrom pyvib.newmark import Newmark\nfrom pyvib.nonlinear_elements_newmark import NLS, Tanhdryfriction\n\n\"\"\"This script simulates a cantilever beam with attached slider\n\nThe slider is attached to the end; In order to know the slider velocity, needed\nfor output-based identification, the slider is modeled as a small extra mass\nattached to the tip with a spring. The nonlinear damping is then found from the\nextra mass' velocity using a regulized tanh function, ie\n\nfnl = μ*tanh(ẏ/ε)\n\nTo determine the right multisine amplitude, we make a with scan with increasing\namplitudes for one period and one realization. By looking at the first modes\nresonance peak in the FRF, we can roughly correlate the amplitude to stick or\nslip condition. We know the natural frequencies for each extreme from linear\nmodal analysis, ie. 
either fully stuck or fully sliding.\n\nωₙ = 19.59, 122.17, 143.11 # free\nωₙ = 21.44, 123.34, 344.78 # stuck\n\"\"\"\n\nscan = True\nbenchmark = 4\n\n# define multisine\nf0 = 5\nf1 = 70\nNt = 2**13\nfs = 15000\n\nif scan:\n R = 1\n P = 1\n Avec = [10, 30, 50, 70, 80, 100, 120]\n Avec = [0.1,1,5,10,15,30,40,50,70,80,100,120,150]\n #Avec = [50]\n upsamp = 1\n fname = 'scan'\nelse:\n R = 2\n P = 6\n Avec = [20]\n upsamp = 20\n fname = 'ms'\n\nns = Nt*R*P\nt = np.arange(ns)/fs\nfsint = upsamp * fs\nNtint = upsamp * Nt\n# add extra period which will be removed due to edge effects\nPfilter = 1\nif upsamp > 1:\n P = Pfilter + P\nnsint = Ntint*P*R\ndt = 1/fsint\n\n\n# load system defined in matlab\ndata = loadmat('data/system.mat')\nM = data['M']\nC = data['C']\nK = data['K']\nmuN = data['muN'].item()\neps = data['eps_reg'].item()\nT_tip = data['T_tip'].squeeze().astype(int)\nFex1 = data['Fex1'].squeeze().astype(int)\nw = data['w'].squeeze().astype(int)\nfdof = np.argwhere(Fex1).item()\nnldof = np.argwhere(w).item()\nndof = M.shape[0]\n# Fixed contact and free natural frequencies (rad/s).\nom_fixed = data['om_fixed'].squeeze()\nom_free = data['om_free'].squeeze()\n\n#data2 = loadmat('data/b4_A1_up1_ms_full.mat')\n#u = data2['u'].squeeze()\n#freq = data2['freq'].squeeze()\n#fs = data2['fs'].item()\n#dt = 1/fs\n#nsint = len(u)\n#Ntint = nsint\n\nnp.random.seed(0)\nu, lines, freq = multisine(f0, f1, N=Ntint, fs=fsint, R=R, P=P)\nfext = np.zeros((nsint, ndof))\n\nnls = NLS(Tanhdryfriction(eps=eps, w=w, kt=muN))\nsys = Newmark(M, C, K, nls)\nfor A in Avec:\n fext[:, fdof] = A*u.ravel()\n print(f'Newmark started with ns: {nsint}, A: {A}')\n try:\n x, xd, xdd = sys.integrate(fext, dt, x0=None, v0=None,\n sensitivity=False)\n if scan:\n np.savez(f'data/scan_A{A}.npz', x=x, freq=freq, Ntint=Ntint,\n fdof=fdof, nldof=nldof)\n # plot frf for forcing and tanh node\n Y = np.fft.fft(x[-Ntint:, [fdof, nldof]], axis=0)\n nfd = Y.shape[0]//2\n plt.figure()\n plt.plot(freq[:nfd], db(np.abs(Y[:nfd])))\n plt.xlim([0, 50])\n plt.xlabel('Frequency (Hz)')\n plt.ylabel('Amplitude (dB)')\n plt.legend(['Force dof', 'nl dof'])\n plt.minorticks_on()\n plt.grid(which='both')\n plt.savefig(f'fig/nm_b{benchmark}_A{A}_fft_comp_n{fdof}.png')\n except ValueError as e:\n print(f'Newmark integration failed with error {e}. For A: {A}')\n\n # We need to reshape into (npp,m,R,P)\nfext = fext.reshape(R, P, Ntint, ndof).transpose(2, 3, 0, 1)\n# fext.shape = (Ntint,P,R,ndof)\nx = x.reshape(R, P, Ntint, ndof).transpose(2, 3, 0, 1)\nxd = xd.reshape(R, P, Ntint, ndof).transpose(2, 3, 0, 1)\nxdd = xdd.reshape(R, P, Ntint, ndof).transpose(2, 3, 0, 1)\n\nx2, xd2, xdd2, fext2 = [None]*4\nif upsamp: # > 1:\n x2 = dsample(x, upsamp)\n xd2 = dsample(xd, upsamp)\n xdd2 = dsample(xdd, upsamp)\n fext2 = fext[::upsamp, :, :, 1:] # dsample remove first period\n\nnp.savez(f'data/{fname}.npz', x=x, xd=xd, xdd=xdd, x2=x2, xd2=xd2, xdd2=xdd2,\n fext=fext[:, fdof], fext2=fext2[:, fdof],\n lines=lines, fs=fs, A=A)\n# plt.figure()\n#plt.plot(t, x, '-k', label=r'$x_1$')\n##plt.plot(t, x, '-r', label=r'$x_2$')\n#plt.xlabel('Time (t)')\n#plt.ylabel('Displacement (m)')\n#plt.title('Force type: {}, periods:{:d}')\n# plt.legend()\n#\n# plt.figure()\n# plt.plot(np.abs(np.fft.fft(x[6*1024:7*1024,0])))\n#\n#\n# plt.show()\n" } ]
- num_files: 3
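The scripts in this row all hinge on the regularized dry-friction law fnl = μ·tanh(ẏ/ε) quoted in their docstrings. A minimal standalone sketch of that law and its stick/slip limits follows; the μ and ε values are illustrative, not the ones stored in `data/system.mat`.

```python
# Sketch of the regularized dry-friction law the row-2 scripts identify:
#   fnl = mu * tanh(v / eps)
# For |v| >> eps the force saturates at +/-mu (slip); for |v| << eps it acts
# like a viscous damper with slope mu/eps (stick). mu and eps are
# illustrative values only.
import numpy as np

def fnl(v, mu=1.0, eps=0.1):
    return mu * np.tanh(v / eps)

v = np.array([-1.0, -0.01, 0.0, 0.01, 1.0])
print(fnl(v))  # ~ -mu, ~ -(mu/eps)*0.01, 0, +(mu/eps)*0.01, ~ +mu
```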
Row 3: NoSyu/CDMM-B

- repo_name: NoSyu/CDMM-B
- repo_url: https://github.com/NoSyu/CDMM-B
- snapshot_id: 79d70612d6ba01158592ce0399107e8bda061ad1
- revision_id: 0baa831e217368782ed275f6dba35cca591f99e6
- directory_id: df99c5fb04ff21d7b589835872d5f7079010ef79
- branch_name: refs/heads/main
- visit_date: 2023-03-13T22:39:47.593267
- revision_date: 2021-03-15T03:45:57
- committer_date: 2021-03-15T03:45:57
- github_id: 346273640
- star_events_count: 1
- fork_events_count: 0
- gha_license_id: null · gha_created_at: null · gha_updated_at: null · gha_pushed_at: null · gha_language: null
[ { "alpha_fraction": 0.7142857313156128, "alphanum_fraction": 0.7142857313156128, "avg_line_length": 20, "blob_id": "586e358b29e3ea86c9e358b40e11c8c5358e9b99", "content_id": "d7247cf603e414da1a0c50c99a7cd82c68b77d8f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 21, "license_type": "permissive", "max_line_length": 20, "num_lines": 1, "path": "/src/models/__init__.py", "repo_name": "NoSyu/CDMM-B", "src_encoding": "UTF-8", "text": "from .cdmmb import *\n" }, { "alpha_fraction": 0.5756531953811646, "alphanum_fraction": 0.5780935883522034, "avg_line_length": 43.36942672729492, "blob_id": "fec087287493993c87f6453af2074a731d01e540", "content_id": "ae47bed9640c9d72eb7fe0fdb4742da9298a3e99", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6966, "license_type": "permissive", "max_line_length": 117, "num_lines": 157, "path": "/src/solvers/cdmmb_solver.py", "repo_name": "NoSyu/CDMM-B", "src_encoding": "UTF-8", "text": "from .solver import Solver\nfrom utils import time_desc_decorator\nfrom tqdm import tqdm\nfrom utils import to_var\nimport torch\nimport torch.nn as nn\nfrom math import isnan\nfrom transformers import get_linear_schedule_with_warmup\nimport codecs\nimport os\nfrom sklearn.metrics import precision_recall_fscore_support, accuracy_score\n\n\nclass SolverCDMMB(Solver):\n def __init__(self, config, train_data_loader, eval_data_loader, is_train=True, model=None):\n super(SolverCDMMB, self).__init__(config, train_data_loader, eval_data_loader, is_train, model)\n self.loss_fn = nn.CrossEntropyLoss()\n\n @time_desc_decorator('Training Start!')\n def train(self):\n highest_validation_acc = 0.0\n t_total = len(self.train_data_loader) * self.config.n_epoch\n warmup_step = int(t_total * self.config.warmup_ratio)\n scheduler = get_linear_schedule_with_warmup(self.optimizer, num_warmup_steps=warmup_step,\n num_training_steps=t_total)\n\n for epoch_i in range(self.epoch_i, self.config.n_epoch):\n self.epoch_i = epoch_i\n self.model.train()\n train_acc = 0.0\n\n for batch_i, (decisions, users, conv_length, convs) in enumerate(tqdm(self.train_data_loader, ncols=80)):\n token_ids = list()\n valid_length = list()\n segment_ids = list()\n for one_conv_token_ids, one_conv_valid_length, one_conv_segment_ids in convs:\n token_ids += one_conv_token_ids\n valid_length += one_conv_valid_length\n segment_ids += one_conv_segment_ids\n\n token_ids = to_var(torch.LongTensor(token_ids))\n segment_ids = to_var(torch.LongTensor(segment_ids))\n valid_length = valid_length\n decisions = to_var(torch.LongTensor(decisions))\n users = to_var(torch.LongTensor(users))\n\n self.optimizer.zero_grad()\n\n out = self.model(token_ids, valid_length, segment_ids, users, conv_length)\n batch_loss = self.loss_fn(out, decisions)\n assert not isnan(batch_loss.item())\n\n batch_loss.backward()\n\n torch.nn.utils.clip_grad_norm_(self.model.parameters(), self.config.clip)\n\n self.optimizer.step()\n scheduler.step()\n\n train_acc += self._calc_accuracy(out, decisions)\n\n print('\\n<Validation>...')\n self.validation_acc = self.evaluate()\n self.train_acc = train_acc\n print(\"epoch {} train acc {} validation acc {}\".format(epoch_i + 1,\n self.train_acc / (batch_i + 1),\n self.validation_acc))\n\n if self.validation_acc > highest_validation_acc:\n self.save_model(epoch_i + 1)\n highest_validation_acc = self.validation_acc\n\n if epoch_i % self.config.plot_every_epoch == 0:\n self.write_summary(epoch_i)\n\n 
return None\n\n def evaluate(self):\n self.model.eval()\n output_decisions = list()\n output_outs = list()\n\n for batch_i, (decisions, users, conv_length, convs) in enumerate(tqdm(self.eval_data_loader, ncols=80)):\n with torch.no_grad():\n token_ids = list()\n valid_length = list()\n segment_ids = list()\n for one_conv_token_ids, one_conv_valid_length, one_conv_segment_ids in convs:\n token_ids += one_conv_token_ids\n valid_length += one_conv_valid_length\n segment_ids += one_conv_segment_ids\n\n token_ids = to_var(torch.LongTensor(token_ids))\n segment_ids = to_var(torch.LongTensor(segment_ids))\n valid_length = valid_length\n decisions = to_var(torch.LongTensor(decisions))\n users = to_var(torch.LongTensor(users))\n\n out = self.model(token_ids, valid_length, segment_ids, users, conv_length)\n\n max_vals, max_indices = torch.max(out, 1)\n max_indices = max_indices.data.cpu().numpy().tolist()\n decisions = decisions.data.cpu().numpy().tolist()\n\n output_outs.append(max_indices)\n output_decisions.append(decisions)\n\n output_outs = [one_ele for sub_list in output_outs for one_ele in sub_list]\n output_decisions = [one_ele for sub_list in output_decisions for one_ele in sub_list]\n\n validation_acc = accuracy_score(output_decisions, output_outs)\n\n return validation_acc\n\n def test(self, is_print=True):\n self.model.eval()\n output_decisions = list()\n output_outs = list()\n\n for batch_i, (decisions, users, conv_length, convs) in enumerate(tqdm(self.eval_data_loader, ncols=80)):\n with torch.no_grad():\n token_ids = list()\n valid_length = list()\n segment_ids = list()\n for one_conv_token_ids, one_conv_valid_length, one_conv_segment_ids in convs:\n token_ids += one_conv_token_ids\n valid_length += one_conv_valid_length\n segment_ids += one_conv_segment_ids\n\n token_ids = to_var(torch.LongTensor(token_ids))\n segment_ids = to_var(torch.LongTensor(segment_ids))\n valid_length = valid_length\n decisions = to_var(torch.LongTensor(decisions))\n users = to_var(torch.LongTensor(users))\n\n out = self.model(token_ids, valid_length, segment_ids, users, conv_length)\n max_vals, max_indices = torch.max(out, 1)\n max_indices = max_indices.data.cpu().numpy().tolist()\n decisions = decisions.data.cpu().numpy().tolist()\n output_outs.append(max_indices)\n output_decisions.append(decisions)\n\n output_outs = [one_ele for sub_list in output_outs for one_ele in sub_list]\n output_decisions = [one_ele for sub_list in output_decisions for one_ele in sub_list]\n\n target_file_name = 'outputs_{}.csv'.format(self.epoch_i)\n with codecs.open(os.path.join(self.config.save_path, target_file_name), 'w', \"utf-8\") as output_f:\n for one_out, one_dec in zip(output_outs, output_decisions):\n print(\"{},{}\".format(one_out, one_dec), file=output_f)\n\n if is_print:\n print(accuracy_score(output_decisions, output_outs))\n print(precision_recall_fscore_support(output_decisions, output_outs, average='macro'))\n print(precision_recall_fscore_support(output_decisions, output_outs, average='micro'))\n print(precision_recall_fscore_support(output_decisions, output_outs, average='weighted'))\n\n return output_decisions, output_outs\n" }, { "alpha_fraction": 0.6115108132362366, "alphanum_fraction": 0.6179856061935425, "avg_line_length": 32.095237731933594, "blob_id": "23b81311d60a7f1fb62e0981cbd14cd405ccb8de", "content_id": "24f7c8d14c3af5a92516e36a750aea61fe75f51f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1390, "license_type": "permissive", 
"max_line_length": 112, "num_lines": 42, "path": "/src/test.py", "repo_name": "NoSyu/CDMM-B", "src_encoding": "UTF-8", "text": "from dataloader import get_loader\nfrom configs import get_config, load_json\nimport solvers\nimport nosyupylib\nimport traceback\nimport os\nimport re\n\n\ndef main():\n config = get_config(mode='test')\n\n with open(os.path.join(config.save_path, 'config.json'), 'r') as json_f:\n temp_config_str = json_f.read()\n config.max_users = int(re.findall(r\"'max_users': ([0-9]+?),\", temp_config_str)[0])\n config.max_len = int(re.findall(r\"'max_len': ([0-9]+?),\", temp_config_str)[0])\n config.rnn_hidden_size = int(re.findall(r\"'rnn_hidden_size': ([0-9]+?),\", temp_config_str)[0])\n\n raw_data = load_json(config.all_path)\n test_data_loader = get_loader(raw_data=raw_data, max_len=config.max_len, batch_size=config.batch_size,\n shuffle=False, user_map_dict=config.user_map_dict, max_users=config.max_users)\n\n model_solver = getattr(solvers, \"Solver{}\".format(config.model))\n solver = model_solver(config, None, test_data_loader, is_train=False)\n\n solver.build()\n solver.test()\n\n return config\n\n\nif __name__ == '__main__':\n add_msg = \"\"\n main_config = \"WHAT?!\"\n try:\n main_config = main()\n except Exception as e:\n add_msg = traceback.format_exc()\n pass\n finally:\n final_msg = \"{}\\n{}\\n{}\".format(__file__, str(main_config), add_msg)\n nosyupylib.alert_end_program(final_msg)\n" }, { "alpha_fraction": 0.5951720476150513, "alphanum_fraction": 0.6012763381004333, "avg_line_length": 42.95121765136719, "blob_id": "98d8c77f450e63e8d5db0e626a98cfbc4358cb34", "content_id": "46e35b30b437012da93f9c4ba08e01a9e42e3638", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3604, "license_type": "permissive", "max_line_length": 118, "num_lines": 82, "path": "/src/dataloader.py", "repo_name": "NoSyu/CDMM-B", "src_encoding": "UTF-8", "text": "from torch.utils.data import Dataset, DataLoader\nimport gluonnlp as nlp\nimport numpy as np\nimport kobert.utils\nfrom kobert.pytorch_kobert import get_pytorch_kobert_model\n\n\nclass CDMMBDataset(Dataset):\n def __init__(self, convs, decisions, users, bert_tokenizer, max_len, pad, pair, user_map_dict, max_users):\n max_user_id = len(user_map_dict)\n transform = nlp.data.BERTSentenceTransform(bert_tokenizer, max_seq_length=max_len, pad=pad, pair=pair)\n self.convs = list()\n self.conv_length = list()\n\n one_utter_output = transform((\"test sentence\",))\n padding_utter_tokens = np.zeros_like(one_utter_output[0])\n padding_utter_valid_length = np.zeros_like(one_utter_output[1])\n padding_utter_segment_ids = np.zeros_like(one_utter_output[2])\n\n for one_conv in convs:\n convs_tokens = list()\n convs_valid_length = list()\n convs_segment_ids = list()\n\n one_conv_list = one_conv.split(\"<utter/>\")\n if len(one_conv_list) < max_users:\n for each_utter in one_conv_list:\n one_utter_output = transform((each_utter,))\n convs_tokens.append(one_utter_output[0])\n convs_valid_length.append(one_utter_output[1])\n convs_segment_ids.append(one_utter_output[2])\n for _ in range(max_users - len(one_conv_list)):\n convs_tokens.append(padding_utter_tokens)\n convs_valid_length.append(padding_utter_valid_length)\n convs_segment_ids.append(padding_utter_segment_ids)\n self.conv_length.append(len(one_conv_list))\n else:\n for each_utter in one_conv_list[:max_users]:\n one_utter_output = transform((each_utter,))\n convs_tokens.append(one_utter_output[0])\n 
convs_valid_length.append(one_utter_output[1])\n convs_segment_ids.append(one_utter_output[2])\n self.conv_length.append(max_users)\n\n self.convs.append((convs_tokens, convs_valid_length, convs_segment_ids))\n\n self.decisions = [np.int32(one_deci) for one_deci in decisions]\n self.users = list()\n for one_users in users:\n one_users_arr = [np.int32(user_map_dict[x]) for x in one_users.split(\",\")]\n if len(one_users_arr) < max_users:\n one_users_output = np.pad(one_users_arr, (0, max_users-len(one_users_arr)),\n 'constant', constant_values=max_user_id)\n else:\n one_users_output = one_users_arr[:max_users]\n self.users.append(one_users_output)\n\n def __getitem__(self, idx):\n one_conv = self.convs[idx]\n one_deci = self.decisions[idx]\n one_user = self.users[idx]\n one_conv_length = self.conv_length[idx]\n\n return one_deci, one_user, one_conv_length, one_conv\n\n def __len__(self):\n return len(self.decisions)\n\n\ndef get_loader(raw_data, max_len, batch_size=100, shuffle=False, user_map_dict=None, max_users=10):\n def collate_fn(data):\n return zip(*data)\n\n bertmodel, vocab = get_pytorch_kobert_model()\n tokenizer = kobert.utils.get_tokenizer()\n tok = nlp.data.BERTSPTokenizer(tokenizer, vocab, lower=False)\n\n dataset = CDMMBDataset(raw_data[0], raw_data[1], raw_data[2], tok, max_len, True, False, user_map_dict, max_users)\n\n data_loader = DataLoader(dataset=dataset, batch_size=batch_size, shuffle=shuffle, collate_fn=collate_fn)\n\n return data_loader\n" }, { "alpha_fraction": 0.6111971139907837, "alphanum_fraction": 0.6197110414505005, "avg_line_length": 42.53932571411133, "blob_id": "b30d04f04b72e02b75c530126fda2f7bc732f42c", "content_id": "b9564ad6a3904dd74e2856d039114c735a36c200", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3876, "license_type": "permissive", "max_line_length": 105, "num_lines": 89, "path": "/src/models/cdmmb.py", "repo_name": "NoSyu/CDMM-B", "src_encoding": "UTF-8", "text": "import torch\nimport torch.nn as nn\nimport math\nfrom kobert.pytorch_kobert import get_pytorch_kobert_model\n\n\ndef _gen_attention_mask(token_ids, valid_length):\n attention_mask = torch.zeros_like(token_ids)\n for i, v in enumerate(valid_length):\n attention_mask[i][:v] = 1\n return attention_mask.float()\n\n\nclass CDMMB(nn.Module):\n def __init__(self, config):\n super(CDMMB, self).__init__()\n\n self.config = config\n\n bertmodel, vocab = get_pytorch_kobert_model()\n self.bert = bertmodel\n self.vocab = vocab\n\n self.top_rnn = nn.GRU(input_size=config.hidden_size, hidden_size=config.rnn_hidden_size,\n dropout=0, bidirectional=False, batch_first=True)\n\n self.user_rnn = nn.GRU(input_size=config.embedding_size, hidden_size=config.rnn_hidden_size,\n dropout=0, bidirectional=False, batch_first=True)\n\n self.classifier = nn.Linear(config.rnn_hidden_size * 4, config.num_classes)\n self.dropout = nn.Dropout(p=config.dr_rate)\n\n self.user_embedding = nn.Embedding(config.user_size+1, config.embedding_size, padding_idx=0)\n self.user_embedding.weight.requires_grad = True\n\n def _attention_net(self, rnn_output, final_hidden_state):\n scale = 1. 
/ math.sqrt(self.config.rnn_hidden_size)\n query = final_hidden_state.unsqueeze(1) # [BxQ] -> [Bx1xQ]\n keys = rnn_output.permute(0, 2, 1) # [BxTxK] -> [BxKxT]\n energy = torch.bmm(query, keys) # [Bx1xQ]x[BxKxT] -> [Bx1xT]\n energy = nn.functional.softmax(energy.mul_(scale), dim=2) # scale, normalize\n\n values = rnn_output # [BxTxV]\n linear_combination = torch.bmm(energy, values).squeeze(1) # [Bx1xT]x[BxTxV] -> [BxV]\n\n return linear_combination\n\n def _user(self, users, conv_length):\n embedded_users = self.user_embedding(users)\n users_input = nn.utils.rnn.pack_padded_sequence(embedded_users, conv_length,\n batch_first=True, enforce_sorted=False)\n packed_output, hidden = self.user_rnn(users_input)\n rnn_output, _ = nn.utils.rnn.pad_packed_sequence(packed_output, batch_first=True)\n\n attn_output = self._attention_net(rnn_output, hidden[-1])\n\n return attn_output\n\n def _conv(self, token_ids, valid_length, segment_ids, conv_length, batch_size):\n attention_mask = _gen_attention_mask(token_ids, valid_length)\n\n _, pooler = self.bert(input_ids=token_ids, token_type_ids=segment_ids.long(),\n attention_mask=attention_mask.float().to(token_ids.device))\n\n output_pooler = pooler.view(batch_size, -1, self.config.hidden_size)\n\n convs_input = nn.utils.rnn.pack_padded_sequence(output_pooler, conv_length,\n batch_first=True, enforce_sorted=False)\n\n packed_output, hidden = self.top_rnn(convs_input)\n rnn_output, _ = nn.utils.rnn.pad_packed_sequence(packed_output, batch_first=True)\n\n avg_pool = nn.functional.adaptive_avg_pool1d(rnn_output.permute(0, 2, 1), 1).view(batch_size, -1)\n max_pool = nn.functional.adaptive_max_pool1d(rnn_output.permute(0, 2, 1), 1).view(batch_size, -1)\n\n return hidden[-1], avg_pool, max_pool\n\n def forward(self, token_ids, valid_length, segment_ids, users, conv_length):\n batch_size = len(conv_length)\n\n user_hidden = self._user(users, conv_length)\n\n conv_hidden, conv_avg_pool, conv_max_pool = self._conv(token_ids, valid_length, segment_ids,\n conv_length, batch_size)\n merged_output = [user_hidden, conv_hidden, conv_avg_pool, conv_max_pool]\n merged_output = torch.cat(merged_output, dim=1)\n out = self.classifier(merged_output)\n\n return out\n\n" }, { "alpha_fraction": 0.7931034564971924, "alphanum_fraction": 0.7931034564971924, "avg_line_length": 57, "blob_id": "d4b782aac2c2edbc4454f106a9393c6390b3dade", "content_id": "5a55d35039a71b8a87fd41046a83e211a713787a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 58, "license_type": "permissive", "max_line_length": 57, "num_lines": 1, "path": "/README.md", "repo_name": "NoSyu/CDMM-B", "src_encoding": "UTF-8", "text": "# Conversational Decision Making Model with BERT (CDMM-B)\n" }, { "alpha_fraction": 0.7400000095367432, "alphanum_fraction": 0.7400000095367432, "avg_line_length": 24, "blob_id": "f083f6e434b6c630d761e4583ef3e02d07352eea", "content_id": "dc4b5f601cdcbe43b1b6c50fe33124c930a626e5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 50, "license_type": "permissive", "max_line_length": 27, "num_lines": 2, "path": "/src/solvers/__init__.py", "repo_name": "NoSyu/CDMM-B", "src_encoding": "UTF-8", "text": "from .solver import *\nfrom .cdmmb_solver import *\n" }, { "alpha_fraction": 0.5514018535614014, "alphanum_fraction": 0.5887850522994995, "avg_line_length": 34.33333206176758, "blob_id": "fa0c782fdda1d765ab299f02d8eafbc175b3826a", "content_id": 
"de9e6d6a107f93619e6aa6da7d15579b577b7b47", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 107, "license_type": "permissive", "max_line_length": 84, "num_lines": 3, "path": "/src/RunTrain.sh", "repo_name": "NoSyu/CDMM-B", "src_encoding": "UTF-8", "text": "#!/usr/bin/env bash\n\npython train.py --model=\"$1\" --batch_size=\"$2\" --eval_batch_size=\"$2\" --n_epoch=\"$3\"\n\n" }, { "alpha_fraction": 0.8272727131843567, "alphanum_fraction": 0.8272727131843567, "avg_line_length": 35.66666793823242, "blob_id": "6120a620191c55d49f96578ac5f14e976420199b", "content_id": "640d3f3cbc1eeab6088f2086e067daabf6f06ba8", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 110, "license_type": "permissive", "max_line_length": 43, "num_lines": 3, "path": "/src/utils/__init__.py", "repo_name": "NoSyu/CDMM-B", "src_encoding": "UTF-8", "text": "from .time_track import time_desc_decorator\nfrom .convert import *\nfrom .tensorboard import TensorboardWriter\n" }, { "alpha_fraction": 0.5740740895271301, "alphanum_fraction": 0.6111111044883728, "avg_line_length": 35, "blob_id": "c66b505d6958a1cc6756c00c26436b9538a61b0b", "content_id": "b3e9688e84b463d73df926730d15c79b323840ad", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 108, "license_type": "permissive", "max_line_length": 86, "num_lines": 3, "path": "/src/RunTest.sh", "repo_name": "NoSyu/CDMM-B", "src_encoding": "UTF-8", "text": "#!/usr/bin/env bash\n\npython test.py --model=\"$1\" --batch_size=\"$2\" --eval_batch_size=\"$2\" --checkpoint=\"$3\"\n" }, { "alpha_fraction": 0.6579804420471191, "alphanum_fraction": 0.6579804420471191, "avg_line_length": 35.117645263671875, "blob_id": "aa77614ef8397bf2153d9c98fc89142ffb601144", "content_id": "3cb75b7c1b6a484fe7908064dee5c8f61398a37d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 614, "license_type": "permissive", "max_line_length": 78, "num_lines": 17, "path": "/src/utils/tensorboard.py", "repo_name": "NoSyu/CDMM-B", "src_encoding": "UTF-8", "text": "from tensorboardX import SummaryWriter\n\n\nclass TensorboardWriter(SummaryWriter):\n def __init__(self, logdir):\n super(TensorboardWriter, self).__init__(logdir)\n self.logdir = self.file_writer.get_logdir()\n\n def update_parameters(self, module, step_i):\n for name, param in module.named_parameters():\n self.add_histogram(name, param.clone().cpu().data.numpy(), step_i)\n\n def update_loss(self, loss, step_i, name='loss'):\n self.add_scalar(name, loss, step_i)\n\n def update_histogram(self, values, step_i, name='hist'):\n self.add_histogram(name, values, step_i)\n" }, { "alpha_fraction": 0.6530612111091614, "alphanum_fraction": 0.6530612111091614, "avg_line_length": 34.21875, "blob_id": "c400b92b90422dca871a9f89658397cb7634f7bc", "content_id": "0d9a4747862a0edf62601e8033183e5db2aa61a7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1127, "license_type": "permissive", "max_line_length": 119, "num_lines": 32, "path": "/src/train.py", "repo_name": "NoSyu/CDMM-B", "src_encoding": "UTF-8", "text": "from dataloader import get_loader\nfrom configs import get_config, load_json\nimport os\nimport solvers\n\n\ndef main():\n config = get_config(mode='train')\n val_config = get_config(mode='valid')\n with open(os.path.join(config.save_path, 'config.json'), 
'w') as json_f:\n config.to_json(json_f)\n\n raw_data = load_json(config.all_path)\n train_data_loader = get_loader(raw_data=raw_data, max_len=config.max_len, batch_size=config.batch_size,\n shuffle=True, user_map_dict=config.user_map_dict, max_users=config.max_users)\n\n raw_data = load_json(val_config.all_path)\n eval_data_loader = get_loader(raw_data=raw_data, max_len=val_config.max_len, batch_size=val_config.eval_batch_size,\n shuffle=False, user_map_dict=config.user_map_dict, max_users=config.max_users)\n\n model_solver = getattr(solvers, \"Solver{}\".format(config.model))\n solver = model_solver(config, train_data_loader, eval_data_loader, is_train=True)\n\n solver.build()\n solver.train()\n solver.writer.close()\n\n return config\n\n\nif __name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.610238790512085, "alphanum_fraction": 0.6188427805900574, "avg_line_length": 36.49193572998047, "blob_id": "131656c18bd00cf3e479ea26043bf05578a2ea26", "content_id": "1ecc3f3eb63dabeff7245e7e8a4912953ef191b1", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4649, "license_type": "permissive", "max_line_length": 85, "num_lines": 124, "path": "/src/configs.py", "repo_name": "NoSyu/CDMM-B", "src_encoding": "UTF-8", "text": "import os\nimport argparse\nfrom datetime import datetime\nfrom pathlib import Path\nimport pprint\nfrom torch import optim\nimport torch.nn as nn\nimport codecs\nimport json\n\nproject_dir = Path(__file__).resolve().parent\noptimizer_dict = {'RMSprop': optim.RMSprop, 'Adam': optim.Adam, 'AdamW': None}\nrnn_dict = {'lstm': nn.LSTM, 'gru': nn.GRU}\nusername = Path.home().name\nsave_dir = project_dir.joinpath(\"results\")\nos.makedirs(save_dir, exist_ok=True)\n\n\ndef str2bool(v):\n \"\"\"string to boolean\"\"\"\n if v.lower() in ('yes', 'true', 't', 'y', '1'):\n return True\n elif v.lower() in ('no', 'false', 'f', 'n', '0'):\n return False\n else:\n raise argparse.ArgumentTypeError('Boolean value expected.')\n\n\ndef load_json(json_path):\n with codecs.open(json_path, \"r\", \"utf-8\") as json_f:\n load_data = json.load(json_f)\n return load_data\n\n\nclass Config(object):\n def __init__(self, **kwargs):\n \"\"\"Configuration Class: set kwargs as class attributes with setattr\"\"\"\n if kwargs is not None:\n for key, value in kwargs.items():\n if key == 'optimizer':\n value = optimizer_dict[value]\n if key == 'rnn':\n value = rnn_dict[value]\n setattr(self, key, value)\n\n self.dataset_dir = project_dir.joinpath('ajd_data')\n self.user_dict_path = self.dataset_dir.joinpath(\"user_dict.csv\")\n\n self.data_dir = self.dataset_dir.joinpath(self.mode)\n\n self.all_path = self.data_dir.joinpath('convs_decisions_users.json')\n self.convs_path = self.data_dir.joinpath('convs.json')\n self.decisions_path = self.data_dir.joinpath('decisions.json')\n self.users_path = self.data_dir.joinpath('users_new.json')\n\n if self.mode == 'train' and self.checkpoint is None:\n time_now = datetime.now().strftime('%Y%m%d_%H%M%S')\n self.save_path = save_dir.joinpath(self.data, self.model, time_now)\n self.logdir = str(self.save_path)\n os.makedirs(self.save_path, exist_ok=True)\n elif self.checkpoint is not None:\n assert os.path.exists(self.checkpoint)\n self.save_path = os.path.dirname(self.checkpoint)\n self.logdir = str(self.save_path)\n\n self.user_dict = dict()\n self.user_map_dict = dict()\n with codecs.open(self.user_dict_path, \"r\", \"utf-8\") as csv_f:\n for line in csv_f:\n line_arr = line.strip().split(\"\\t\")\n name = 
line_arr[0]\n idx = int(line_arr[1])\n self.user_dict[name] = idx\n self.user_map_dict[str(idx)] = idx\n self.user_size = len(self.user_dict)\n\n def __str__(self):\n \"\"\"Pretty-print configurations in alphabetical order\"\"\"\n config_str = 'Configurations\\n'\n config_str += pprint.pformat(self.__dict__)\n return config_str\n\n def to_json(self, json_f):\n json.dump(pprint.pformat(self.__dict__), fp=json_f, sort_keys=True, indent=4)\n\n\ndef get_config(parse=True, **optional_kwargs):\n parser = argparse.ArgumentParser()\n\n parser.add_argument('--mode', type=str, default='test')\n\n parser.add_argument('--batch_size', type=int, default=5)\n parser.add_argument('--eval_batch_size', type=int, default=5)\n parser.add_argument('--n_epoch', type=int, default=5)\n parser.add_argument('--learning_rate', type=float, default=1e-4)\n parser.add_argument('--optimizer', type=str, default='AdamW')\n parser.add_argument('--clip', type=float, default=1.0)\n parser.add_argument('--warmup_ratio', type=float, default=0.1)\n parser.add_argument('--checkpoint', type=str, default=None)\n\n parser.add_argument('--model', type=str, default='CDMMTN')\n parser.add_argument('--rnn', type=str, default='gru')\n parser.add_argument('--embedding_size', type=int, default=100)\n parser.add_argument('--hidden_size', type=int, default=768)\n parser.add_argument('--rnn_hidden_size', type=int, default=100)\n parser.add_argument('--num_classes', type=int, default=4)\n parser.add_argument('--dr_rate', type=float, default=0.2)\n parser.add_argument('--context_size', type=int, default=1000)\n parser.add_argument('--max_users', type=int, default=10)\n\n parser.add_argument('--plot_every_epoch', type=int, default=1)\n\n parser.add_argument('--data', type=str, default='ajd_data')\n parser.add_argument('--max_len', type=int, default=100)\n\n if parse:\n kwargs = parser.parse_args()\n else:\n kwargs = parser.parse_known_args()[0]\n\n kwargs = vars(kwargs)\n kwargs.update(optional_kwargs)\n\n return Config(**kwargs)\n" } ]
13
silky/Python-Image-Morpher
https://github.com/silky/Python-Image-Morpher
a1e36e75458d5090bd2684c1b248ebeddf5a6915
d2d94cd340ffc091a322a155df1f8048c662d35c
493eaabb164462ec06825fb39c6f635cdeff7bf5
refs/heads/master
2023-02-14T17:48:22.599671
2021-01-17T03:48:25
2021-01-17T03:48:25
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7045454382896423, "alphanum_fraction": 0.8181818127632141, "avg_line_length": 8, "blob_id": "ba116c463e562735c2d9520c5c9451aae58f3ed5", "content_id": "021b7aa0e90431923370b307d01615bf0993a1d9", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 44, "license_type": "permissive", "max_line_length": 13, "num_lines": 5, "path": "/Morphing/requirements.txt", "repo_name": "silky/Python-Image-Morpher", "src_encoding": "UTF-8", "text": "pyqt5\nscipy\nnumpy==1.19.3\nimageio\nmatplotlib" }, { "alpha_fraction": 0.5827063322067261, "alphanum_fraction": 0.5928325057029724, "avg_line_length": 66.21500396728516, "blob_id": "e1b5b5da88294151f674118e53f7d8122fa853b4", "content_id": "d549ba3a3a681f21ba6ef0463ca0ffd06f3916dc", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 74565, "license_type": "permissive", "max_line_length": 571, "num_lines": 1093, "path": "/Morphing/MorphingApp.py", "repo_name": "silky/Python-Image-Morpher", "src_encoding": "UTF-8", "text": "#######################################################\r\n# Author: David Dowd\r\n# Email: [email protected]\r\n#######################################################\r\n\r\nimport multiprocessing\r\nimport sys\r\nimport os\r\nimport re\r\nimport time\r\nimport shutil\r\n\r\nimport imageio\r\nfrom PIL import Image\r\nfrom PyQt5.QtWidgets import QMainWindow, QApplication, QFileDialog\r\nimport math\r\n\r\nfrom Morphing import *\r\nfrom MorphingGUI import *\r\n\r\n# Module level Variables\r\n#######################################################\r\nROOT_DIR = os.path.dirname(os.path.abspath(__file__))\r\n\r\n\r\nclass MorphingApp(QMainWindow, Ui_MainWindow):\r\n\r\n def __init__(self, parent=None):\r\n super(MorphingApp, self).__init__(parent)\r\n self.setupUi(self)\r\n self.setWindowIcon(QtGui.QIcon(\"./Morphing.ico\"))\r\n\r\n # Defaults on Startup\r\n self.chosen_left_points = [] # List used to store points confirmed in previous sessions (LEFT)\r\n self.chosen_right_points = [] # List used to store points confirmed in previous sessions (RIGHT)\r\n self.added_left_points = [] # List used to store temporary points added in current session (LEFT)\r\n self.added_right_points = [] # List used to store temporary points added in current session (RIGHT)\r\n self.confirmed_left_points = [] # List used to store existing points confirmed in current session (LEFT)\r\n self.confirmed_right_points = [] # List used to store existing points confirmed in current session (RIGHT)\r\n self.placed_points_history = [] # List used to log recent points placed during this session for CTRL + Y\r\n self.clicked_window_history = [-1] # List used to log the order in which the image windows have been clicked - functions as a \"stack\"\r\n self.leftPolyList = [] # List used to store delaunay triangles (LEFT)\r\n self.rightPolyList = [] # List used to store delaunay triangles (RIGHT)\r\n self.blendList = [] # List used to store a variable amount of alpha increment frames for full blending\r\n\r\n self.startingImagePath = '' # String used to store file path to the left image\r\n self.endingImagePath = '' # String used to store file path to the right image\r\n self.startingTextPath = '' # String used to store file path to the left image's text file, if it was pre-made (LEGACY)\r\n self.endingTextPath = '' # String used to store file path to the right image's text file, if it was pre-made (LEGACY)\r\n self.startingTextCorePath = '' # 
String used to store local file path to the left image's corresponding text file\r\n self.endingTextCorePath = '' # String used to store local file path to the right image's corresponding text file\r\n self.startingImageName = '' # String used to store the left image's file name\r\n self.endingImageName = '' # String used to store the right image's file name\r\n self.startingImageType = '' # String used to store the left image's file type\r\n self.endingImageType = '' # String used to store the right image's file type\r\n\r\n self.enableDeletion = 0 # Flag used to indicate whether the most recently created point can be deleted with Backspace\r\n self.triangleUpdate = 0 # Flag used to indicate whether triangles need to be repainted (or removed) in the next paint event\r\n self.triangleUpdatePref = 0 # Flag used to remember whether the user wants to display triangles (in the case that they are forced off)\r\n self.imageScalar = 0 # Value used to scale created points to where they visually line up with the original images\r\n self.fullBlendValue = 0.05 # Value used for determining the spacing between alpha increments when full blending\r\n self.gifValue = 100 # Value used for determining the amount of time allotted to each frame of a created .gif file\r\n\r\n self.leftSize = (0, 0) # Tuple used to store the width and height of the left image\r\n self.rightSize = (0, 0) # Tuple used to store the width and height of the right image\r\n self.leftZoomData = None # Tuple used to store the coordinates of the user's zoom on the left image. None when zoomed out, (a, b, c, d) when zoomed in\r\n self.rightZoomData = None # Tuple used to store the coordinates of the user's zoom on the right image. None when zoomed out, (e, f, g, h) when zoomed in\r\n\r\n self.fullBlendComplete = False # Flag used to indicate whether a full blend is displayable\r\n self.changeFlag = False # Flag used to indicate when the program should repaint (because Qt loves to very frequently call paint events)\r\n\r\n self.blendedImage = None # Pre-made reference to a variable that is used to store a singular blended image\r\n\r\n # Logic\r\n self.loadStartButton.clicked.connect(self.loadDataLeft) # When the first load image button is clicked, begins loading logic\r\n self.loadEndButton.clicked.connect(self.loadDataRight) # When the second load image button is clicked, begins loading logic\r\n self.resizeLeftButton.clicked.connect(self.resizeLeft) # When the left resize button is clicked, begins resizing logic\r\n self.resizeRightButton.clicked.connect(self.resizeRight) # When the right resize button is clicked, begins resizing logic\r\n self.triangleBox.clicked.connect(self.updateTriangleStatus) # When the triangle box is clicked, changes flags\r\n self.transparencyBox.stateChanged.connect(self.transparencyUpdate) # When the transparency box is checked or unchecked, changes flags\r\n self.blendButton.clicked.connect(self.blendImages) # When the blend button is clicked, begins blending logic\r\n self.blendBox.stateChanged.connect(self.blendBoxUpdate) # When the blend box is checked or unchecked, changes flags\r\n self.blendText.returnPressed.connect(self.blendTextDone) # When the return key is pressed, removes focus from the input text window\r\n self.saveButton.clicked.connect(self.saveImages) # When the save button is clicked, begins image saving logic\r\n self.gifText.returnPressed.connect(self.gifTextDone) # When the return key is pressed, removes focus from the input text window\r\n 
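# Added note (from refreshAlphaSlider below): the alpha slider works in integer ticks; refreshAlphaSlider keeps the tick count in sync with the full blend increment (default 0.05 -> 20 ticks).\r\n        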
self.alphaSlider.valueChanged.connect(self.updateAlpha) # When the alpha slider is moved, reads and formats the value\r\n self.triangleRedSlider.valueChanged.connect(self.updateRed) # When the red slider is moved, reads the value\r\n self.triangleGreenSlider.valueChanged.connect(self.updateGreen) # When the green slider is moved, reads the value\r\n self.triangleBlueSlider.valueChanged.connect(self.updateBlue) # When the blue slider is moved, reads the value\r\n self.resetPointsButton.clicked.connect(self.resetPoints) # When the reset points button is clicked, begins logic for removing points\r\n self.resetSliderButton.clicked.connect(self.resetAlphaSlider) # When the reset slider button is clicked, begins logic for resetting it to default\r\n self.autoCornerButton.clicked.connect(self.autoCorner) # When the add corner button is clicked, begins logic for adding corner points\r\n\r\n # Macro function to save unnecessary repetitions of the same few lines of code.\r\n # Essentially reconfigures the alpha slider as the user works with the GUI.\r\n def refreshAlphaSlider(self):\r\n temp = float(self.alphaValue.text())\r\n self.alphaSlider.setMaximum(int(100 / (self.fullBlendValue / 0.01)))\r\n self.fullBlendValue = 1.0 / self.alphaSlider.maximum()\r\n self.blendText.setText(str(self.fullBlendValue))\r\n self.alphaSlider.setValue(round(temp / self.fullBlendValue))\r\n if self.alphaSlider.maximum() == 20:\r\n self.alphaSlider.setTickInterval(2)\r\n self.resetSliderButton.setEnabled(0)\r\n else:\r\n self.alphaSlider.setTickInterval(1)\r\n self.resetSliderButton.setEnabled(1)\r\n\r\n # QoL function that removes focus from the full blend text window when the user presses Enter.\r\n # Additionally verifies the user's specified full blending value.\r\n def blendTextDone(self):\r\n self.fullBlendComplete = False\r\n if self.blendText.text() == '.':\r\n self.fullBlendValue = 0.05\r\n self.blendText.setText(str(self.fullBlendValue))\r\n self.refreshAlphaSlider()\r\n else:\r\n self.verifyValue(\"blend\")\r\n self.notificationLine.setFocus()\r\n\r\n # QoL function that removes focus from the gif text window when the user presses Enter.\r\n # Additionally verifies the user's specified gif frame time value.\r\n def gifTextDone(self):\r\n if self.gifText.text().strip() == 'ms':\r\n self.gifValue = 100\r\n self.gifText.setText(\"100 ms\")\r\n else:\r\n self.verifyValue(\"gif\")\r\n self.notificationLine.setFocus()\r\n\r\n # Macro function to save unnecessary repetitions of the same few lines of code.\r\n # Essentially corrects invalid values that the user may enter for full blending and gif frame times..\r\n # then rounds to the best number that's closest to what the user specified in the input box.\r\n # (This is required, since Qt can't restrict all bad forms of input.)\r\n def verifyValue(self, param: str):\r\n if param == \"blend\":\r\n self.fullBlendComplete = False\r\n self.fullBlendValue = min(float(self.blendText.text()), 0.25)\r\n self.fullBlendValue = max(self.fullBlendValue, 0.001)\r\n self.blendText.setText(str(self.fullBlendValue))\r\n self.refreshAlphaSlider()\r\n elif param == \"gif\":\r\n self.gifValue = min(int(self.gifText.text().replace(' ms', '')), 999)\r\n self.gifValue = int(max(self.gifValue, 1))\r\n if len(str(self.gifValue)) == 1:\r\n self.gifText.setText(\"00\" + str(self.gifValue) + \" ms\")\r\n elif len(str(self.gifValue)) == 2:\r\n self.gifText.setText(\"0\" + str(self.gifValue) + \" ms\")\r\n else:\r\n self.gifText.setText(str(self.gifValue) + \" ms\")\r\n\r\n # Workaround 
function that prevents unpredictable behavior with the triangle box\r\n def updateTriangleStatus(self):\r\n self.triangleUpdatePref = int(self.triangleBox.isChecked())\r\n self.displayTriangles()\r\n\r\n # Macro function to save unnecessary repetitions of the same few lines of code.\r\n # Essentially refreshes the displayed left and right images of the UI.\r\n def refreshPaint(self):\r\n self.changeFlag = True\r\n self.paintEvent(1)\r\n\r\n # Macro function to save unnecessary repetitions of the same few lines of code.\r\n # Essentially toggles the triangle widget when necessary.\r\n def updateTriangleWidget(self, val):\r\n if val:\r\n self.triangleRed.setText(\"<b><font color='red'>Red</font></b>\")\r\n self.triangleGreen.setText(\"<b><font color='green'>Green</font></b>\")\r\n self.triangleBlue.setText(\"<b><font color='blue'>Blue</font></b>\")\r\n else:\r\n self.triangleRed.setText(\"<font color='black'>Red</font>\")\r\n self.triangleGreen.setText(\"<font color='black'>Green</font>\")\r\n self.triangleBlue.setText(\"<font color='black'>Blue</font>\")\r\n self.triangleLabel.setEnabled(val)\r\n self.triangleRed.setEnabled(val)\r\n self.triangleGreen.setEnabled(val)\r\n self.triangleBlue.setEnabled(val)\r\n self.triangleRedSlider.setEnabled(val)\r\n self.triangleGreenSlider.setEnabled(val)\r\n self.triangleBlueSlider.setEnabled(val)\r\n self.triangleRedValue.setEnabled(val)\r\n self.triangleGreenValue.setEnabled(val)\r\n self.triangleBlueValue.setEnabled(val)\r\n\r\n # Self-contained function that checks for the existence of corner points and adds any that are not already present.\r\n # Can not be invoked while a point is pending (in order to prevent exploits).\r\n # Written to dynamically work with triangles without any exploits.\r\n def autoCorner(self):\r\n leftMaxX = min(math.ceil((self.startingImage.geometry().topRight().x() - self.startingImage.geometry().topLeft().x() - 1) * self.imageScalar[0]), self.leftSize[0] - 1)\r\n leftMaxY = min(math.ceil((self.endingImage.geometry().bottomRight().y() - self.startingImage.geometry().topLeft().y() - 1) * self.imageScalar[1]), self.leftSize[1] - 1)\r\n rightMaxX = min(math.ceil((self.endingImage.geometry().topRight().x() - self.endingImage.geometry().topLeft().x() - 1) * self.imageScalar[0]), self.rightSize[0] - 1)\r\n rightMaxY = min(math.ceil((self.endingImage.geometry().bottomRight().y() - self.startingImage.geometry().topLeft().y() - 1) * self.imageScalar[1]), self.rightSize[1] - 1)\r\n\r\n tempLeft = [QtCore.QPoint(0, 0), QtCore.QPoint(0, leftMaxY), QtCore.QPoint(leftMaxX, 0), QtCore.QPoint(leftMaxX, leftMaxY)]\r\n tempRight = [QtCore.QPoint(0, 0), QtCore.QPoint(0, rightMaxY), QtCore.QPoint(rightMaxX, 0), QtCore.QPoint(rightMaxX, rightMaxY)]\r\n\r\n self.triangleBox.setEnabled(1)\r\n\r\n counter = 0\r\n for leftPoint, rightPoint in zip(tempLeft, tempRight):\r\n if leftPoint not in self.confirmed_left_points and leftPoint not in self.chosen_left_points and rightPoint not in self.confirmed_right_points and rightPoint not in self.chosen_right_points:\r\n counter += 1\r\n self.confirmed_left_points.append(leftPoint)\r\n self.confirmed_right_points.append(rightPoint)\r\n self.clicked_window_history.append(0)\r\n self.clicked_window_history.append(1)\r\n\r\n with open(self.startingTextCorePath, \"a\") as startingFile:\r\n if not os.stat(self.startingTextCorePath).st_size: # left file is empty\r\n startingFile.write('{:>8}{:>8}'.format(str(format(self.confirmed_left_points[len(self.confirmed_left_points) - 1].x(), \".1f\")), 
str(format(self.confirmed_left_points[len(self.confirmed_left_points) - 1].y(), \".1f\"))))\r\n else:\r\n startingFile.write('\\n{:>8}{:>8}'.format(str(format(self.confirmed_left_points[len(self.confirmed_left_points) - 1].x(), \".1f\")), str(format(self.confirmed_left_points[len(self.confirmed_left_points) - 1].y(), \".1f\"))))\r\n with open(self.endingTextCorePath, \"a\") as endingFile:\r\n if not os.stat(self.endingTextCorePath).st_size: # if right file is empty\r\n endingFile.write('{:>8}{:>8}'.format(str(format(self.confirmed_right_points[len(self.confirmed_right_points) - 1].x(), \".1f\")), str(format(self.confirmed_right_points[len(self.confirmed_right_points) - 1].y(), \".1f\"))))\r\n else:\r\n endingFile.write('\\n{:>8}{:>8}'.format(str(format(self.confirmed_right_points[len(self.confirmed_right_points) - 1].x(), \".1f\")), str(format(self.confirmed_right_points[len(self.confirmed_right_points) - 1].y(), \".1f\"))))\r\n\r\n if counter:\r\n self.refreshPaint()\r\n if counter == 1:\r\n self.notificationLine.setText(\" Successfully added a new corner point.\")\r\n else:\r\n self.notificationLine.setText(\" Successfully added \" + str(counter) + \" new corner points.\")\r\n else:\r\n self.notificationLine.setText(\" Failed to add any new corner points.\")\r\n\r\n self.enableDeletion = 0\r\n self.displayTriangles()\r\n self.triangleBox.setChecked(self.triangleUpdatePref)\r\n self.blendButton.setEnabled(1)\r\n self.resetPointsButton.setEnabled(1)\r\n\r\n # Function that wipes the slate clean, erasing all placed points from the GUI and relevant files.\r\n # Similar to autoCorner, this has been written to dynamically work with triangles without any exploits.\r\n def resetPoints(self):\r\n self.triangleUpdatePref = int(self.triangleBox.isChecked())\r\n self.blendButton.setEnabled(0)\r\n self.resetPointsButton.setEnabled(0)\r\n self.autoCornerButton.setEnabled(1)\r\n self.triangleBox.setChecked(0)\r\n self.triangleBox.setEnabled(0)\r\n self.added_left_points.clear()\r\n self.added_right_points.clear()\r\n self.confirmed_left_points.clear()\r\n self.confirmed_right_points.clear()\r\n self.chosen_left_points.clear()\r\n self.chosen_right_points.clear()\r\n self.leftPolyList.clear()\r\n self.rightPolyList.clear()\r\n\r\n if os.path.isfile(self.startingTextCorePath):\r\n os.remove(self.startingTextCorePath)\r\n\r\n if os.path.isfile(self.endingTextCorePath):\r\n os.remove(self.endingTextCorePath)\r\n\r\n self.enableDeletion = 0\r\n self.refreshPaint()\r\n\r\n self.notificationLine.setText(\" Successfully reset points.\")\r\n\r\n # Function that resets the alpha slider (for use after setting a full blend value that has modified the slider).\r\n # Resets the full blend value as well, just to prevent any weird behavior from ever occurring.\r\n def resetAlphaSlider(self):\r\n self.alphaSlider.setMaximum(20)\r\n self.alphaSlider.setTickInterval(2)\r\n self.fullBlendValue = 0.05\r\n self.blendText.setText(str(self.fullBlendValue))\r\n self.alphaValue.setText(str(0.0))\r\n self.alphaSlider.setValue(0)\r\n self.resetSliderButton.setEnabled(0)\r\n\r\n # Function that handles the rendering of points and triangles onto the GUI when manually called.\r\n # Dynamically handles changes in point and polygon lists to be compatible with resetPoints, autoCorner, etc.\r\n # TODO: Modify pointWidth to be a function of image size\r\n def paintEvent(self, paint_event):\r\n if self.changeFlag or self.pointSlider.valueChanged:\r\n leftPic = QtGui.QPixmap(self.startingImagePath)\r\n rightPic = 
QtGui.QPixmap(self.endingImagePath)\r\n pen = QtGui.QPen()\r\n pen.setWidth(self.pointSlider.value())\r\n leftpainter = QtGui.QPainter(leftPic)\r\n rightpainter = QtGui.QPainter(rightPic)\r\n pointWidth = self.pointSlider.value()\r\n\r\n if self.triangleUpdate == 1:\r\n if len(self.leftPolyList) == len(self.rightPolyList) > 0:\r\n pointWidth *= 1.7\r\n pen.setColor(QtGui.QColor(self.triangleRedSlider.value(), self.triangleGreenSlider.value(), self.triangleBlueSlider.value(), 255))\r\n leftpainter.setPen(pen)\r\n for x in self.leftPolyList:\r\n leftpainter.drawPolygon(x, 3)\r\n\r\n pen.setColor(QtGui.QColor(self.triangleRedSlider.value(), self.triangleGreenSlider.value(), self.triangleBlueSlider.value(), 255))\r\n rightpainter.setPen(pen)\r\n for x in self.rightPolyList:\r\n rightpainter.drawPolygon(x, 3)\r\n\r\n leftpainter.setBrush(QtGui.QColor(255, 0, 0, 255))\r\n for x in self.chosen_left_points:\r\n leftpainter.drawEllipse(x, pointWidth, pointWidth)\r\n\r\n leftpainter.setBrush(QtGui.QColor(0, 255, 0, 255))\r\n for x in self.added_left_points:\r\n leftpainter.drawEllipse(x, pointWidth, pointWidth)\r\n\r\n leftpainter.setBrush(QtGui.QColor(0, 0, 255, 255))\r\n for x in self.confirmed_left_points:\r\n leftpainter.drawEllipse(x, pointWidth, pointWidth)\r\n\r\n if not self.leftZoomData:\r\n self.startingImage.setPixmap(leftPic)\r\n else:\r\n self.leftZoomData[2] = min(max(0, self.leftZoomData[0] - int(self.leftSize[0] / 4)), int(self.leftSize[0] * 0.5))\r\n self.leftZoomData[3] = min(max(0, self.leftZoomData[1] - int(self.leftSize[1] / 4)), int(self.leftSize[0] * 0.5))\r\n temp = leftPic.copy(QtCore.QRect(self.leftZoomData[2], self.leftZoomData[3], int(self.leftSize[0] / 2), int(self.leftSize[1] / 2)))\r\n self.startingImage.setPixmap(temp)\r\n leftpainter.end()\r\n\r\n rightpainter.setBrush(QtGui.QColor(255, 0, 0, 255))\r\n for x in self.chosen_right_points:\r\n rightpainter.drawEllipse(x, pointWidth, pointWidth)\r\n\r\n rightpainter.setBrush(QtGui.QColor(0, 255, 0, 255))\r\n for x in self.added_right_points:\r\n rightpainter.drawEllipse(x, pointWidth, pointWidth)\r\n\r\n rightpainter.setBrush(QtGui.QColor(0, 0, 255, 255))\r\n for x in self.confirmed_right_points:\r\n rightpainter.drawEllipse(x, pointWidth, pointWidth)\r\n\r\n if not self.rightZoomData:\r\n self.endingImage.setPixmap(rightPic)\r\n else:\r\n self.rightZoomData[2] = min(max(0, self.rightZoomData[0] - int(self.rightSize[0] / 4)), int(self.rightSize[0] * 0.5))\r\n self.rightZoomData[3] = min(max(0, self.rightZoomData[1] - int(self.rightSize[1] / 4)), int(self.rightSize[0] * 0.5))\r\n temp = rightPic.copy(QtCore.QRect(self.rightZoomData[2], self.rightZoomData[3], int(self.rightSize[0] / 2), int(self.rightSize[1] / 2)))\r\n self.endingImage.setPixmap(temp)\r\n\r\n rightpainter.end()\r\n\r\n self.changeFlag = False\r\n\r\n # Event handler for keystrokes\r\n # CTRL + Z will either:\r\n # 1) Undo the most recently placed temporary (green) point [NOT \"the last placed temporary point\"], or\r\n # 2) Undo the most recent confirmed (blue) point pair\r\n # a) If this would cause a change in triangle display, the user's preference is remembered\r\n # CTRL + Y will restore the most recently deleted / undone point(s) in the cache, which is cleared when a new point is placed\r\n # Backspace will only delete the last placed temporary point, if there is one to delete.\r\n # a) It can not be invoked more than one time in succession, and\r\n # b) It has no effect on confirmed (blue) points\r\n def keyPressEvent(self, key_event):\r\n # 
Undo\r\n if type(key_event) == QtGui.QKeyEvent and key_event.modifiers() == QtCore.Qt.ControlModifier and key_event.key() == QtCore.Qt.Key_Z:\r\n undoFlag = 0\r\n if self.startingImage.hasScaledContents() and self.endingImage.hasScaledContents():\r\n if self.clicked_window_history[-1] == 1 and len(self.added_right_points):\r\n self.placed_points_history.append([self.added_right_points.pop(), self.clicked_window_history.pop()])\r\n self.refreshPaint()\r\n undoFlag = 1\r\n self.notificationLine.setText(\" Removed right temporary point!\")\r\n elif self.clicked_window_history[-1] == 0 and len(self.added_left_points):\r\n self.placed_points_history.append([self.added_left_points.pop(), self.clicked_window_history.pop()])\r\n self.refreshPaint()\r\n undoFlag = 1\r\n self.notificationLine.setText(\" Removed left temporary point!\")\r\n elif len(self.confirmed_left_points) and len(self.confirmed_right_points):\r\n self.clicked_window_history.pop()\r\n self.clicked_window_history.pop()\r\n self.placed_points_history.append((self.confirmed_left_points.pop(), self.confirmed_right_points.pop()))\r\n\r\n data1 = open(self.startingTextCorePath, 'r').readlines()\r\n del data1[-1]\r\n if data1:\r\n data1[-1] = data1[-1][0:int(len(data1[-1]) - 1)] # Remove \\n from the previously second to last line\r\n open(self.startingTextCorePath, 'w').writelines(data1)\r\n else:\r\n os.remove(self.startingTextCorePath)\r\n data2 = open(self.endingTextCorePath, 'r').readlines()\r\n del data2[-1]\r\n if data2:\r\n data2[-1] = data2[-1][0:int(len(data2[-1]) - 1)] # Remove \\n from the previously second to last line\r\n open(self.endingTextCorePath, 'w').writelines(data2)\r\n else:\r\n os.remove(self.endingTextCorePath)\r\n\r\n if len(self.chosen_left_points) + len(self.confirmed_left_points) >= 3:\r\n self.displayTriangles()\r\n self.blendButton.setEnabled(1)\r\n else:\r\n self.triangleUpdatePref = int(self.triangleBox.isChecked())\r\n self.triangleBox.setChecked(0)\r\n self.triangleBox.setEnabled(0)\r\n self.blendButton.setEnabled(0)\r\n self.displayTriangles()\r\n if len(self.chosen_left_points) + len(self.confirmed_left_points) == 0:\r\n self.resetPointsButton.setEnabled(0)\r\n self.refreshPaint()\r\n undoFlag = 1\r\n self.notificationLine.setText(\" Removed confirmed point pair!\")\r\n self.autoCornerButton.setEnabled(len(self.added_left_points) == len(self.added_right_points) == 0)\r\n if undoFlag == 0:\r\n self.notificationLine.setText(\" Can't undo!\")\r\n\r\n # Redo\r\n elif type(key_event) == QtGui.QKeyEvent and key_event.modifiers() == QtCore.Qt.ControlModifier and key_event.key() == QtCore.Qt.Key_Y:\r\n if not len(self.placed_points_history) > 0:\r\n self.notificationLine.setText(\" Can't redo!\")\r\n return\r\n\r\n recoveredData = self.placed_points_history.pop()\r\n if type(recoveredData) is list: # Restore added point\r\n if recoveredData[1] == 0:\r\n self.added_left_points.append(recoveredData[0])\r\n self.clicked_window_history.append(0)\r\n self.notificationLine.setText(\" Recovered left temporary point!\")\r\n elif recoveredData[1] == 1:\r\n self.added_right_points.append(recoveredData[0])\r\n self.clicked_window_history.append(1)\r\n self.notificationLine.setText(\" Recovered right temporary point!\")\r\n self.refreshPaint()\r\n self.enableDeletion = 1\r\n self.autoCornerButton.setEnabled(0)\r\n elif type(recoveredData) is tuple: # Restore confirmed point pair\r\n self.confirmed_left_points.append(recoveredData[0])\r\n self.confirmed_right_points.append(recoveredData[1])\r\n 
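# Added note: restoring a confirmed pair pushes both window markers (0 = left, 1 = right), so a later undo can pop them together as a pair.\r\n                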
self.clicked_window_history.append(0)\r\n self.clicked_window_history.append(1)\r\n\r\n with open(self.startingTextCorePath, \"a\") as startingFile:\r\n if not os.stat(self.startingTextCorePath).st_size: # left file is empty\r\n startingFile.write('{:>8}{:>8}'.format(\r\n str(format(self.confirmed_left_points[len(self.confirmed_left_points) - 1].x(), \".1f\")),\r\n str(format(self.confirmed_left_points[len(self.confirmed_left_points) - 1].y(), \".1f\"))))\r\n else:\r\n startingFile.write('\\n{:>8}{:>8}'.format(\r\n str(format(self.confirmed_left_points[len(self.confirmed_left_points) - 1].x(), \".1f\")),\r\n str(format(self.confirmed_left_points[len(self.confirmed_left_points) - 1].y(), \".1f\"))))\r\n with open(self.endingTextCorePath, \"a\") as endingFile:\r\n if not os.stat(self.endingTextCorePath).st_size: # right file is empty\r\n endingFile.write('{:>8}{:>8}'.format(\r\n str(format(self.confirmed_right_points[len(self.confirmed_right_points) - 1].x(), \".1f\")),\r\n str(format(self.confirmed_right_points[len(self.confirmed_right_points) - 1].y(), \".1f\"))))\r\n else:\r\n endingFile.write('\\n{:>8}{:>8}'.format(\r\n str(format(self.confirmed_right_points[len(self.confirmed_right_points) - 1].x(), \".1f\")),\r\n str(format(self.confirmed_right_points[len(self.confirmed_right_points) - 1].y(), \".1f\"))))\r\n self.refreshPaint()\r\n self.displayTriangles()\r\n self.autoCornerButton.setEnabled(1)\r\n self.resetPointsButton.setEnabled(1)\r\n self.notificationLine.setText(\" Recovered confirmed point pair!\")\r\n\r\n # Delete recent temp\r\n elif type(key_event) == QtGui.QKeyEvent and key_event.key() == QtCore.Qt.Key_Backspace:\r\n if self.startingImage.hasScaledContents() and self.endingImage.hasScaledContents() and self.enableDeletion == 1:\r\n if self.clicked_window_history[-1] == 1 and len(self.added_right_points):\r\n self.placed_points_history.append([self.added_right_points.pop(), self.clicked_window_history.pop()])\r\n self.enableDeletion = 0\r\n self.refreshPaint()\r\n self.notificationLine.setText(\" Successfully deleted recent temporary point.\")\r\n elif self.clicked_window_history[-1] == 0 and len(self.added_left_points):\r\n self.placed_points_history.append([self.added_left_points.pop(), self.clicked_window_history.pop()])\r\n self.enableDeletion = 0\r\n self.refreshPaint()\r\n self.notificationLine.setText(\" Successfully deleted recent temporary point.\")\r\n self.autoCornerButton.setEnabled(len(self.added_left_points) == len(self.added_right_points) == 0)\r\n\r\n # Function override of the window resize event. 
Fairly lightweight.\r\n # Currently recalculates the necessary scalar and size values to keep image displays and point placements accurate.\r\n def resizeEvent(self, event):\r\n self.imageScalar = (self.leftSize[0] / (self.startingImage.geometry().topRight().x() - self.startingImage.geometry().topLeft().x()), self.leftSize[1] / (self.startingImage.geometry().bottomRight().y() - self.startingImage.geometry().topLeft().y()))\r\n self.startingImage.setFixedWidth(self.blendingImage.width())\r\n self.endingImage.setFixedWidth(self.blendingImage.width())\r\n\r\n # Function that resizes a copy of the left image to the right image's dimensions\r\n def resizeLeft(self):\r\n if self.leftSize != self.rightSize:\r\n img = Image.open(self.startingImagePath)\r\n img = img.resize((self.rightSize[0], self.rightSize[1]), Image.ANTIALIAS)\r\n\r\n for index, pointPair in enumerate(self.chosen_left_points):\r\n self.chosen_left_points[index] = QtCore.QPoint(int(pointPair.x() * self.rightSize[0] / self.leftSize[0]), int(pointPair.y() * self.rightSize[1] / self.leftSize[1]))\r\n for index, pointPair in enumerate(self.confirmed_left_points):\r\n self.confirmed_left_points[index] = QtCore.QPoint(int(pointPair.x() * self.rightSize[0] / self.leftSize[0]), int(pointPair.y() * self.rightSize[1] / self.leftSize[1]))\r\n for index, pointPair in enumerate(self.added_left_points):\r\n self.added_left_points[index] = QtCore.QPoint(int(pointPair.x() * self.rightSize[0] / self.leftSize[0]), int(pointPair.y() * self.rightSize[1] / self.leftSize[1]))\r\n\r\n path = ROOT_DIR + '/Images_Points/' + self.startingImageName + '-' + str(self.rightSize[0]) + 'x' + str(self.rightSize[1]) + self.startingImageType\r\n textPath = ROOT_DIR + '/Images_Points/' + self.startingImageName + '-' + str(self.rightSize[0]) + 'x' + str(self.rightSize[1]) + '-' + self.startingImageType[1:] + '.txt'\r\n img.save(path)\r\n\r\n open(textPath, 'w').close()\r\n writeFlag = False\r\n with open(textPath, \"a\") as startingFile:\r\n for pointPair in self.chosen_left_points:\r\n if not writeFlag:\r\n startingFile.write('{:>8}{:>8}'.format(str(format(pointPair.x(), \".1f\")), str(format(pointPair.y(), \".1f\"))))\r\n writeFlag = True\r\n else:\r\n startingFile.write('\\n{:>8}{:>8}'.format(str(format(pointPair.x(), \".1f\")), str(format(pointPair.y(), \".1f\"))))\r\n for pointPair in self.confirmed_left_points:\r\n if not writeFlag:\r\n startingFile.write('{:>8}{:>8}'.format(str(format(pointPair.x(), \".1f\")), str(format(pointPair.y(), \".1f\"))))\r\n writeFlag = True\r\n else:\r\n startingFile.write('\\n{:>8}{:>8}'.format(str(format(pointPair.x(), \".1f\")), str(format(pointPair.y(), \".1f\"))))\r\n self.startingImageName += '-' + str(self.rightSize[0]) + 'x' + str(self.rightSize[1])\r\n self.startingImagePath = path\r\n self.startingTextCorePath = textPath\r\n self.startingImage.setPixmap(QtGui.QPixmap(self.startingImagePath))\r\n self.notificationLine.setText(\" Successfully resized left image from \" + str(self.leftSize[0]) + \"x\" + str(self.leftSize[1]) + \" to \" + str(self.rightSize[0]) + \"x\" + str(self.rightSize[1]))\r\n self.leftSize = self.rightSize\r\n self.checkResize()\r\n else:\r\n self.notificationLine.setText(\" Can't resize the left image - both images share the same dimensions!\")\r\n\r\n # Function that resizes a copy of the right image to the left image's dimensions\r\n def resizeRight(self):\r\n if self.leftSize != self.rightSize:\r\n img = Image.open(self.endingImagePath)\r\n img = img.resize((self.leftSize[0], self.leftSize[1]), 
Image.ANTIALIAS)\r\n\r\n for index, pointPair in enumerate(self.chosen_right_points):\r\n self.chosen_right_points[index] = QtCore.QPoint(int(pointPair.x() * self.leftSize[0] / self.rightSize[0]), int(pointPair.y() * self.leftSize[1] / self.rightSize[1]))\r\n for index, pointPair in enumerate(self.confirmed_right_points):\r\n self.confirmed_right_points[index] = QtCore.QPoint(int(pointPair.x() * self.leftSize[0] / self.rightSize[0]), int(pointPair.y() * self.leftSize[1] / self.rightSize[1]))\r\n for index, pointPair in enumerate(self.added_right_points):\r\n self.added_right_points[index] = QtCore.QPoint(int(pointPair.x() * self.leftSize[0] / self.rightSize[0]), int(pointPair.y() * self.leftSize[1] / self.rightSize[1]))\r\n\r\n path = ROOT_DIR + '/Images_Points/' + self.endingImageName + '-' + str(self.leftSize[0]) + 'x' + str(self.leftSize[1]) + self.endingImageType\r\n textPath = ROOT_DIR + '/Images_Points/' + self.endingImageName + '-' + str(self.leftSize[0]) + 'x' + str(self.leftSize[1]) + '-' + self.endingImageType[1:] + '.txt'\r\n img.save(path)\r\n\r\n open(textPath, 'w').close()\r\n writeFlag = False\r\n with open(textPath, \"a\") as endingFile:\r\n for pointPair in self.chosen_right_points:\r\n if not writeFlag:\r\n endingFile.write('{:>8}{:>8}'.format(str(format(pointPair.x(), \".1f\")), str(format(pointPair.y(), \".1f\"))))\r\n writeFlag = True\r\n else:\r\n endingFile.write('\\n{:>8}{:>8}'.format(str(format(pointPair.x(), \".1f\")), str(format(pointPair.y(), \".1f\"))))\r\n for pointPair in self.confirmed_right_points:\r\n if not writeFlag:\r\n endingFile.write('{:>8}{:>8}'.format(str(format(pointPair.x(), \".1f\")), str(format(pointPair.y(), \".1f\"))))\r\n writeFlag = True\r\n else:\r\n endingFile.write('\\n{:>8}{:>8}'.format(str(format(pointPair.x(), \".1f\")), str(format(pointPair.y(), \".1f\"))))\r\n self.endingImageName += '-' + str(self.leftSize[0]) + 'x' + str(self.leftSize[1])\r\n self.endingImagePath = path\r\n self.endingTextCorePath = textPath\r\n self.endingImage.setPixmap(QtGui.QPixmap(self.endingImagePath))\r\n self.notificationLine.setText(\" Successfully resized right image from \" + str(self.rightSize[0]) + \"x\" + str(self.rightSize[1]) + \" to \" + str(self.leftSize[0]) + \"x\" + str(self.leftSize[1]))\r\n self.rightSize = self.leftSize\r\n self.checkResize()\r\n else:\r\n self.notificationLine.setText(\" Can't resize the right image - both images share the same dimensions!\")\r\n\r\n # Macro function to save unnecessary repetitions of the same few lines of code called in resizeLeft() and resizeRight()\r\n # Simply toggles some program flags and repaints any GUI changes afterwards.\r\n def checkResize(self):\r\n self.imageScalar = (self.leftSize[0] / (self.startingImage.geometry().topRight().x() - self.startingImage.geometry().topLeft().x()), self.leftSize[1] / (self.startingImage.geometry().bottomRight().y() - self.startingImage.geometry().topLeft().y()))\r\n if (len(self.chosen_left_points) + len(self.confirmed_left_points)) == (len(self.chosen_right_points) + len(self.confirmed_right_points)) >= 3:\r\n self.alphaValue.setEnabled(1)\r\n self.alphaSlider.setEnabled(1)\r\n self.autoCornerButton.setEnabled(1)\r\n self.blendButton.setEnabled(1)\r\n self.triangleBox.setEnabled(1)\r\n self.triangleBox.setChecked(self.triangleUpdatePref)\r\n else:\r\n self.alphaValue.setEnabled(0)\r\n self.alphaSlider.setEnabled(0)\r\n self.autoCornerButton.setEnabled(0)\r\n self.blendButton.setEnabled(0)\r\n self.triangleBox.setEnabled(0)\r\n self.triangleBox.setChecked(0)\r\n 
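# Added note: rebuild and repaint the triangle overlay so the rescaled point coordinates are reflected immediately.\r\n        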
self.displayTriangles()\r\n self.repaint()\r\n self.resizeLeftButton.setStyleSheet(\"\")\r\n self.resizeRightButton.setStyleSheet(\"\")\r\n\r\n # Function that handles GUI and file behavior when the mouse is clicked.\r\n def mousePressEvent(self, cursor_event):\r\n # LMB (Place Point)\r\n if cursor_event.button() == QtCore.Qt.LeftButton:\r\n if self.leftSize == self.rightSize:\r\n self.imageScalar = (self.leftSize[0] / (self.startingImage.geometry().topRight().x() - self.startingImage.geometry().topLeft().x()), self.leftSize[1] / (self.startingImage.geometry().bottomRight().y() - self.startingImage.geometry().topLeft().y()))\r\n\r\n if self.startingImage.hasScaledContents() and self.endingImage.hasScaledContents():\r\n # If there are a set of points to confirm\r\n if len(self.added_left_points) == len(self.added_right_points) == 1:\r\n self.placed_points_history.clear()\r\n self.confirmed_left_points.append(self.added_left_points.pop())\r\n self.confirmed_right_points.append(self.added_right_points.pop())\r\n with open(self.startingTextCorePath, \"a\") as startingFile:\r\n if not os.stat(self.startingTextCorePath).st_size: # left file is empty\r\n startingFile.write('{:>8}{:>8}'.format(str(format(self.confirmed_left_points[len(self.confirmed_left_points)-1].x(), \".1f\")), str(format(self.confirmed_left_points[len(self.confirmed_left_points)-1].y(), \".1f\"))))\r\n else:\r\n startingFile.write('\\n{:>8}{:>8}'.format(str(format(self.confirmed_left_points[len(self.confirmed_left_points)-1].x(), \".1f\")), str(format(self.confirmed_left_points[len(self.confirmed_left_points)-1].y(), \".1f\"))))\r\n with open(self.endingTextCorePath, \"a\") as endingFile:\r\n if not os.stat(self.endingTextCorePath).st_size: # right file is empty\r\n endingFile.write('{:>8}{:>8}'.format(str(format(self.confirmed_right_points[len(self.confirmed_right_points)-1].x(), \".1f\")), str(format(self.confirmed_right_points[len(self.confirmed_right_points)-1].y(), \".1f\"))))\r\n else:\r\n endingFile.write('\\n{:>8}{:>8}'.format(str(format(self.confirmed_right_points[len(self.confirmed_right_points)-1].x(), \".1f\")), str(format(self.confirmed_right_points[len(self.confirmed_right_points)-1].y(), \".1f\"))))\r\n self.refreshPaint()\r\n self.displayTriangles()\r\n self.autoCornerButton.setEnabled(1)\r\n self.resetPointsButton.setEnabled(1)\r\n self.notificationLine.setText(\" Successfully confirmed set of added points.\")\r\n # LMB was clicked inside left image\r\n if self.startingImage.geometry().topLeft().x() < cursor_event.pos().x() < self.startingImage.geometry().topRight().x() and self.startingImage.geometry().topLeft().y() < cursor_event.pos().y() < self.startingImage.geometry().bottomRight().y() and len(self.added_left_points) == 0:\r\n self.placed_points_history.clear()\r\n if not self.leftZoomData:\r\n leftCoord = QtCore.QPoint(int((cursor_event.pos().x() - self.startingImage.geometry().topLeft().x()) * self.imageScalar[0]), int((cursor_event.pos().y() - self.startingImage.geometry().topLeft().y()) * self.imageScalar[1]))\r\n else:\r\n xPos = self.leftZoomData[2] + int((cursor_event.pos().x() - self.startingImage.geometry().topLeft().x()) * self.imageScalar[0] / 2)\r\n yPos = self.leftZoomData[3] + int((cursor_event.pos().y() - self.startingImage.geometry().topLeft().y()) * self.imageScalar[1] / 2)\r\n leftCoord = QtCore.QPoint(xPos, yPos)\r\n self.added_left_points.append(leftCoord)\r\n self.refreshPaint()\r\n self.clicked_window_history.append(0)\r\n self.enableDeletion = 1\r\n 
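# Added note: disable corner auto-placement while this temporary point is pending; autoCorner assumes both point lists are paired.\r\n                        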
self.autoCornerButton.setEnabled(0)\r\n self.notificationLine.setText(\" Successfully added left temporary point.\")\r\n # LMB was clicked inside right image\r\n elif self.endingImage.geometry().topLeft().x() < cursor_event.pos().x() < self.endingImage.geometry().topRight().x() and self.endingImage.geometry().topLeft().y() < cursor_event.pos().y() < self.endingImage.geometry().bottomRight().y() and len(self.added_right_points) == 0:\r\n self.placed_points_history.clear()\r\n if not self.rightZoomData:\r\n rightCoord = QtCore.QPoint(int((cursor_event.pos().x() - self.endingImage.geometry().topLeft().x()) * self.imageScalar[0]), int((cursor_event.pos().y() - self.startingImage.geometry().topLeft().y()) * self.imageScalar[1]))\r\n else:\r\n xPos = self.rightZoomData[2] + int((cursor_event.pos().x() - self.endingImage.geometry().topLeft().x()) * self.imageScalar[0] / 2)\r\n yPos = self.rightZoomData[3] + int((cursor_event.pos().y() - self.endingImage.geometry().topLeft().y()) * self.imageScalar[1] / 2)\r\n rightCoord = QtCore.QPoint(xPos, yPos)\r\n self.added_right_points.append(rightCoord)\r\n self.refreshPaint()\r\n self.clicked_window_history.append(1)\r\n self.enableDeletion = 1\r\n self.notificationLine.setText(\" Successfully added right temporary point.\")\r\n\r\n # Check if 3 or more points exist for two corresponding images so that triangles may be displayed\r\n if (len(self.chosen_left_points) + len(self.confirmed_left_points)) == (len(self.chosen_right_points) + len(self.confirmed_right_points)) >= 3:\r\n self.triangleBox.setEnabled(1)\r\n self.blendButton.setEnabled(1)\r\n if self.triangleUpdatePref == 1:\r\n self.triangleUpdate = 1\r\n self.triangleBox.setChecked(1)\r\n self.refreshPaint()\r\n self.displayTriangles()\r\n else:\r\n if (self.startingImage.geometry().topLeft().x() < cursor_event.pos().x() < self.startingImage.geometry().topRight().x() and self.startingImage.geometry().topLeft().y() < cursor_event.pos().y() < self.startingImage.geometry().bottomRight().y() and len(self.added_left_points) == 0) or (self.endingImage.geometry().topLeft().x() < cursor_event.pos().x() < self.endingImage.geometry().topRight().x() and self.endingImage.geometry().topLeft().y() < cursor_event.pos().y() < self.endingImage.geometry().bottomRight().y() and len(self.added_right_points) == 0):\r\n self.notificationLine.setText(\" Images must be the same size before points can be drawn!\")\r\n # RMB (Toggle Zoom)\r\n elif cursor_event.button() == QtCore.Qt.RightButton:\r\n # RMB was clicked inside left image\r\n if self.startingImage.geometry().topLeft().x() < cursor_event.pos().x() < self.startingImage.geometry().topRight().x() and self.startingImage.geometry().topLeft().y() < cursor_event.pos().y() < self.startingImage.geometry().bottomRight().y():\r\n if not self.leftZoomData:\r\n self.leftZoomData = [int((cursor_event.pos().x() - self.startingImage.geometry().topLeft().x()) * self.imageScalar[0]), int((cursor_event.pos().y() - self.startingImage.geometry().topLeft().y()) * self.imageScalar[1]), 0, 0]\r\n self.notificationLine.setText(\" Zoomed in on left image.\")\r\n else:\r\n self.leftZoomData = None\r\n self.notificationLine.setText(\" Zoomed out of left image.\")\r\n self.refreshPaint()\r\n # RMB was clicked inside right image\r\n elif self.endingImage.geometry().topLeft().x() < cursor_event.pos().x() < self.endingImage.geometry().topRight().x() and self.endingImage.geometry().topLeft().y() < cursor_event.pos().y() < self.endingImage.geometry().bottomRight().y():\r\n if not 
self.rightZoomData:\r\n self.rightZoomData = [int((cursor_event.pos().x() - self.endingImage.geometry().topLeft().x()) * self.imageScalar[0]), int((cursor_event.pos().y() - self.endingImage.geometry().topLeft().y()) * self.imageScalar[1]), 0, 0]\r\n self.notificationLine.setText(\" Zoomed in on right image.\")\r\n else:\r\n self.rightZoomData = None\r\n self.notificationLine.setText(\" Zoomed out of right image.\")\r\n self.refreshPaint()\r\n\r\n # Very simple function for updating user preference for blending transparency in images\r\n # (This is disabled by default, as transparency is often unused and reduces performance.)\r\n def transparencyUpdate(self):\r\n if self.transparencyBox.isChecked():\r\n self.notificationLine.setText(\" Successfully enabled transparency layer.\")\r\n else:\r\n self.notificationLine.setText(\" Successfully disabled transparency layer.\")\r\n\r\n # Another simple function for updating user preference regarding 'full blending'\r\n # Full blending is defined as morphing every 0.05 alpha increment of the two images.\r\n # The alpha slider than becomes an interactive display, showing each blend in realtime.\r\n # (Naturally, this is disabled by default, as full blending takes much longer to run)\r\n def blendBoxUpdate(self):\r\n self.blendText.setEnabled(int(self.blendBox.isChecked()))\r\n if self.blendBox.isChecked():\r\n self.notificationLine.setText(\" Successfully enabled full blending.\")\r\n else:\r\n self.notificationLine.setText(\" Successfully disabled full blending.\")\r\n\r\n # Function that dynamically updates the list of triangles for the image pair provided, when manually invoked.\r\n # When a process wants to see triangles update properly, THIS is what needs to be called (not self.triangleUpdate).\r\n def displayTriangles(self):\r\n if self.triangleBox.isEnabled() and (self.triangleBox.isChecked() or self.triangleUpdatePref):\r\n if os.path.exists(self.startingTextCorePath) and os.path.exists(self.endingTextCorePath):\r\n self.updateTriangleWidget(1)\r\n leftTriList, rightTriList = loadTriangles(self.startingTextCorePath, self.endingTextCorePath)\r\n self.leftPolyList.clear()\r\n self.rightPolyList.clear()\r\n\r\n for x in leftTriList:\r\n temp = QtGui.QPolygon((QtCore.QPoint(int(x.vertices[0][0]), int(x.vertices[0][1])), QtCore.QPoint(int(x.vertices[1][0]), int(x.vertices[1][1])), QtCore.QPoint(int(x.vertices[2][0]), int(x.vertices[2][1])), QtCore.QPoint(int(x.vertices[0][0]), int(x.vertices[0][1]))))\r\n self.leftPolyList.append(temp)\r\n for y in rightTriList:\r\n temp = QtGui.QPolygon((QtCore.QPoint(int(y.vertices[0][0]), int(y.vertices[0][1])), QtCore.QPoint(int(y.vertices[1][0]), int(y.vertices[1][1])), QtCore.QPoint(int(y.vertices[2][0]), int(y.vertices[2][1])), QtCore.QPoint(int(y.vertices[0][0]), int(y.vertices[0][1]))))\r\n self.rightPolyList.append(temp)\r\n self.triangleUpdate = 1\r\n self.refreshPaint()\r\n\r\n # If the images have any triangles, it is safe to enable the blend button\r\n # (The boolean expression isn't really necessary, but it serves as an OK sanity check.)\r\n self.blendButton.setEnabled(bool(len(self.leftPolyList) == len(self.rightPolyList) >= 1))\r\n return\r\n self.updateTriangleWidget(0)\r\n self.triangleUpdate = 0\r\n self.refreshPaint()\r\n\r\n # Function that handles movement of the alpha slider.\r\n # Typically will only update the alpha value in use unless a full blend has been completed (and is available).\r\n # If so, movement of this slider will also display the new alpha value's corresponding morph 
frame.\r\n def updateAlpha(self):\r\n value_num = ((self.alphaSlider.value() / self.alphaSlider.maximum()) / self.fullBlendValue) * self.fullBlendValue\r\n value = format(value_num, \".3f\")\r\n self.notificationLine.setText(\" Alpha value changed from \" + self.alphaValue.text() + \" to \" + str(value) + \".\")\r\n self.alphaValue.setText(str(value))\r\n if self.fullBlendComplete:\r\n temp = self.blendList[round(value_num / self.fullBlendValue)]\r\n\r\n if len(temp.shape) == 2:\r\n self.blendingImage.setPixmap(QtGui.QPixmap.fromImage(QtGui.QImage(temp.data, temp.shape[1], temp.shape[0], QtGui.QImage.Format_Grayscale8)))\r\n elif temp.shape[2] == 3:\r\n self.blendingImage.setPixmap(QtGui.QPixmap.fromImage(QtGui.QImage(temp.data, temp.shape[1], temp.shape[0], temp.shape[1] * 3, QtGui.QImage.Format_RGB888)))\r\n elif temp.shape[2] == 4:\r\n self.blendingImage.setPixmap(QtGui.QPixmap.fromImage(QtGui.QImage(temp.data, temp.shape[1], temp.shape[0], QtGui.QImage.Format_RGBA8888)))\r\n else:\r\n print(\"Generic catching error: Something went wrong when loading the image.\")\r\n\r\n # Red/Green/Blue slider functions for the triangle widget in order to select custom colors\r\n def updateRed(self):\r\n self.triangleRedValue.setText(str(self.triangleRedSlider.value()))\r\n self.refreshPaint()\r\n\r\n def updateGreen(self):\r\n self.triangleGreenValue.setText(str(self.triangleGreenSlider.value()))\r\n self.refreshPaint()\r\n\r\n def updateBlue(self):\r\n self.triangleBlueValue.setText(str(self.triangleBlueSlider.value()))\r\n self.refreshPaint()\r\n\r\n # Function that handles behavior when the user wishes to blend the two calibrated images.\r\n # Currently designed to handle:\r\n # > 8-Bit Grayscale (QtGui.QImage.Format_Grayscale8)\r\n # > 24-Bit Color .JPG / .PNG (QtGui.QImage.Format_RGB888)\r\n # > 24-Bit Color, 8-Bit Transparency .PNG (QtGui.QImage.Format_RGBA8888)\r\n def blendImages(self):\r\n self.blendButton.setEnabled(0)\r\n self.blendBox.setEnabled(0)\r\n triangleTuple = loadTriangles(self.startingTextCorePath, self.endingTextCorePath)\r\n leftImageRaw = imageio.imread(self.startingImagePath)\r\n rightImageRaw = imageio.imread(self.endingImagePath)\r\n leftImageARR = np.asarray(leftImageRaw)\r\n rightImageARR = np.asarray(rightImageRaw)\r\n errorFlag = False\r\n\r\n if self.blendBox.isChecked() and self.blendText.text() == '.':\r\n self.notificationLine.setText(\" Failed to morph. Please disable full blending or specify a valid value (0.001 to 1.0)\")\r\n errorFlag = True\r\n elif len(leftImageRaw.shape) < 3 and len(rightImageRaw.shape) < 3: # if grayscale\r\n self.notificationLine.setText(\" Calculating grayscale morph...\")\r\n self.repaint()\r\n grayScale = Morpher(leftImageARR, triangleTuple[0], rightImageARR, triangleTuple[1])\r\n self.blendList.clear()\r\n start_time = time.time()\r\n if self.blendBox.isChecked():\r\n self.verifyValue(\"blend\")\r\n x = 0\r\n while x <= self.alphaSlider.maximum():\r\n self.notificationLine.setText(\" Calculating RGB (.jpg) morph... 
{Frame \" + str(x + 1) + \"/\" + str(self.alphaSlider.maximum() + 1) + \"}\")\r\n self.repaint()\r\n if x == self.alphaSlider.maximum():\r\n tempImage = grayScale.getImageAtAlpha(1.0)\r\n else:\r\n tempImage = grayScale.getImageAtAlpha(x * self.fullBlendValue)\r\n self.blendList.append(tempImage)\r\n x += 1\r\n self.fullBlendComplete = True\r\n self.gifText.setEnabled(1)\r\n temp = self.blendList[int(float(self.alphaValue.text()) / self.fullBlendValue)]\r\n self.blendingImage.setPixmap(QtGui.QPixmap.fromImage(QtGui.QImage(temp.data, temp.shape[1], temp.shape[0], QtGui.QImage.Format_Grayscale8)))\r\n else:\r\n self.fullBlendComplete = False\r\n self.gifText.setEnabled(0)\r\n self.blendedImage = grayScale.getImageAtAlpha(float(self.alphaValue.text()))\r\n self.blendingImage.setPixmap(QtGui.QPixmap.fromImage(QtGui.QImage(self.blendedImage.data, self.blendedImage.shape[1], self.blendedImage.shape[0], QtGui.QImage.Format_Grayscale8)))\r\n self.notificationLine.setText(\" Morph took \" + \"{:.3f}\".format(time.time() - start_time) + \" seconds.\\n\")\r\n elif not self.transparencyBox.isChecked() or (leftImageRaw.shape[2] == 3 and rightImageRaw.shape[2] == 3): # if color, no alpha (.JPG)\r\n self.notificationLine.setText(\" Calculating RGB (.jpg) morph...\")\r\n self.repaint()\r\n colorScaleR = Morpher(leftImageARR[:, :, 0], triangleTuple[0], rightImageARR[:, :, 0], triangleTuple[1])\r\n colorScaleG = Morpher(leftImageARR[:, :, 1], triangleTuple[0], rightImageARR[:, :, 1], triangleTuple[1])\r\n colorScaleB = Morpher(leftImageARR[:, :, 2], triangleTuple[0], rightImageARR[:, :, 2], triangleTuple[1])\r\n self.blendList.clear()\r\n start_time = time.time()\r\n if self.blendBox.isChecked():\r\n\r\n counter = 0\r\n while counter <= self.alphaSlider.maximum():\r\n self.notificationLine.setText(\" Calculating RGB (.jpg) morph... 
{Frame \" + str(counter + 1) + \"/\" + str(self.alphaSlider.maximum() + 1) + \"}\")\r\n self.repaint()\r\n if counter == self.alphaSlider.maximum():\r\n alphaVal = 1.0\r\n else:\r\n alphaVal = counter * self.fullBlendValue\r\n pool = multiprocessing.Pool(4)\r\n results = [pool.apply_async(colorScaleR.getImageAtAlpha, (alphaVal,)),\r\n pool.apply_async(colorScaleG.getImageAtAlpha, (alphaVal,)),\r\n pool.apply_async(colorScaleB.getImageAtAlpha, (alphaVal,))]\r\n blendR = results[0].get()\r\n blendG = results[1].get()\r\n blendB = results[2].get()\r\n pool.close()\r\n pool.terminate()\r\n pool.join()\r\n self.blendList.append(np.dstack((blendR, blendG, blendB)))\r\n counter += 1\r\n self.fullBlendComplete = True\r\n self.gifText.setEnabled(1)\r\n temp = self.blendList[int(float(self.alphaValue.text()) / self.fullBlendValue)]\r\n self.blendingImage.setPixmap(QtGui.QPixmap.fromImage(QtGui.QImage(temp.data, temp.shape[1], temp.shape[0], temp.shape[1] * 3, QtGui.QImage.Format_RGB888)))\r\n self.notificationLine.setText(\" RGB morph took \" + \"{:.3f}\".format(time.time() - start_time) + \" seconds.\\n\")\r\n else:\r\n self.fullBlendComplete = False\r\n self.gifText.setEnabled(0)\r\n pool = multiprocessing.Pool(4)\r\n results = [pool.apply_async(colorScaleR.getImageAtAlpha, (float(self.alphaValue.text()),)),\r\n pool.apply_async(colorScaleG.getImageAtAlpha, (float(self.alphaValue.text()),)),\r\n pool.apply_async(colorScaleB.getImageAtAlpha, (float(self.alphaValue.text()),))]\r\n blendR = results[0].get()\r\n blendG = results[1].get()\r\n blendB = results[2].get()\r\n pool.close()\r\n pool.terminate()\r\n pool.join()\r\n self.blendedImage = np.dstack((blendR, blendG, blendB))\r\n self.blendingImage.setPixmap(QtGui.QPixmap.fromImage(QtGui.QImage(self.blendedImage.data, self.blendedImage.shape[1], self.blendedImage.shape[0], self.blendedImage.shape[1] * 3, QtGui.QImage.Format_RGB888)))\r\n self.notificationLine.setText(\" RGB morph took \" + \"{:.3f}\".format(time.time() - start_time) + \" seconds.\\n\")\r\n elif self.transparencyBox.isChecked() and leftImageRaw.shape[2] == 4 and rightImageRaw.shape[2] == 4: # if color, alpha (.PNG)\r\n self.notificationLine.setText(\" Calculating RGBA (.png) morph...\")\r\n self.repaint()\r\n colorScaleR = Morpher(leftImageARR[:, :, 0], triangleTuple[0], rightImageARR[:, :, 0], triangleTuple[1])\r\n colorScaleG = Morpher(leftImageARR[:, :, 1], triangleTuple[0], rightImageARR[:, :, 1], triangleTuple[1])\r\n colorScaleB = Morpher(leftImageARR[:, :, 2], triangleTuple[0], rightImageARR[:, :, 2], triangleTuple[1])\r\n colorScaleA = Morpher(leftImageARR[:, :, 3], triangleTuple[0], rightImageARR[:, :, 3], triangleTuple[1])\r\n self.blendList.clear()\r\n start_time = time.time()\r\n if self.blendBox.isChecked():\r\n counter = 0\r\n while counter <= self.alphaSlider.maximum():\r\n self.notificationLine.setText(\" Calculating RGBA (.jpg) morph... 
{Frame \" + str(counter + 1) + \"/\" + str(self.alphaSlider.maximum() + 1) + \"}\")\r\n                    self.repaint()\r\n                    if counter == self.alphaSlider.maximum():\r\n                        alphaVal = 1.0\r\n                    else:\r\n                        alphaVal = counter * self.fullBlendValue\r\n                    pool = multiprocessing.Pool(4)\r\n                    results = [pool.apply_async(colorScaleR.getImageAtAlpha, (alphaVal,)),\r\n                               pool.apply_async(colorScaleG.getImageAtAlpha, (alphaVal,)),\r\n                               pool.apply_async(colorScaleB.getImageAtAlpha, (alphaVal,)),\r\n                               pool.apply_async(colorScaleA.getImageAtAlpha, (alphaVal,))]\r\n                    blendR = results[0].get()\r\n                    blendG = results[1].get()\r\n                    blendB = results[2].get()\r\n                    blendA = results[3].get()\r\n                    pool.close()\r\n                    pool.terminate()\r\n                    pool.join()\r\n\r\n                    self.blendList.append(np.dstack((blendR, blendG, blendB, blendA)))\r\n                    counter += 1\r\n                self.fullBlendComplete = True\r\n                self.gifText.setEnabled(1)\r\n                temp = self.blendList[int(float(self.alphaValue.text()) / self.fullBlendValue)]\r\n                self.blendingImage.setPixmap(QtGui.QPixmap.fromImage(QtGui.QImage(temp.data, temp.shape[1], temp.shape[0], QtGui.QImage.Format_RGBA8888)))\r\n                self.notificationLine.setText(\" RGBA morph took \" + \"{:.3f}\".format(time.time() - start_time) + \" seconds.\\n\")\r\n            else:\r\n                self.fullBlendComplete = False\r\n                self.gifText.setEnabled(0)\r\n                pool = multiprocessing.Pool(4)\r\n                results = [pool.apply_async(colorScaleR.getImageAtAlpha, (float(self.alphaValue.text()),)),\r\n                           pool.apply_async(colorScaleG.getImageAtAlpha, (float(self.alphaValue.text()),)),\r\n                           pool.apply_async(colorScaleB.getImageAtAlpha, (float(self.alphaValue.text()),)),\r\n                           pool.apply_async(colorScaleA.getImageAtAlpha, (float(self.alphaValue.text()),))]\r\n                blendR = results[0].get()\r\n                blendG = results[1].get()\r\n                blendB = results[2].get()\r\n                blendA = results[3].get()\r\n                pool.close()\r\n                pool.terminate()\r\n                pool.join()\r\n                self.blendedImage = np.dstack((blendR, blendG, blendB, blendA))\r\n                self.blendingImage.setPixmap(QtGui.QPixmap.fromImage(QtGui.QImage(self.blendedImage.data, self.blendedImage.shape[1], self.blendedImage.shape[0], QtGui.QImage.Format_RGBA8888)))\r\n                self.notificationLine.setText(\" RGBA morph took \" + \"{:.3f}\".format(time.time() - start_time) + \" seconds.\\n\")\r\n        else:\r\n            self.notificationLine.setText(\" Generic Catching Error: Check image file types..\")\r\n            errorFlag = True\r\n        if not errorFlag:\r\n            self.blendingImage.setScaledContents(1)\r\n            self.blendButton.setEnabled(1)\r\n            self.saveButton.setEnabled(1)\r\n            self.blendBox.setEnabled(1)\r\n\r\n    # Function that handles behavior when the user wishes to save the blended image(s)\r\n    # Currently designed to handle generation of the following:\r\n    #    Single Blend → Grayscale .jpg/.jpeg\r\n    #    Single Blend → Color .jpg/.jpeg/.png\r\n    #    Full Blend → Grayscale/Color .gif (default frame time: 100 ms)\r\n    def saveImages(self):\r\n        if self.blendingImage.hasScaledContents():\r\n            filepath = \"\"\r\n            if self.fullBlendComplete and self.blendList != []: # create GIF\r\n                self.gifTextDone()\r\n                filepath, _ = QFileDialog.getSaveFileName(self, 'Save the gif as ...', \"Morph.gif\", \"Images (*.gif)\")\r\n\r\n                if filepath == \"\":\r\n                    return\r\n\r\n                temp = self.blendList.copy()  # copy first so appending the reversed frames below doesn't mutate self.blendList\r\n                for frame in reversed(self.blendList):\r\n                    temp.append(frame)\r\n                imageio.mimsave(filepath, temp, duration=float(self.gifValue / 1000))\r\n            else: # create image\r\n                if len(self.blendedImage.shape) < 3:\r\n                    filepath, _ = QFileDialog.getSaveFileName(self, 'Save the image as ...', \"Morph.jpg\", \"Images (*.jpg)\")\r\n                elif self.blendedImage.shape[2] == 3:\r\n                    filepath, _ = 
QFileDialog.getSaveFileName(self, 'Save the image as ...', \"Morph.jpg\", \"Images (*.jpg)\")\r\n elif self.blendedImage.shape[2] == 4:\r\n filepath, _ = QFileDialog.getSaveFileName(self, 'Save the image as ...', \"Morph.png\", \"Images (*.png)\")\r\n\r\n if filepath == \"\":\r\n return\r\n\r\n imageio.imwrite(filepath, self.blendedImage)\r\n else:\r\n self.notificationLine.setText(\" Generic Catching Error: Image(s) can't be saved..\")\r\n\r\n # Function that handles behavior for loading the user's left image\r\n def loadDataLeft(self):\r\n filePath, _ = QFileDialog.getOpenFileName(self, caption='Open Starting Image File ...', filter=\"Images (*.png *.jpg *.jpeg)\")\r\n if not filePath:\r\n return\r\n\r\n self.notificationLine.setText(\" Left image loaded.\")\r\n self.triangleUpdatePref = int(self.triangleBox.isChecked())\r\n self.fullBlendComplete = False\r\n self.alphaValue.setEnabled(0)\r\n self.alphaSlider.setEnabled(0)\r\n self.autoCornerButton.setEnabled(0)\r\n self.blendButton.setEnabled(0)\r\n self.triangleBox.setChecked(0)\r\n self.triangleBox.setEnabled(0)\r\n self.displayTriangles()\r\n\r\n self.startingImage.setPixmap(QtGui.QPixmap(filePath))\r\n self.startingImage.setScaledContents(1)\r\n self.leftSize = (imageio.imread(filePath).shape[1], imageio.imread(filePath).shape[0])\r\n self.imageScalar = (self.leftSize[0] / (self.startingImage.geometry().topRight().x() - self.startingImage.geometry().topLeft().x()), self.leftSize[1] / (self.startingImage.geometry().bottomRight().y() - self.startingImage.geometry().topLeft().y()))\r\n\r\n self.startingImagePath = filePath\r\n\r\n # Obtain file's name and extension\r\n self.startingImageName = re.search('(?<=[/])[^/]+(?=[.])', filePath).group() # Example: C:/Desktop/TestImage.jpg => TestImage\r\n self.startingImageType = os.path.splitext(self.startingImagePath)[1] # Example: C:/Desktop/TestImage.jpg => .jpg\r\n\r\n # Create local path to check for the image's text file\r\n # Example: C:/Desktop/TestImage.jpg => C:/Desktop/TestImage-jpg.txt\r\n self.startingTextPath = filePath[:-(len(self.startingImageName + self.startingImageType))] + self.startingImageName + '-' + self.startingImageType[1:] + '.txt'\r\n\r\n # Now assign file's name to desired path for information storage (appending .txt at the end)\r\n # self.startingTextCorePath = 'C:/Users/USER/PycharmProjects/Personal/Morphing/Images_Points/' + 'TestImage' + '-' + 'jpg' + '.txt'\r\n self.startingTextCorePath = os.path.join(ROOT_DIR, 'Images_Points\\\\' + self.startingImageName + '-' + self.startingImageType[1:] + '.txt')\r\n\r\n self.checkFiles('loadDataLeft', self.startingTextPath, self.startingTextCorePath)\r\n\r\n # Function that handles behavior for loading the user's right image\r\n def loadDataRight(self):\r\n filePath, _ = QFileDialog.getOpenFileName(self, caption='Open Ending Image File ...', filter=\"Images (*.png *.jpg *.jpeg)\")\r\n if not filePath:\r\n return\r\n\r\n self.notificationLine.setText(\" Right image loaded.\")\r\n self.triangleUpdatePref = int(self.triangleBox.isChecked())\r\n self.fullBlendComplete = False\r\n self.alphaValue.setEnabled(0)\r\n self.alphaSlider.setEnabled(0)\r\n self.autoCornerButton.setEnabled(0)\r\n self.blendButton.setEnabled(0)\r\n self.triangleBox.setChecked(0)\r\n self.triangleBox.setEnabled(0)\r\n self.displayTriangles()\r\n\r\n self.endingImage.setPixmap(QtGui.QPixmap(filePath))\r\n self.endingImage.setScaledContents(1)\r\n self.rightSize = (imageio.imread(filePath).shape[1], imageio.imread(filePath).shape[0])\r\n self.imageScalar = 
(self.rightSize[0] / (self.endingImage.geometry().topRight().x() - self.endingImage.geometry().topLeft().x()), self.rightSize[1] / (self.endingImage.geometry().bottomRight().y() - self.endingImage.geometry().topLeft().y()))\r\n\r\n self.endingImagePath = filePath\r\n\r\n # Obtain file's name and extension\r\n self.endingImageName = re.search('(?<=[/])[^/]+(?=[.])', filePath).group() # Example: C:/Desktop/TestImage.jpg => TestImage\r\n self.endingImageType = os.path.splitext(self.endingImagePath)[1] # Example: C:/Desktop/TestImage.jpg => .jpg\r\n\r\n # Create local path to check for the image's text file\r\n # Example: C:/Desktop/TestImage.jpg => C:/Desktop/TestImage-jpg.txt\r\n self.endingTextPath = filePath[:-(len(self.endingImageName + self.endingImageType))] + self.endingImageName + '-' + self.endingImageType[1:] + '.txt'\r\n\r\n # Now assign file's name to desired path for information storage (appending .txt at the end)\r\n # self.endingTextCorePath = 'C:/Users/USER/PycharmProjects/Personal/Morphing/Images_Points/' + 'TestImage' + '-' + 'jpg' + '.txt'\r\n self.endingTextCorePath = os.path.join(ROOT_DIR, 'Images_Points\\\\' + self.endingImageName + '-' + self.endingImageType[1:] + '.txt')\r\n\r\n self.checkFiles('loadDataRight', self.endingTextPath, self.endingTextCorePath)\r\n\r\n # Helper function for loadDataLeft and loadDataRight to reduce duplication of code\r\n # Handles text file generation, loads data, and sets flags where appropriate\r\n def checkFiles(self, sourceFunc, basePath, rootPath):\r\n # If there is already a text file at the location of the selected image, the program assumes that it is what\r\n # the user intends to start with and moves it to the root path for future manipulation.\r\n # Otherwise, the program creates an empty file at the root path instead.\r\n if os.path.isfile(basePath):\r\n try:\r\n shutil.copy(basePath, rootPath)\r\n os.remove(basePath)\r\n except shutil.SameFileError:\r\n pass\r\n else:\r\n if not os.path.exists(os.path.dirname(rootPath)): # if Images_Points doesn't exist, create it\r\n os.makedirs(os.path.dirname(rootPath))\r\n open(rootPath, 'a').close()\r\n\r\n self.added_left_points.clear()\r\n self.added_right_points.clear()\r\n self.confirmed_left_points.clear()\r\n self.confirmed_right_points.clear()\r\n\r\n with open(rootPath, \"r\") as textFile:\r\n if sourceFunc == 'loadDataLeft':\r\n self.chosen_left_points.clear()\r\n for x in textFile:\r\n self.chosen_left_points.append(QtCore.QPoint(int(float(x.split()[0])), int(float(x.split()[1]))))\r\n elif sourceFunc == 'loadDataRight':\r\n self.chosen_right_points.clear()\r\n for x in textFile:\r\n self.chosen_right_points.append(QtCore.QPoint(int(float(x.split()[0])), int(float(x.split()[1]))))\r\n\r\n if self.startingImage.hasScaledContents() and self.endingImage.hasScaledContents():\r\n self.resizeLeftButton.setEnabled(1)\r\n self.resizeRightButton.setEnabled(1)\r\n if self.chosen_left_points != [] and self.chosen_right_points != []:\r\n self.resetPointsButton.setEnabled(1)\r\n\r\n # Check that the two images are the same size - if they aren't, the user should be notified that this won't work\r\n if self.leftSize == self.rightSize:\r\n self.resizeLeftButton.setStyleSheet(\"\")\r\n self.resizeRightButton.setStyleSheet(\"\")\r\n self.alphaValue.setEnabled(1)\r\n self.alphaSlider.setEnabled(1)\r\n self.autoCornerButton.setEnabled(1)\r\n # Check if 3 or more points exist for two corresponding images so that triangles may be displayed\r\n if (len(self.chosen_left_points) + 
len(self.confirmed_left_points)) == (len(self.chosen_right_points) + len(self.confirmed_right_points)) >= 3:\r\n self.blendButton.setEnabled(1)\r\n self.triangleBox.setEnabled(1)\r\n self.triangleBox.setChecked(self.triangleUpdatePref)\r\n\r\n else:\r\n self.resizeLeftButton.setStyleSheet(\"font: bold;\")\r\n self.resizeRightButton.setStyleSheet(\"font: bold;\")\r\n if sourceFunc == 'loadDataLeft':\r\n self.notificationLine.setText(\" Left image loaded - WARNING: Input images must be the same size!\")\r\n elif sourceFunc == 'loadDataRight':\r\n self.notificationLine.setText(\" Right image loaded - WARNING: Input images must be the same size!\")\r\n self.displayTriangles()\r\n\r\n\r\nif __name__ == \"__main__\":\r\n currentApp = QApplication(sys.argv)\r\n currentForm = MorphingApp()\r\n\r\n currentForm.show()\r\n currentApp.exec_()\r\n" } ]
2
Ankits-lab/frameworks_base
https://github.com/Ankits-lab/frameworks_base
8a63f39a79965c87a84e80550926327dcafb40b7
150a9240e5a11cd5ebc9bb0832ce30e9c23f376a
83e81c25b1f74f88ed0f723afc5d3f83e7d05da8
refs/heads/main
2023-02-06T03:57:44.893590
2020-11-14T09:13:40
2020-11-14T09:13:40
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6500187516212463, "alphanum_fraction": 0.6601577401161194, "avg_line_length": 34.98611068725586, "blob_id": "974d28e79db51ea7ad9fb753820cd8c965636d72", "content_id": "978873ba68ffc1812555a5f7d6e3a9c7743f4786", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2663, "license_type": "permissive", "max_line_length": 96, "num_lines": 72, "path": "/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/AverageFilter.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright 2013 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n// Takes sharpness scores in RT and averages them over time\r\n\r\npackage androidx.media.filterfw.samples.simplecamera;\r\n\r\nimport android.util.Log;\r\nimport androidx.media.filterfw.Filter;\r\nimport androidx.media.filterfw.FrameType;\r\nimport androidx.media.filterfw.FrameValue;\r\nimport androidx.media.filterfw.MffContext;\r\nimport androidx.media.filterfw.OutputPort;\r\nimport androidx.media.filterfw.Signature;\r\n\r\npublic class AverageFilter extends Filter {\r\n\r\n private static final String TAG = \"AverageFilter\";\r\n private static boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);\r\n\r\n private static final int NUM_FRAMES = 5;\r\n private int counter = 0;\r\n private float[] temp = new float[NUM_FRAMES];\r\n\r\n /**\r\n * @param context\r\n * @param name\r\n */\r\n public AverageFilter(MffContext context, String name) {\r\n super(context, name);\r\n }\r\n\r\n @Override\r\n public Signature getSignature() {\r\n FrameType floatT = FrameType.single(float.class);\r\n return new Signature()\r\n .addInputPort(\"sharpness\", Signature.PORT_REQUIRED, floatT)\r\n .addOutputPort(\"avg\", Signature.PORT_REQUIRED, floatT)\r\n .disallowOtherPorts();\r\n }\r\n\r\n @Override\r\n protected void onProcess() {\r\n FrameValue inFrameValue = getConnectedInputPort(\"sharpness\").pullFrame().asFrameValue();\r\n if (counter < NUM_FRAMES && counter >= 0) {\r\n temp[counter] = ((Float)inFrameValue.getValue()).floatValue();\r\n }\r\n\r\n counter = (counter + 1) % NUM_FRAMES;\r\n\r\n float output = (temp[0] + temp[1] + temp[2] + temp[3] + temp[4]) / NUM_FRAMES;\r\n if (mLogVerbose) Log.v(TAG, \"Avg= \" + output + \"temp1= \" + temp[0] + \"temp2= \" +\r\n temp[1] + \"temp3= \" + temp[2] + \"temp4=\" + temp[3] + \"temp5=\" + temp[4]);\r\n\r\n OutputPort outPort = getConnectedOutputPort(\"avg\");\r\n FrameValue outFrame = outPort.fetchAvailableFrame(null).asFrameValue();\r\n outFrame.setValue(output);\r\n outPort.pushFrame(outFrame);\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.5702678561210632, "alphanum_fraction": 0.5738790035247803, "avg_line_length": 37.55952453613281, "blob_id": "e5847271f1d6d71962962a503ecbe57e0fc0a492", "content_id": "714298cee306e44231a897801606cfe944384204", "detected_licenses": 
[ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 6646, "license_type": "permissive", "max_line_length": 107, "num_lines": 168, "path": "/tests/TransitionTests/src/com/android/transitiontests/ListViewAddRemove.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2013 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\npackage com.android.transitiontests;\r\n\r\nimport android.app.Activity;\r\nimport android.content.Context;\r\nimport android.os.Bundle;\r\nimport android.view.View;\r\nimport android.view.ViewTreeObserver;\r\nimport android.transition.Fade;\r\nimport android.transition.Scene;\r\nimport android.widget.AdapterView;\r\nimport android.widget.ArrayAdapter;\r\nimport android.widget.LinearLayout;\r\nimport android.widget.ListView;\r\nimport android.widget.TextView;\r\nimport android.transition.AutoTransition;\r\nimport android.transition.ChangeBounds;\r\nimport android.transition.Transition;\r\nimport android.transition.TransitionListenerAdapter;\r\nimport android.transition.TransitionManager;\r\nimport android.transition.TransitionSet;\r\n\r\nimport java.util.ArrayList;\r\nimport java.util.HashMap;\r\nimport java.util.List;\r\n\r\npublic class ListViewAddRemove extends Activity {\r\n\r\n final ArrayList<String> numList = new ArrayList<String>();\r\n\r\n @Override\r\n public void onCreate(Bundle savedInstanceState) {\r\n super.onCreate(savedInstanceState);\r\n setContentView(R.layout.list_view_add_remove);\r\n\r\n final LinearLayout container = findViewById(R.id.container);\r\n\r\n final ListView listview = findViewById(R.id.listview);\r\n for (int i = 0; i < 200; ++i) {\r\n numList.add(Integer.toString(i));\r\n }\r\n final StableArrayAdapter adapter = new StableArrayAdapter(this,\r\n android.R.layout.simple_list_item_1, numList);\r\n listview.setAdapter(adapter);\r\n\r\n final ViewTreeObserver observer = container.getViewTreeObserver();\r\n observer.addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {\r\n public void onGlobalLayout() {\r\n System.out.println(\"-------------------------------------\");\r\n System.out.println(\"onLayoutListener: listview view tops: \");\r\n for (int i = 0; i < listview.getChildCount(); ++i) {\r\n TextView view = (TextView) listview.getChildAt(i);\r\n System.out.println(\" \" + view.getText() + \": \" + view.getTop());\r\n }\r\n }\r\n });\r\n\r\n final Scene mySceneChanger = new Scene(listview);\r\n\r\n mySceneChanger.setEnterAction(new Runnable() {\r\n @Override\r\n public void run() {\r\n numList.remove(mItemToDelete);\r\n adapter.notifyDataSetChanged();\r\n }\r\n });\r\n final Transition myTransition = new AutoTransition();\r\n final TransitionSet noFadeIn = new TransitionSet().\r\n setOrdering(TransitionSet.ORDERING_SEQUENTIAL);\r\n Fade fadeIn = new Fade(Fade.IN);\r\n fadeIn.setDuration(50);\r\n noFadeIn.addTransition(new 
Fade(Fade.OUT)).addTransition(new ChangeBounds()).addTransition(fadeIn);\r\n\r\n myTransition.addListener(new TransitionListenerAdapter() {\r\n @Override\r\n public void onTransitionStart(Transition transition) {\r\n System.out.println(\"---------ListView Tops: Before--------\");\r\n for (int i = 0; i < listview.getChildCount(); ++i) {\r\n TextView view = (TextView) listview.getChildAt(i);\r\n int position = listview.getPositionForView(view);\r\n }\r\n }\r\n\r\n @Override\r\n public void onTransitionEnd(Transition transition) {\r\n System.out.println(\"---------ListView Tops: After--------\");\r\n for (int i = 0; i < listview.getChildCount(); ++i) {\r\n TextView view = (TextView) listview.getChildAt(i);\r\n int position = listview.getPositionForView(view);\r\n if (view.hasTransientState()) {\r\n// view.setHasTransientState(false);\r\n }\r\n }\r\n myTransition.removeListener(this);\r\n }\r\n });\r\n\r\n listview.setOnItemClickListener(new AdapterView.OnItemClickListener() {\r\n\r\n @Override\r\n public void onItemClick(AdapterView<?> parent, final View view, int position, long id) {\r\n System.out.println(\"---------ListView Tops: OnClick--------\");\r\n String item = (String) parent.getItemAtPosition(position);\r\n for (int i = 0; i < listview.getChildCount(); ++i) {\r\n TextView v = (TextView) listview.getChildAt(i);\r\n if (!item.equals(v.getText())) {\r\n// v.setHasTransientState(true);\r\n }\r\n }\r\n// listview.setHasTransientState(true);\r\n mItemToDelete = item;\r\n// numList.remove(item);\r\n TransitionManager.go(mySceneChanger, noFadeIn);\r\n// view.postDelayed(new Runnable() {\r\n// @Override\r\n// public void run() {\r\n// for (int i = 0; i < listview.getChildCount(); ++i) {\r\n// TextView v = (TextView) listview.getChildAt(i);\r\n// v.setHasTransientState(false);\r\n// }\r\n// }\r\n// }, 200);\r\n }\r\n\r\n });\r\n }\r\n\r\n String mItemToDelete = null;\r\n\r\n private class StableArrayAdapter extends ArrayAdapter<String> {\r\n\r\n HashMap<String, Integer> mIdMap = new HashMap<String, Integer>();\r\n\r\n public StableArrayAdapter(Context context, int textViewResourceId,\r\n List<String> objects) {\r\n super(context, textViewResourceId, objects);\r\n for (int i = 0; i < objects.size(); ++i) {\r\n mIdMap.put(objects.get(i), i);\r\n }\r\n }\r\n\r\n @Override\r\n public long getItemId(int position) {\r\n String item = getItem(position);\r\n return mIdMap.get(item);\r\n }\r\n\r\n @Override\r\n public boolean hasStableIds() {\r\n return true;\r\n }\r\n\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.7681159377098083, "alphanum_fraction": 0.7681159377098083, "avg_line_length": 44, "blob_id": "6a51804eff1ea16241200a55328e96be0b7b97c4", "content_id": "860cfa0cef34a1f52cd15eb7187e22c04c29c3b5", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 138, "license_type": "permissive", "max_line_length": 70, "num_lines": 3, "path": "/cmds/content/content", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "#!/system/bin/sh\r\nexport CLASSPATH=/system/framework/content.jar\r\nexec app_process /system/bin com.android.commands.content.Content \"$@\"\r\n" }, { "alpha_fraction": 0.7460629940032959, "alphanum_fraction": 0.7578740119934082, "avg_line_length": 33.44186019897461, "blob_id": "273818447222800de4bdec1d15effdded4b1a587", "content_id": "9bb18c5b5b1de77e3f6ea0ba2c879965e610f405", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, 
"is_vendor": false, "language": "C++", "length_bytes": 1524, "license_type": "permissive", "max_line_length": 81, "num_lines": 43, "path": "/native/android/native_activity.cpp", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2010 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#define LOG_TAG \"native_activity\"\r\n#include <utils/Log.h>\r\n\r\n#include <android_runtime/android_app_NativeActivity.h>\r\n\r\nusing namespace android;\r\n\r\nvoid ANativeActivity_finish(ANativeActivity* activity) {\r\n android_NativeActivity_finish(activity);\r\n}\r\n\r\nvoid ANativeActivity_setWindowFormat(ANativeActivity* activity, int32_t format) {\r\n\tandroid_NativeActivity_setWindowFormat(activity, format);\r\n}\r\n\r\nvoid ANativeActivity_setWindowFlags(ANativeActivity* activity,\r\n\t\tuint32_t addFlags, uint32_t removeFlags) {\r\n\tandroid_NativeActivity_setWindowFlags(activity, addFlags, addFlags|removeFlags);\r\n}\r\n\r\nvoid ANativeActivity_showSoftInput(ANativeActivity* activity, uint32_t flags) {\r\n\tandroid_NativeActivity_showSoftInput(activity, flags);\r\n}\r\n\r\nvoid ANativeActivity_hideSoftInput(ANativeActivity* activity, uint32_t flags) {\r\n\tandroid_NativeActivity_hideSoftInput(activity, flags);\r\n}\r\n" }, { "alpha_fraction": 0.662120521068573, "alphanum_fraction": 0.665640115737915, "avg_line_length": 34.66128921508789, "blob_id": "d424e44baefb6a5b04a4a0d643030026dad692c9", "content_id": "1823e7faf696a6c69270d81e9c84cd3d74f59582", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 4546, "license_type": "permissive", "max_line_length": 94, "num_lines": 124, "path": "/core/java/android/speech/tts/AbstractEventLogger.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2013 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\r\n * use this file except in compliance with the License. You may obtain a copy of\r\n * the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\r\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\r\n * License for the specific language governing permissions and limitations under\r\n * the License.\r\n */\r\npackage android.speech.tts;\r\n\r\nimport android.os.SystemClock;\r\n\r\n/**\r\n * Base class for storing data about a given speech synthesis request to the\r\n * event logs. The data that is logged depends on actual implementation. 
Note\r\n * that {@link AbstractEventLogger#onAudioDataWritten()} and\r\n * {@link AbstractEventLogger#onEngineComplete()} must be called from a single\r\n * thread (usually the audio playback thread}.\r\n */\r\nabstract class AbstractEventLogger {\r\n protected final String mServiceApp;\r\n protected final int mCallerUid;\r\n protected final int mCallerPid;\r\n protected final long mReceivedTime;\r\n protected long mPlaybackStartTime = -1;\r\n\r\n private volatile long mRequestProcessingStartTime = -1;\r\n private volatile long mEngineStartTime = -1;\r\n private volatile long mEngineCompleteTime = -1;\r\n\r\n private boolean mLogWritten = false;\r\n\r\n AbstractEventLogger(int callerUid, int callerPid, String serviceApp) {\r\n mCallerUid = callerUid;\r\n mCallerPid = callerPid;\r\n mServiceApp = serviceApp;\r\n mReceivedTime = SystemClock.elapsedRealtime();\r\n }\r\n\r\n /**\r\n * Notifies the logger that this request has been selected from\r\n * the processing queue for processing. Engine latency / total time\r\n * is measured from this baseline.\r\n */\r\n public void onRequestProcessingStart() {\r\n mRequestProcessingStartTime = SystemClock.elapsedRealtime();\r\n }\r\n\r\n /**\r\n * Notifies the logger that a chunk of data has been received from\r\n * the engine. Might be called multiple times.\r\n */\r\n public void onEngineDataReceived() {\r\n if (mEngineStartTime == -1) {\r\n mEngineStartTime = SystemClock.elapsedRealtime();\r\n }\r\n }\r\n\r\n /**\r\n * Notifies the logger that the engine has finished processing data.\r\n * Will be called exactly once.\r\n */\r\n public void onEngineComplete() {\r\n mEngineCompleteTime = SystemClock.elapsedRealtime();\r\n }\r\n\r\n /**\r\n * Notifies the logger that audio playback has started for some section\r\n * of the synthesis. 
This is normally some amount of time after the engine\r\n * has synthesized data and varies depending on utterances and\r\n * other audio currently in the queue.\r\n */\r\n public void onAudioDataWritten() {\r\n // For now, keep track of only the first chunk of audio\r\n // that was played.\r\n if (mPlaybackStartTime == -1) {\r\n mPlaybackStartTime = SystemClock.elapsedRealtime();\r\n }\r\n }\r\n\r\n /**\r\n * Notifies the logger that the current synthesis has completed.\r\n * All available data is not logged.\r\n */\r\n public void onCompleted(int statusCode) {\r\n if (mLogWritten) {\r\n return;\r\n } else {\r\n mLogWritten = true;\r\n }\r\n\r\n long completionTime = SystemClock.elapsedRealtime();\r\n\r\n // We don't report latency for stopped syntheses because their overall\r\n // total time spent will be inaccurate (will not correlate with\r\n // the length of the utterance).\r\n\r\n // onAudioDataWritten() should normally always be called, and hence mPlaybackStartTime\r\n // should be set, if an error does not occur.\r\n if (statusCode != TextToSpeech.SUCCESS\r\n || mPlaybackStartTime == -1 || mEngineCompleteTime == -1) {\r\n logFailure(statusCode);\r\n return;\r\n }\r\n\r\n final long audioLatency = mPlaybackStartTime - mReceivedTime;\r\n final long engineLatency = mEngineStartTime - mRequestProcessingStartTime;\r\n final long engineTotal = mEngineCompleteTime - mRequestProcessingStartTime;\r\n logSuccess(audioLatency, engineLatency, engineTotal);\r\n }\r\n\r\n protected abstract void logFailure(int statusCode);\r\n protected abstract void logSuccess(long audioLatency, long engineLatency,\r\n long engineTotal);\r\n\r\n\r\n}\r\n" }, { "alpha_fraction": 0.6614969372749329, "alphanum_fraction": 0.6682400703430176, "avg_line_length": 34.17073059082031, "blob_id": "87fd49753766c8c3cb32f183ff00b7e2456906fb", "content_id": "05a45084d74a9c4f970cf33be387a19ebbe2fa7e", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1483, "license_type": "permissive", "max_line_length": 99, "num_lines": 41, "path": "/core/java/android/service/autofill/AutofillServiceHelper.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2018 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage android.service.autofill;\r\n\r\nimport android.annotation.Nullable;\r\nimport android.view.autofill.AutofillId;\r\n\r\nimport com.android.internal.util.Preconditions;\r\n\r\n/** @hide */\r\nfinal class AutofillServiceHelper {\r\n\r\n static AutofillId[] assertValid(@Nullable AutofillId[] ids) {\r\n Preconditions.checkArgument(ids != null && ids.length > 0, \"must have at least one id\");\r\n // Can't use Preconditions.checkArrayElementsNotNull() because it throws NPE instead of IAE\r\n for (int i = 0; i < ids.length; ++i) {\r\n if (ids[i] == null) {\r\n throw new IllegalArgumentException(\"ids[\" 
+ i + \"] must not be null\");\r\n }\r\n }\r\n return ids;\r\n }\r\n\r\n private AutofillServiceHelper() {\r\n throw new UnsupportedOperationException(\"contains static members only\");\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.7767210006713867, "alphanum_fraction": 0.7792119383811951, "avg_line_length": 45.505374908447266, "blob_id": "a9f93206b05821b57af750caa33d796ebffa7c4a", "content_id": "8d729a77529890737083e205259e93f81f15a7ef", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 4416, "license_type": "permissive", "max_line_length": 99, "num_lines": 93, "path": "/packages/SystemUI/tests/src/com/android/systemui/statusbar/phone/StatusBarRemoteInputCallbackTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2018 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file\r\n * except in compliance with the License. You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software distributed under the\r\n * License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\r\n * KIND, either express or implied. See the License for the specific language governing\r\n * permissions and limitations under the License.\r\n */\r\n\r\npackage com.android.systemui.statusbar.phone;\r\n\r\nimport static android.content.Intent.ACTION_DEVICE_LOCKED_CHANGED;\r\n\r\nimport static org.mockito.ArgumentMatchers.anyInt;\r\nimport static org.mockito.Mockito.mock;\r\nimport static org.mockito.Mockito.spy;\r\nimport static org.mockito.Mockito.verify;\r\nimport static org.mockito.Mockito.when;\r\nimport static org.mockito.internal.verification.VerificationModeFactory.times;\r\n\r\nimport android.content.Intent;\r\nimport android.testing.AndroidTestingRunner;\r\nimport android.testing.TestableLooper;\r\n\r\nimport androidx.test.filters.SmallTest;\r\n\r\nimport com.android.systemui.SysuiTestCase;\r\nimport com.android.systemui.plugins.ActivityStarter;\r\nimport com.android.systemui.statusbar.ActionClickLogger;\r\nimport com.android.systemui.statusbar.CommandQueue;\r\nimport com.android.systemui.statusbar.NotificationLockscreenUserManager;\r\nimport com.android.systemui.statusbar.SysuiStatusBarStateController;\r\nimport com.android.systemui.statusbar.notification.NotificationEntryManager;\r\nimport com.android.systemui.statusbar.policy.DeviceProvisionedController;\r\nimport com.android.systemui.statusbar.policy.KeyguardStateController;\r\n\r\nimport org.junit.Before;\r\nimport org.junit.Test;\r\nimport org.junit.runner.RunWith;\r\nimport org.mockito.Mock;\r\nimport org.mockito.MockitoAnnotations;\r\n\r\n@SmallTest\r\n@RunWith(AndroidTestingRunner.class)\r\[email protected]\r\npublic class StatusBarRemoteInputCallbackTest extends SysuiTestCase {\r\n @Mock private NotificationEntryManager mEntryManager;\r\n @Mock private DeviceProvisionedController mDeviceProvisionedController;\r\n @Mock private ShadeController mShadeController;\r\n @Mock private NotificationLockscreenUserManager mNotificationLockscreenUserManager;\r\n @Mock private KeyguardStateController mKeyguardStateController;\r\n @Mock private SysuiStatusBarStateController mStatusBarStateController;\r\n @Mock private StatusBarKeyguardViewManager mStatusBarKeyguardViewManager;\r\n @Mock private 
ActivityStarter mActivityStarter;\r\n\r\n private int mCurrentUserId = 0;\r\n private StatusBarRemoteInputCallback mRemoteInputCallback;\r\n\r\n @Before\r\n public void setUp() {\r\n MockitoAnnotations.initMocks(this);\r\n mDependency.injectTestDependency(NotificationEntryManager.class, mEntryManager);\r\n mDependency.injectTestDependency(DeviceProvisionedController.class,\r\n mDeviceProvisionedController);\r\n mDependency.injectTestDependency(ShadeController.class, mShadeController);\r\n mDependency.injectTestDependency(NotificationLockscreenUserManager.class,\r\n mNotificationLockscreenUserManager);\r\n\r\n mRemoteInputCallback = spy(new StatusBarRemoteInputCallback(mContext,\r\n mock(NotificationGroupManager.class), mNotificationLockscreenUserManager,\r\n mKeyguardStateController, mStatusBarStateController, mStatusBarKeyguardViewManager,\r\n mActivityStarter, mShadeController, new CommandQueue(mContext),\r\n mock(ActionClickLogger.class)));\r\n mRemoteInputCallback.mChallengeReceiver = mRemoteInputCallback.new ChallengeReceiver();\r\n }\r\n\r\n @Test\r\n public void testActionDeviceLockedChangedWithDifferentUserIdCallsOnWorkChallengeChanged() {\r\n when(mNotificationLockscreenUserManager.getCurrentUserId()).thenReturn(mCurrentUserId);\r\n when(mNotificationLockscreenUserManager.isCurrentProfile(anyInt())).thenReturn(true);\r\n Intent intent = new Intent()\r\n .setAction(ACTION_DEVICE_LOCKED_CHANGED)\r\n .putExtra(Intent.EXTRA_USER_HANDLE, mCurrentUserId + 1);\r\n mRemoteInputCallback.mChallengeReceiver.onReceive(mContext, intent);\r\n verify(mRemoteInputCallback, times(1)).onWorkChallengeChanged();\r\n }\r\n\r\n}" }, { "alpha_fraction": 0.7055888175964355, "alphanum_fraction": 0.7130738496780396, "avg_line_length": 29.3125, "blob_id": "9f41616efbd85611d53e674dda375a2c6004093f", "content_id": "caf7ff96cddaf693d60011b5a3892ae0ef339254", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2004, "license_type": "permissive", "max_line_length": 87, "num_lines": 64, "path": "/tools/split-select/SplitDescription.h", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2014 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#ifndef H_ANDROID_SPLIT_SPLIT_DESCRIPTION\r\n#define H_ANDROID_SPLIT_SPLIT_DESCRIPTION\r\n\r\n#include \"aapt/ConfigDescription.h\"\r\n#include \"Abi.h\"\r\n\r\n#include <utils/String8.h>\r\n#include <utils/Vector.h>\r\n\r\nnamespace split {\r\n\r\nstruct SplitDescription {\r\n SplitDescription();\r\n\r\n ConfigDescription config;\r\n abi::Variant abi;\r\n\r\n int compare(const SplitDescription& rhs) const;\r\n inline bool operator<(const SplitDescription& rhs) const;\r\n inline bool operator==(const SplitDescription& rhs) const;\r\n inline bool operator!=(const SplitDescription& rhs) const;\r\n\r\n bool match(const SplitDescription& o) const;\r\n bool 
isBetterThan(const SplitDescription& o, const SplitDescription& target) const;\r\n\r\n android::String8 toString() const;\r\n\r\n static bool parse(const android::String8& str, SplitDescription* outSplit);\r\n};\r\n\r\nssize_t parseAbi(const android::Vector<android::String8>& parts, const ssize_t index,\r\n SplitDescription* outSplit);\r\n\r\nbool SplitDescription::operator<(const SplitDescription& rhs) const {\r\n return compare(rhs) < 0;\r\n}\r\n\r\nbool SplitDescription::operator==(const SplitDescription& rhs) const {\r\n return compare(rhs) == 0;\r\n}\r\n\r\nbool SplitDescription::operator!=(const SplitDescription& rhs) const {\r\n return compare(rhs) != 0;\r\n}\r\n\r\n} // namespace split\r\n\r\n#endif // H_ANDROID_SPLIT_SPLIT_DESCRIPTION\r\n" }, { "alpha_fraction": 0.5914396643638611, "alphanum_fraction": 0.6147859692573547, "avg_line_length": 26.55555534362793, "blob_id": "ea4518c52238d69252cc0db0b7eab24b6c13966f", "content_id": "3f671dddf01ac71acabe80fccde41cb40e3c4be7", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2827, "license_type": "permissive", "max_line_length": 115, "num_lines": 99, "path": "/tools/split-select/Abi.cpp", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2014 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#include \"Abi.h\"\r\n\r\nusing namespace android;\r\n\r\nnamespace split {\r\nnamespace abi {\r\n\r\nstatic Vector<Variant> buildVariants(Variant v1, Variant v2) {\r\n Vector<Variant> v;\r\n v.add(v1);\r\n v.add(v2);\r\n return v;\r\n}\r\n\r\nstatic Vector<Variant> buildVariants(Variant v1, Variant v2, Variant v3) {\r\n Vector<Variant> v;\r\n v.add(v1);\r\n v.add(v2);\r\n v.add(v3);\r\n return v;\r\n}\r\n\r\nstatic const Vector<Variant> sNoneVariants;\r\nstatic const Vector<Variant> sArmVariants = buildVariants(Variant_armeabi, Variant_armeabi_v7a, Variant_arm64_v8a);\r\nstatic const Vector<Variant> sIntelVariants = buildVariants(Variant_x86, Variant_x86_64);\r\nstatic const Vector<Variant> sMipsVariants = buildVariants(Variant_mips, Variant_mips64);\r\n\r\nFamily getFamily(Variant variant) {\r\n switch (variant) {\r\n case Variant_none:\r\n return Family_none;\r\n case Variant_armeabi:\r\n case Variant_armeabi_v7a:\r\n case Variant_arm64_v8a:\r\n return Family_arm;\r\n case Variant_x86:\r\n case Variant_x86_64:\r\n return Family_intel;\r\n case Variant_mips:\r\n case Variant_mips64:\r\n return Family_mips;\r\n }\r\n return Family_none;\r\n}\r\n\r\nconst Vector<Variant>& getVariants(Family family) {\r\n switch (family) {\r\n case Family_none:\r\n return sNoneVariants;\r\n case Family_arm:\r\n return sArmVariants;\r\n case Family_intel:\r\n return sIntelVariants;\r\n case Family_mips:\r\n return sMipsVariants;\r\n }\r\n return sNoneVariants;\r\n}\r\n\r\nconst char* toString(Variant variant) {\r\n switch (variant) {\r\n case 
Variant_none:\r\n return \"\";\r\n case Variant_armeabi:\r\n return \"armeabi\";\r\n case Variant_armeabi_v7a:\r\n return \"armeabi-v7a\";\r\n case Variant_arm64_v8a:\r\n return \"arm64-v8a\";\r\n case Variant_x86:\r\n return \"x86\";\r\n case Variant_x86_64:\r\n return \"x86_64\";\r\n case Variant_mips:\r\n return \"mips\";\r\n case Variant_mips64:\r\n return \"mips64\";\r\n }\r\n return \"\";\r\n}\r\n\r\n} // namespace abi\r\n} // namespace split\r\n" }, { "alpha_fraction": 0.6999140381813049, "alphanum_fraction": 0.7067927718162537, "avg_line_length": 28.605262756347656, "blob_id": "c5b820b9dab0094a165623cb923979550fecb78e", "content_id": "22146c90178093d1c5f3dd469975b5f389122d93", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1163, "license_type": "permissive", "max_line_length": 75, "num_lines": 38, "path": "/tests/MemoryUsage/src/com/android/tests/memoryusage/MemoryUsageInstrumentation.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2012 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\npackage com.android.tests.memoryusage;\r\n\r\nimport android.os.Bundle;\r\nimport android.test.InstrumentationTestRunner;\r\n\r\n/**\r\n * InstrumentationTestRunner for use with the {@link MemoryUsageTest}.\r\n */\r\npublic class MemoryUsageInstrumentation extends InstrumentationTestRunner {\r\n\r\n private Bundle arguments;\r\n\r\n @Override\r\n public void onCreate(Bundle arguments) {\r\n this.arguments = arguments;\r\n super.onCreate(arguments);\r\n }\r\n\r\n public Bundle getBundle() {\r\n return arguments;\r\n }\r\n\r\n}\r\n" }, { "alpha_fraction": 0.6664565205574036, "alphanum_fraction": 0.6699243187904358, "avg_line_length": 37.650001525878906, "blob_id": "f7a9df36b5d5d39a10acf8d8376020d2af0bcf0b", "content_id": "dd8350ae3695552e636dba30138502f748ddbef0", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 3172, "license_type": "permissive", "max_line_length": 98, "num_lines": 80, "path": "/Android.mk", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "#\r\n# Copyright (C) 2008 The Android Open Source Project\r\n#\r\n# Licensed under the Apache License, Version 2.0 (the \"License\");\r\n# you may not use this file except in compliance with the License.\r\n# You may obtain a copy of the License at\r\n#\r\n# http://www.apache.org/licenses/LICENSE-2.0\r\n#\r\n# Unless required by applicable law or agreed to in writing, software\r\n# distributed under the License is distributed on an \"AS IS\" BASIS,\r\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n# See the License for the specific language governing permissions and\r\n# limitations under the License.\r\n#\r\nLOCAL_PATH := $(call my-dir)\r\n\r\n# Load 
framework-specific path mappings used later in the build.\r\ninclude $(LOCAL_PATH)/pathmap.mk\r\n\r\n# Build the master framework library.\r\n# The framework contains too many method references (>64K) for poor old DEX.\r\n# So we first build the framework as a monolithic static library then split it\r\n# up into smaller pieces.\r\n# ============================================================\r\n\r\n# embedded builds use nothing in frameworks/base\r\nifneq ($(ANDROID_BUILD_EMBEDDED),true)\r\n\r\n# Copy AIDL files to be preprocessed and included in the SDK,\r\n# specified relative to the root of the build tree.\r\n# ============================================================\r\ninclude $(CLEAR_VARS)\r\n\r\n# This is used by ide.mk as the list of source files that are\r\n# always included.\r\nINTERNAL_SDK_SOURCE_DIRS := $(addprefix $(LOCAL_PATH)/,$(dirs_to_document))\r\n\r\n# sdk.atree needs to copy the whole dir: $(OUT_DOCS)/offline-sdk to the final zip.\r\n# So keep offline-sdk-timestamp target here, and unzip offline-sdk-docs.zip to\r\n# $(OUT_DOCS)/offline-sdk.\r\n$(OUT_DOCS)/offline-sdk-timestamp: $(OUT_DOCS)/offline-sdk-docs-docs.zip\r\n\t$(hide) rm -rf $(OUT_DOCS)/offline-sdk\r\n\t$(hide) mkdir -p $(OUT_DOCS)/offline-sdk\r\n\t( unzip -qo $< -d $(OUT_DOCS)/offline-sdk && touch -f $@ ) || exit 1\r\n\r\n.PHONY: docs offline-sdk-docs\r\ndocs offline-sdk-docs: $(OUT_DOCS)/offline-sdk-timestamp\r\n\r\nSDK_METADATA_DIR :=$= $(call intermediates-dir-for,PACKAGING,framework-doc-stubs-metadata,,COMMON)\r\nSDK_METADATA_FILES :=$= $(addprefix $(SDK_METADATA_DIR)/,\\\r\n activity_actions.txt \\\r\n broadcast_actions.txt \\\r\n categories.txt \\\r\n features.txt \\\r\n service_actions.txt \\\r\n widgets.txt)\r\nSDK_METADATA :=$= $(firstword $(SDK_METADATA_FILES))\r\n$(SDK_METADATA): .KATI_IMPLICIT_OUTPUTS := $(filter-out $(SDK_METADATA),$(SDK_METADATA_FILES))\r\n$(SDK_METADATA): $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/framework-doc-stubs-metadata.zip\r\n\trm -rf $(SDK_METADATA_DIR)\r\n\tmkdir -p $(SDK_METADATA_DIR)\r\n\tunzip -qo $< -d $(SDK_METADATA_DIR)\r\n\r\n.PHONY: framework-doc-stubs\r\nframework-doc-stubs: $(SDK_METADATA)\r\n\r\n# Run this for checkbuild\r\ncheckbuild: doc-comment-check-docs\r\n\r\n# Include subdirectory makefiles\r\n# ============================================================\r\n\r\n# If we're building with ONE_SHOT_MAKEFILE (mm, mmm), then what the framework\r\n# team really wants is to build the stuff defined by this makefile.\r\nifeq (,$(ONE_SHOT_MAKEFILE))\r\ninclude $(call first-makefiles-under,$(LOCAL_PATH))\r\nendif\r\n\r\nendif # ANDROID_BUILD_EMBEDDED\r\n" }, { "alpha_fraction": 0.5431249737739563, "alphanum_fraction": 0.5877083539962769, "avg_line_length": 32.28571319580078, "blob_id": "b91f6967eec13d16f381e999275f3f4a92e0c3d4", "content_id": "0e8159c521ce578cb9f4d504efc2f7ff2a4687ae", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 4800, "license_type": "permissive", "max_line_length": 90, "num_lines": 140, "path": "/tests/HwAccelerationTest/src/com/android/test/hwui/ScaledTextActivity.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2010 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * 
http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.test.hwui;\r\n\r\nimport android.animation.ObjectAnimator;\r\nimport android.app.Activity;\r\nimport android.content.Context;\r\nimport android.graphics.Canvas;\r\nimport android.graphics.Paint;\r\nimport android.graphics.Path;\r\nimport android.os.Bundle;\r\nimport android.view.View;\r\n\r\n@SuppressWarnings({\"UnusedDeclaration\"})\r\npublic class ScaledTextActivity extends Activity {\r\n\r\n @Override\r\n protected void onCreate(Bundle savedInstanceState) {\r\n super.onCreate(savedInstanceState);\r\n\r\n final ScaledTextView view = new ScaledTextView(this);\r\n setContentView(view);\r\n\r\n ObjectAnimator animation = ObjectAnimator.ofFloat(view, \"textScale\", 1.0f, 10.0f);\r\n animation.setDuration(3000);\r\n animation.setRepeatCount(ObjectAnimator.INFINITE);\r\n animation.setRepeatMode(ObjectAnimator.REVERSE);\r\n animation.start();\r\n\r\n }\r\n\r\n public static class ScaledTextView extends View {\r\n private static final String TEXT = \"Hello libhwui! \";\r\n\r\n private final Paint mPaint;\r\n private final Paint mShadowPaint;\r\n private final Path mPath;\r\n\r\n private float mScale = 1.0f;\r\n\r\n public ScaledTextView(Context c) {\r\n super(c);\r\n setLayerType(LAYER_TYPE_HARDWARE, null);\r\n\r\n mPath = makePath();\r\n\r\n mPaint = new Paint();\r\n mPaint.setAntiAlias(true);\r\n mPaint.setTextSize(20.0f);\r\n\r\n mShadowPaint = new Paint();\r\n mShadowPaint.setAntiAlias(true);\r\n mShadowPaint.setShadowLayer(3.0f, 0.0f, 3.0f, 0xff000000);\r\n mShadowPaint.setTextSize(20.0f);\r\n }\r\n\r\n public float getTextScale() {\r\n return mScale;\r\n }\r\n\r\n public void setTextScale(float scale) {\r\n mScale = scale;\r\n invalidate();\r\n }\r\n\r\n private static Path makePath() {\r\n Path path = new Path();\r\n buildPath(path);\r\n return path;\r\n }\r\n\r\n private static void buildPath(Path path) {\r\n path.moveTo(0.0f, 0.0f);\r\n path.cubicTo(0.0f, 0.0f, 100.0f, 150.0f, 100.0f, 200.0f);\r\n path.cubicTo(100.0f, 200.0f, 50.0f, 300.0f, -80.0f, 200.0f);\r\n path.cubicTo(-80.0f, 200.0f, 100.0f, 200.0f, 200.0f, 0.0f);\r\n }\r\n\r\n @Override\r\n protected void onDraw(Canvas canvas) {\r\n super.onDraw(canvas);\r\n\r\n canvas.drawText(TEXT, 30.0f, 30.0f, mPaint);\r\n mPaint.setTextAlign(Paint.Align.CENTER);\r\n canvas.drawText(TEXT, 30.0f, 50.0f, mPaint);\r\n mPaint.setTextAlign(Paint.Align.RIGHT);\r\n canvas.drawText(TEXT, 30.0f, 70.0f, mPaint);\r\n\r\n canvas.save();\r\n canvas.translate(400.0f, 0.0f);\r\n canvas.scale(3.0f, 3.0f);\r\n mPaint.setTextAlign(Paint.Align.LEFT);\r\n mPaint.setStrikeThruText(true);\r\n canvas.drawText(TEXT, 30.0f, 30.0f, mPaint);\r\n mPaint.setStrikeThruText(false);\r\n mPaint.setTextAlign(Paint.Align.CENTER);\r\n canvas.drawText(TEXT, 30.0f, 50.0f, mPaint);\r\n mPaint.setTextAlign(Paint.Align.RIGHT);\r\n canvas.drawText(TEXT, 30.0f, 70.0f, mPaint);\r\n canvas.restore();\r\n\r\n mPaint.setTextAlign(Paint.Align.LEFT);\r\n canvas.translate(0.0f, 100.0f);\r\n\r\n canvas.save();\r\n canvas.scale(mScale, mScale);\r\n canvas.drawText(TEXT, 30.0f, 30.0f, mPaint);\r\n canvas.restore();\r\n\r\n canvas.translate(0.0f, 250.0f);\r\n canvas.save();\r\n 
canvas.scale(3.0f, 3.0f);\r\n canvas.drawText(TEXT, 30.0f, 30.0f, mShadowPaint);\r\n canvas.translate(100.0f, 0.0f);\r\n// canvas.drawTextOnPath(TEXT + TEXT + TEXT, mPath, 0.0f, 0.0f, mPaint);\r\n canvas.restore();\r\n\r\n float width = mPaint.measureText(TEXT);\r\n\r\n canvas.translate(500.0f, 0.0f);\r\n canvas.rotate(45.0f, width * 3.0f / 2.0f, 0.0f);\r\n canvas.scale(3.0f, 3.0f);\r\n canvas.drawText(TEXT, 30.0f, 30.0f, mPaint);\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6607393622398376, "alphanum_fraction": 0.6627671122550964, "avg_line_length": 41.31756591796875, "blob_id": "62bd77162b8ea27ad820377a71d6c74b9ac5814a", "content_id": "6421e6ac2b47d45ab262e6a6dce0b620328f5202", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 6411, "license_type": "permissive", "max_line_length": 100, "num_lines": 148, "path": "/core/java/android/hardware/camera2/marshal/Marshaler.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2014 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\npackage android.hardware.camera2.marshal;\r\n\r\nimport android.hardware.camera2.utils.TypeReference;\r\n\r\nimport java.nio.ByteBuffer;\r\n\r\nimport static android.hardware.camera2.marshal.MarshalHelpers.*;\r\nimport static com.android.internal.util.Preconditions.*;\r\n\r\n/**\r\n * Base class to marshal data to/from managed/native metadata byte buffers.\r\n *\r\n * <p>This class should not be created directly; an instance of it can be obtained\r\n * using {@link MarshalQueryable#createMarshaler} for the same type {@code T} if the native type\r\n * mapping for {@code T} {@link MarshalQueryable#isTypeMappingSupported supported}.</p>\r\n *\r\n * @param <T> the compile-time managed type\r\n */\r\npublic abstract class Marshaler<T> {\r\n\r\n protected final TypeReference<T> mTypeReference;\r\n protected final int mNativeType;\r\n\r\n /**\r\n * Instantiate a marshaler between a single managed/native type combination.\r\n *\r\n * <p>This particular managed/native type combination must be supported by\r\n * {@link #isTypeMappingSupported}.</p>\r\n *\r\n * @param query an instance of {@link MarshalQueryable}\r\n * @param typeReference the managed type reference\r\n * Must be one for which {@link #isTypeMappingSupported} returns {@code true}\r\n * @param nativeType the native type, e.g.\r\n * {@link android.hardware.camera2.impl.CameraMetadataNative#TYPE_BYTE TYPE_BYTE}.\r\n * Must be one for which {@link #isTypeMappingSupported} returns {@code true}.\r\n *\r\n * @throws NullPointerException if any args were {@code null}\r\n * @throws UnsupportedOperationException if the type mapping was not supported\r\n */\r\n protected Marshaler(\r\n MarshalQueryable<T> query, TypeReference<T> typeReference, int nativeType) {\r\n mTypeReference = checkNotNull(typeReference, \"typeReference must not be null\");\r\n 
mNativeType = checkNativeType(nativeType);\r\n\r\n if (!query.isTypeMappingSupported(typeReference, nativeType)) {\r\n throw new UnsupportedOperationException(\r\n \"Unsupported type marshaling for managed type \"\r\n + typeReference + \" and native type \"\r\n + MarshalHelpers.toStringNativeType(nativeType));\r\n }\r\n }\r\n\r\n /**\r\n * Marshal the specified object instance (value) into a byte buffer.\r\n *\r\n * <p>Upon completion, the {@link ByteBuffer#position()} will have advanced by\r\n * the {@link #calculateMarshalSize marshal size} of {@code value}.</p>\r\n *\r\n * @param value the value of type T that we wish to write into the byte buffer\r\n * @param buffer the byte buffer into which the marshaled object will be written\r\n */\r\n public abstract void marshal(T value, ByteBuffer buffer);\r\n\r\n /**\r\n * Get the size in bytes for how much space would be required to write this {@code value}\r\n * into a byte buffer using the given {@code nativeType}.\r\n *\r\n * <p>If the size of this {@code T} instance when serialized into a buffer is always constant,\r\n * then this method will always return the same value (and particularly, it will return\r\n * an equivalent value to {@link #getNativeSize()}.</p>\r\n *\r\n * <p>Overriding this method is a must when the size is {@link NATIVE_SIZE_DYNAMIC dynamic}.</p>\r\n *\r\n * @param value the value of type T that we wish to write into the byte buffer\r\n * @return the size that would need to be written to the byte buffer\r\n */\r\n public int calculateMarshalSize(T value) {\r\n int nativeSize = getNativeSize();\r\n\r\n if (nativeSize == NATIVE_SIZE_DYNAMIC) {\r\n throw new AssertionError(\"Override this function for dynamically-sized objects\");\r\n }\r\n\r\n return nativeSize;\r\n }\r\n\r\n /**\r\n * Unmarshal a new object instance from the byte buffer into its managed type.\r\n *\r\n * <p>Upon completion, the {@link ByteBuffer#position()} will have advanced by\r\n * the {@link #calculateMarshalSize marshal size} of the returned {@code T} instance.</p>\r\n *\r\n * @param buffer the byte buffer, from which we will read the object\r\n * @return a new instance of type T read from the byte buffer\r\n */\r\n public abstract T unmarshal(ByteBuffer buffer);\r\n\r\n /**\r\n * Used to denote variable-length data structures.\r\n *\r\n * <p>If the size is dynamic then we can't know ahead of time how big of a data structure\r\n * to preallocate for e.g. arrays, so one object must be unmarshaled at a time.</p>\r\n */\r\n public static int NATIVE_SIZE_DYNAMIC = -1;\r\n\r\n /**\r\n * How many bytes a single instance of {@code T} will take up if marshalled to/from\r\n * {@code nativeType}.\r\n *\r\n * <p>When unmarshaling data from native to managed, the instance {@code T} is not yet\r\n * available. If the native size is always a fixed mapping regardless of the instance of\r\n * {@code T} (e.g. 
if the type is not a container of some sort), it can be used to preallocate\r\n * containers for {@code T} to avoid resizing them.</p>\r\n *\r\n * <p>In particular, the array marshaler takes advantage of this (when size is not dynamic)\r\n * to preallocate arrays of the right length when unmarshaling an array {@code T[]}.</p>\r\n *\r\n * @return a size in bytes, or {@link #NATIVE_SIZE_DYNAMIC} if the size is dynamic\r\n */\r\n public abstract int getNativeSize();\r\n\r\n /**\r\n * The type reference for {@code T} for the managed type side of this marshaler.\r\n */\r\n public TypeReference<T> getTypeReference() {\r\n return mTypeReference;\r\n }\r\n\r\n /** The native type corresponding to this marshaler for the native side of this marshaler.*/\r\n public int getNativeType() {\r\n return mNativeType;\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.5967323780059814, "alphanum_fraction": 0.5988070368766785, "avg_line_length": 39.021278381347656, "blob_id": "d14132512939dd83cac32902735919bedbdc1279", "content_id": "cb2d47c3c1899ba40f062929b63724d399d1dcd4", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 3856, "license_type": "permissive", "max_line_length": 100, "num_lines": 94, "path": "/media/mca/filterfw/jni/jni_native_frame.h", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2011 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#ifndef ANDROID_FILTERFW_JNI_NATIVE_FRAME_H\r\n#define ANDROID_FILTERFW_JNI_NATIVE_FRAME_H\r\n\r\n#include <jni.h>\r\n\r\n#ifdef __cplusplus\r\nextern \"C\" {\r\n#endif\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_NativeFrame_nativeAllocate(JNIEnv* env, jobject thiz, jint size);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_NativeFrame_nativeDeallocate(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jint JNICALL\r\nJava_android_filterfw_core_NativeFrame_nativeIntSize(JNIEnv* env, jclass clazz);\r\n\r\nJNIEXPORT jint JNICALL\r\nJava_android_filterfw_core_NativeFrame_nativeFloatSize(JNIEnv* env, jclass clazz);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_NativeFrame_setNativeInts(JNIEnv* env, jobject thiz, jintArray ints);\r\n\r\nJNIEXPORT jintArray JNICALL\r\nJava_android_filterfw_core_NativeFrame_getNativeInts(JNIEnv* env, jobject thiz, jint size);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_NativeFrame_setNativeFloats(JNIEnv* env, jobject thiz, jfloatArray ints);\r\n\r\nJNIEXPORT jfloatArray JNICALL\r\nJava_android_filterfw_core_NativeFrame_getNativeFloats(JNIEnv* env, jobject thiz, jint size);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_NativeFrame_setNativeData(JNIEnv* env,\r\n jobject thiz,\r\n jbyteArray data,\r\n jint offset,\r\n jint length);\r\n\r\nJNIEXPORT jbyteArray JNICALL\r\nJava_android_filterfw_core_NativeFrame_getNativeData(JNIEnv* env, 
jobject thiz, jint size);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_NativeFrame_getNativeBuffer(JNIEnv* env, jobject thiz, jobject buffer);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_NativeFrame_setNativeBitmap(JNIEnv* env,\r\n jobject thiz,\r\n jobject bitmap,\r\n jint size,\r\n jint bytes_per_sample);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_NativeFrame_getNativeBitmap(JNIEnv* env,\r\n jobject thiz,\r\n jobject bitmap,\r\n jint size,\r\n jint bytes_per_sample);\r\n\r\nJNIEXPORT jint JNICALL\r\nJava_android_filterfw_core_NativeFrame_getNativeCapacity(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_NativeFrame_nativeCopyFromNative(JNIEnv* env,\r\n jobject thiz,\r\n jobject frame);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_NativeFrame_nativeCopyFromGL(JNIEnv* env,\r\n jobject thiz,\r\n jobject frame);\r\n\r\n#ifdef __cplusplus\r\n}\r\n#endif\r\n\r\n#endif // ANDROID_FILTERFW_JNI_NATIVE_FRAME_H\r\n" }, { "alpha_fraction": 0.5805520415306091, "alphanum_fraction": 0.5817068815231323, "avg_line_length": 35.0042724609375, "blob_id": "f83fda0a9714c06176ee7f5c655f0d9fb0e1b119", "content_id": "439f791697e5f22b822eeca745165b48deff33bc", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 8659, "license_type": "permissive", "max_line_length": 104, "num_lines": 234, "path": "/tests/OneMedia/src/com/android/onemedia/OnePlayerActivity.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2014 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\npackage com.android.onemedia;\r\n\r\n\r\nimport android.app.Activity;\r\nimport android.app.Notification;\r\nimport android.app.NotificationManager;\r\nimport android.app.PendingIntent;\r\nimport android.content.ComponentName;\r\nimport android.content.Intent;\r\nimport android.graphics.Bitmap;\r\nimport android.graphics.BitmapFactory;\r\nimport android.media.MediaMetadata;\r\nimport android.media.session.PlaybackState;\r\nimport android.net.Uri;\r\nimport android.os.Bundle;\r\nimport android.provider.MediaStore;\r\nimport android.text.format.DateUtils;\r\nimport android.util.Log;\r\nimport android.view.Menu;\r\nimport android.view.View;\r\nimport android.widget.Button;\r\nimport android.widget.CheckBox;\r\nimport android.widget.EditText;\r\nimport android.widget.ImageView;\r\nimport android.widget.TextView;\r\n\r\nimport java.io.IOException;\r\n\r\npublic class OnePlayerActivity extends Activity {\r\n private static final String TAG = \"OnePlayerActivity\";\r\n\r\n private static final int READ_REQUEST_CODE = 42;\r\n\r\n protected PlayerController mPlayer;\r\n\r\n private Button mStartButton;\r\n private Button mPlayButton;\r\n private Button mRouteButton;\r\n private TextView mStatusView;\r\n\r\n private EditText 
mContentText;\r\n private EditText mNextContentText;\r\n private CheckBox mHasVideo;\r\n private ImageView mArtView;\r\n\r\n private PlaybackState mPlaybackState;\r\n private Bitmap mAlbumArtBitmap;\r\n\r\n @Override\r\n protected void onCreate(Bundle savedInstanceState) {\r\n super.onCreate(savedInstanceState);\r\n setContentView(R.layout.activity_one_player);\r\n mPlayer = new PlayerController(this, OnePlayerService.getServiceIntent(this));\r\n\r\n\r\n mStartButton = findViewById(R.id.start_button);\r\n mPlayButton = findViewById(R.id.play_button);\r\n mRouteButton = findViewById(R.id.route_button);\r\n mStatusView = findViewById(R.id.status);\r\n mContentText = findViewById(R.id.content);\r\n mNextContentText = findViewById(R.id.next_content);\r\n mHasVideo = findViewById(R.id.has_video);\r\n mArtView = findViewById(R.id.art);\r\n\r\n final Button artPicker = findViewById(R.id.art_picker);\r\n artPicker.setOnClickListener(mButtonListener);\r\n\r\n mStartButton.setOnClickListener(mButtonListener);\r\n mPlayButton.setOnClickListener(mButtonListener);\r\n mRouteButton.setOnClickListener(mButtonListener);\r\n\r\n }\r\n\r\n @Override\r\n public boolean onCreateOptionsMenu(Menu menu) {\r\n // Inflate the menu; this adds items to the action bar if it is present.\r\n getMenuInflater().inflate(R.menu.main, menu);\r\n return true;\r\n }\r\n\r\n @Override\r\n public void onResume() {\r\n super.onResume();\r\n mPlayer.onResume();\r\n mPlayer.setListener(mListener);\r\n }\r\n\r\n @Override\r\n public void onPause() {\r\n mPlayer.setListener(null);\r\n mPlayer.onPause();\r\n super.onPause();\r\n }\r\n\r\n @Override\r\n public void onActivityResult(int requestCode, int resultCode,\r\n Intent resultData) {\r\n if (requestCode == READ_REQUEST_CODE && resultCode == Activity.RESULT_OK) {\r\n Uri uri = null;\r\n if (resultData != null) {\r\n uri = resultData.getData();\r\n Log.i(TAG, \"Uri: \" + uri.toString());\r\n mAlbumArtBitmap = null;\r\n try {\r\n mAlbumArtBitmap = MediaStore.Images.Media.getBitmap(this.getContentResolver(), uri);\r\n } catch (IOException e) {\r\n Log.v(TAG, \"Couldn't load album art\", e);\r\n }\r\n mArtView.setImageBitmap(mAlbumArtBitmap);\r\n if (mAlbumArtBitmap != null) {\r\n mArtView.setVisibility(View.VISIBLE);\r\n } else {\r\n mArtView.setVisibility(View.GONE);\r\n }\r\n mPlayer.setArt(mAlbumArtBitmap);\r\n }\r\n }\r\n }\r\n\r\n private void setControlsEnabled(boolean enabled) {\r\n mStartButton.setEnabled(enabled);\r\n mPlayButton.setEnabled(enabled);\r\n }\r\n\r\n private View.OnClickListener mButtonListener = new View.OnClickListener() {\r\n @Override\r\n public void onClick(View v) {\r\n final int state = mPlaybackState.getState();\r\n switch (v.getId()) {\r\n case R.id.play_button:\r\n Log.d(TAG, \"Play button pressed, in state \" + state);\r\n if (state == PlaybackState.STATE_PAUSED\r\n || state == PlaybackState.STATE_STOPPED) {\r\n mPlayer.play();\r\n } else if (state == PlaybackState.STATE_PLAYING) {\r\n mPlayer.pause();\r\n }\r\n break;\r\n case R.id.start_button:\r\n Log.d(TAG, \"Start button pressed, in state \" + state);\r\n mPlayer.setContent(mContentText.getText().toString());\r\n break;\r\n case R.id.route_button:\r\n mPlayer.showRoutePicker();\r\n break;\r\n case R.id.art_picker:\r\n Intent intent = new Intent(Intent.ACTION_OPEN_DOCUMENT);\r\n intent.addCategory(Intent.CATEGORY_OPENABLE);\r\n intent.setType(\"image/*\");\r\n\r\n startActivityForResult(intent, READ_REQUEST_CODE);\r\n break;\r\n }\r\n\r\n }\r\n };\r\n\r\n private PlayerController.Listener 
mListener = new PlayerController.Listener() {\r\n public MediaMetadata mMetadata;\r\n\r\n @Override\r\n public void onPlaybackStateChange(PlaybackState state) {\r\n mPlaybackState = state;\r\n boolean enablePlay = false;\r\n boolean enableControls = true;\r\n StringBuilder statusBuilder = new StringBuilder();\r\n switch (mPlaybackState.getState()) {\r\n case PlaybackState.STATE_PLAYING:\r\n statusBuilder.append(\"playing\");\r\n mPlayButton.setText(\"Pause\");\r\n enablePlay = true;\r\n break;\r\n case PlaybackState.STATE_PAUSED:\r\n statusBuilder.append(\"paused\");\r\n mPlayButton.setText(\"Play\");\r\n enablePlay = true;\r\n break;\r\n case PlaybackState.STATE_STOPPED:\r\n statusBuilder.append(\"ended\");\r\n mPlayButton.setText(\"Play\");\r\n enablePlay = true;\r\n break;\r\n case PlaybackState.STATE_ERROR:\r\n statusBuilder.append(\"error: \").append(state.getErrorMessage());\r\n break;\r\n case PlaybackState.STATE_BUFFERING:\r\n statusBuilder.append(\"buffering\");\r\n break;\r\n case PlaybackState.STATE_NONE:\r\n statusBuilder.append(\"none\");\r\n break;\r\n case PlaybackState.STATE_CONNECTING:\r\n statusBuilder.append(\"connecting\");\r\n enableControls = false;\r\n break;\r\n default:\r\n statusBuilder.append(mPlaybackState);\r\n }\r\n statusBuilder.append(\" -- At position: \").append(state.getPosition());\r\n mStatusView.setText(statusBuilder.toString());\r\n mPlayButton.setEnabled(enablePlay);\r\n setControlsEnabled(enableControls);\r\n }\r\n\r\n @Override\r\n public void onConnectionStateChange(int state) {\r\n if (state == PlayerController.STATE_DISCONNECTED) {\r\n setControlsEnabled(false);\r\n } else if (state == PlayerController.STATE_CONNECTED) {\r\n setControlsEnabled(true);\r\n }\r\n }\r\n\r\n @Override\r\n public void onMetadataChange(MediaMetadata metadata) {\r\n mMetadata = metadata;\r\n }\r\n };\r\n}\r\n" }, { "alpha_fraction": 0.6720827221870422, "alphanum_fraction": 0.6779911518096924, "avg_line_length": 28.772727966308594, "blob_id": "b4eba6f3152ab4281b3a85ec769ea68d3aab2aba", "content_id": "3723f772190f219f1c818675c3e1fdf8c9aba319", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1354, "license_type": "permissive", "max_line_length": 91, "num_lines": 44, "path": "/tests/ActivityTests/src/com/google/android/test/activity/IsolatedService.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2014 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.google.android.test.activity;\r\n\r\nimport android.app.Service;\r\nimport android.content.Intent;\r\nimport android.os.Binder;\r\nimport android.os.IBinder;\r\nimport android.util.Log;\r\n\r\npublic class IsolatedService extends Service {\r\n Binder mBinder = new Binder();\r\n\r\n @Override\r\n public void onCreate() {\r\n super.onCreate();\r\n Log.i(\"IsolatedService\", \"Service 
created in pid \" + android.os.Process.myPid());\r\n }\r\n\r\n @Override\r\n public IBinder onBind(Intent intent) {\r\n return mBinder;\r\n }\r\n\r\n @Override\r\n public void onDestroy() {\r\n super.onDestroy();\r\n Log.i(\"IsolatedService\", \"Service destroyed in pid \" + android.os.Process.myPid());\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.5769968032836914, "alphanum_fraction": 0.5787007212638855, "avg_line_length": 26.113773345947266, "blob_id": "4460e1a849a1929cf5d611948c119bbccc89b2a2", "content_id": "b3b17b50cf69e7d9aa43c85aa7b94f36da2c4055", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 4695, "license_type": "permissive", "max_line_length": 91, "num_lines": 167, "path": "/core/java/android/nfc/dta/NfcDta.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2017 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage android.nfc.dta;\r\n\r\nimport android.content.Context;\r\nimport android.nfc.INfcDta;\r\nimport android.nfc.NfcAdapter;\r\nimport android.os.RemoteException;\r\nimport android.util.Log;\r\n\r\nimport java.util.HashMap;\r\n\r\n/**\r\n * This class provides the primary API for DTA operations.\r\n * @hide\r\n */\r\npublic final class NfcDta {\r\n private static final String TAG = \"NfcDta\";\r\n\r\n private static INfcDta sService;\r\n private static HashMap<Context, NfcDta> sNfcDtas = new HashMap<Context, NfcDta>();\r\n\r\n private final Context mContext;\r\n\r\n private NfcDta(Context context, INfcDta service) {\r\n mContext = context.getApplicationContext();\r\n sService = service;\r\n }\r\n\r\n /**\r\n * Helper to get an instance of this class.\r\n *\r\n * @param adapter A reference to an NfcAdapter object.\r\n * @return\r\n */\r\n public static synchronized NfcDta getInstance(NfcAdapter adapter) {\r\n if (adapter == null) throw new NullPointerException(\"NfcAdapter is null\");\r\n Context context = adapter.getContext();\r\n if (context == null) {\r\n Log.e(TAG, \"NfcAdapter context is null.\");\r\n throw new UnsupportedOperationException();\r\n }\r\n\r\n NfcDta manager = sNfcDtas.get(context);\r\n if (manager == null) {\r\n INfcDta service = adapter.getNfcDtaInterface();\r\n if (service == null) {\r\n Log.e(TAG, \"This device does not implement the INfcDta interface.\");\r\n throw new UnsupportedOperationException();\r\n }\r\n manager = new NfcDta(context, service);\r\n sNfcDtas.put(context, manager);\r\n }\r\n return manager;\r\n }\r\n\r\n /**\r\n * Enables DTA mode\r\n *\r\n * @return true/false if enabling was successful\r\n */\r\n public boolean enableDta() {\r\n try {\r\n sService.enableDta();\r\n } catch (RemoteException e) {\r\n return false;\r\n }\r\n return true;\r\n }\r\n\r\n /**\r\n * Disables DTA mode\r\n *\r\n * @return true/false if disabling was successful\r\n */\r\n public boolean disableDta() {\r\n try {\r\n 
sService.disableDta();\r\n } catch (RemoteException e) {\r\n return false;\r\n }\r\n return true;\r\n }\r\n\r\n /**\r\n * Enables Server\r\n *\r\n * @return true/false if enabling was successful\r\n */\r\n public boolean enableServer(String serviceName, int serviceSap, int miu,\r\n int rwSize, int testCaseId) {\r\n try {\r\n return sService.enableServer(serviceName, serviceSap, miu, rwSize, testCaseId);\r\n } catch (RemoteException e) {\r\n return false;\r\n }\r\n }\r\n\r\n /**\r\n * Disables Server\r\n *\r\n * @return true/false if disabling was successful\r\n */\r\n public boolean disableServer() {\r\n try {\r\n sService.disableServer();\r\n } catch (RemoteException e) {\r\n return false;\r\n }\r\n return true;\r\n }\r\n\r\n /**\r\n * Enables Client\r\n *\r\n * @return true/false if enabling was successful\r\n */\r\n public boolean enableClient(String serviceName, int miu, int rwSize,\r\n int testCaseId) {\r\n try {\r\n return sService.enableClient(serviceName, miu, rwSize, testCaseId);\r\n } catch (RemoteException e) {\r\n return false;\r\n }\r\n }\r\n\r\n /**\r\n * Disables client\r\n *\r\n * @return true/false if disabling was successful\r\n */\r\n public boolean disableClient() {\r\n try {\r\n sService.disableClient();\r\n } catch (RemoteException e) {\r\n return false;\r\n }\r\n return true;\r\n }\r\n\r\n /**\r\n * Registers Message Service\r\n *\r\n * @return true/false if registration was successful\r\n */\r\n public boolean registerMessageService(String msgServiceName) {\r\n try {\r\n return sService.registerMessageService(msgServiceName);\r\n } catch (RemoteException e) {\r\n return false;\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6957689523696899, "alphanum_fraction": 0.7011417150497437, "avg_line_length": 32.627906799316406, "blob_id": "64a252579634b20689bc9d38f047fa962a4276d7", "content_id": "d4dc5cc67177bbac108c0daa6f01724462b3fb43", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 1489, "license_type": "permissive", "max_line_length": 88, "num_lines": 43, "path": "/packages/SystemUI/src/com/android/systemui/statusbar/notification/row/DungeonRow.kt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n* Copyright (C) 2020 The Android Open Source Project\r\n*\r\n* Licensed under the Apache License, Version 2.0 (the \"License\");\r\n* you may not use this file except in compliance with the License.\r\n* You may obtain a copy of the License at\r\n*\r\n* http://www.apache.org/licenses/LICENSE-2.0\r\n*\r\n* Unless required by applicable law or agreed to in writing, software\r\n* distributed under the License is distributed on an \"AS IS\" BASIS,\r\n* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n* See the License for the specific language governing permissions and\r\n* limitations under the License.\r\n*/\r\n\r\npackage com.android.systemui.statusbar.notification.row\r\n\r\nimport android.content.Context\r\nimport android.util.AttributeSet\r\nimport android.widget.LinearLayout\r\nimport android.widget.TextView\r\nimport com.android.systemui.R\r\nimport com.android.systemui.statusbar.StatusBarIconView\r\nimport com.android.systemui.statusbar.notification.collection.NotificationEntry\r\n\r\nclass DungeonRow(context: Context, attrs: AttributeSet) : LinearLayout(context, attrs) {\r\n var entry: NotificationEntry? 
= null\r\n set(value) {\r\n field = value\r\n update()\r\n }\r\n\r\n private fun update() {\r\n (findViewById(R.id.app_name) as TextView).apply {\r\n text = entry?.row?.appName\r\n }\r\n\r\n (findViewById(R.id.icon) as StatusBarIconView).apply {\r\n set(entry?.icons?.statusBarIcon?.statusBarIcon)\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.5660685300827026, "alphanum_fraction": 0.5970636010169983, "avg_line_length": 20.703702926635742, "blob_id": "5a607c8492bca427439bd1109790fd51c8a26d13", "content_id": "019a052de998178694594f61398808ce7e4ade2c", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 613, "license_type": "permissive", "max_line_length": 99, "num_lines": 27, "path": "/media/tests/audiotests/shared_mem_test.h", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "// Copyright 2008 The Android Open Source Project\r\n\r\n#ifndef AUDIOTRACKTEST_H_\r\n#define AUDIOTRACKTEST_H_\r\n\r\nnamespace android {\r\n\r\nclass AudioTrackTest{\r\n public:\r\n AudioTrackTest(void);\r\n ~AudioTrackTest() {};\r\n\r\n void Execute(void);\r\n int Test01();\r\n\r\n void Generate(short *buffer, long bufferSz, long amplitude, unsigned long &phi, long dPhi);\r\n void InitSine();\r\n short ComputeSine(long amplitude, long phi);\r\n\r\n #define SIN_SZ 1024\r\n short sin1024[SIN_SZ]; // sine table 2*pi = 1024\r\n};\r\n\r\n};\r\n\r\n\r\n#endif /*AUDIOTRACKTEST_H_*/\r\n" }, { "alpha_fraction": 0.460088312625885, "alphanum_fraction": 0.463994562625885, "avg_line_length": 26.582523345947266, "blob_id": "3effd6e92aaa9c5345ed50578a49be5615f4c8fa", "content_id": "797b7e368433c4ab4d8fa1e65be4cc994d5e8970", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5888, "license_type": "permissive", "max_line_length": 75, "num_lines": 206, "path": "/tools/split-select/Rule.cpp", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2014 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#include \"Rule.h\"\r\n\r\n#include <utils/String8.h>\r\n\r\nusing namespace android;\r\n\r\nnamespace split {\r\n\r\ninline static void indentStr(String8& str, int indent) {\r\n while (indent > 0) {\r\n str.append(\" \");\r\n indent--;\r\n }\r\n}\r\n\r\nRule::Rule(const Rule& rhs)\r\n : RefBase()\r\n , op(rhs.op)\r\n , key(rhs.key)\r\n , negate(rhs.negate)\r\n , stringArgs(rhs.stringArgs)\r\n , longArgs(rhs.longArgs)\r\n , subrules(rhs.subrules) {\r\n}\r\n\r\nString8 Rule::toJson(int indent) const {\r\n String8 str;\r\n indentStr(str, indent);\r\n str.append(\"{\\n\");\r\n indent++;\r\n indentStr(str, indent);\r\n str.append(\"\\\"op\\\": \\\"\");\r\n switch (op) {\r\n case ALWAYS_TRUE:\r\n str.append(\"ALWAYS_TRUE\");\r\n break;\r\n case GREATER_THAN:\r\n str.append(\"GREATER_THAN\");\r\n 
break;\r\n case LESS_THAN:\r\n str.append(\"LESS_THAN\");\r\n break;\r\n case EQUALS:\r\n str.append(\"EQUALS\");\r\n break;\r\n case AND_SUBRULES:\r\n str.append(\"AND_SUBRULES\");\r\n break;\r\n case OR_SUBRULES:\r\n str.append(\"OR_SUBRULES\");\r\n break;\r\n case CONTAINS_ANY:\r\n str.append(\"CONTAINS_ANY\");\r\n break;\r\n default:\r\n str.appendFormat(\"%d\", op);\r\n break;\r\n }\r\n str.append(\"\\\"\");\r\n\r\n if (negate) {\r\n str.append(\",\\n\");\r\n indentStr(str, indent);\r\n str.append(\"\\\"negate\\\": true\");\r\n }\r\n\r\n bool includeKey = true;\r\n switch (op) {\r\n case AND_SUBRULES:\r\n case OR_SUBRULES:\r\n includeKey = false;\r\n break;\r\n default:\r\n break;\r\n }\r\n\r\n if (includeKey) {\r\n str.append(\",\\n\");\r\n indentStr(str, indent);\r\n str.append(\"\\\"property\\\": \\\"\");\r\n switch (key) {\r\n case NONE:\r\n str.append(\"NONE\");\r\n break;\r\n case SDK_VERSION:\r\n str.append(\"SDK_VERSION\");\r\n break;\r\n case SCREEN_DENSITY:\r\n str.append(\"SCREEN_DENSITY\");\r\n break;\r\n case NATIVE_PLATFORM:\r\n str.append(\"NATIVE_PLATFORM\");\r\n break;\r\n case LANGUAGE:\r\n str.append(\"LANGUAGE\");\r\n break;\r\n default:\r\n str.appendFormat(\"%d\", key);\r\n break;\r\n }\r\n str.append(\"\\\"\");\r\n }\r\n\r\n if (op == AND_SUBRULES || op == OR_SUBRULES) {\r\n str.append(\",\\n\");\r\n indentStr(str, indent);\r\n str.append(\"\\\"subrules\\\": [\\n\");\r\n const size_t subruleCount = subrules.size();\r\n for (size_t i = 0; i < subruleCount; i++) {\r\n str.append(subrules[i]->toJson(indent + 1));\r\n if (i != subruleCount - 1) {\r\n str.append(\",\");\r\n }\r\n str.append(\"\\n\");\r\n }\r\n indentStr(str, indent);\r\n str.append(\"]\");\r\n } else {\r\n switch (key) {\r\n case SDK_VERSION:\r\n case SCREEN_DENSITY: {\r\n str.append(\",\\n\");\r\n indentStr(str, indent);\r\n str.append(\"\\\"args\\\": [\");\r\n const size_t argCount = longArgs.size();\r\n for (size_t i = 0; i < argCount; i++) {\r\n if (i != 0) {\r\n str.append(\", \");\r\n }\r\n str.appendFormat(\"%d\", longArgs[i]);\r\n }\r\n str.append(\"]\");\r\n break;\r\n }\r\n case LANGUAGE:\r\n case NATIVE_PLATFORM: {\r\n str.append(\",\\n\");\r\n indentStr(str, indent);\r\n str.append(\"\\\"args\\\": [\");\r\n const size_t argCount = stringArgs.size();\r\n for (size_t i = 0; i < argCount; i++) {\r\n if (i != 0) {\r\n str.append(\", \");\r\n }\r\n str.append(stringArgs[i]);\r\n }\r\n str.append(\"]\");\r\n break;\r\n }\r\n default:\r\n break;\r\n }\r\n }\r\n str.append(\"\\n\");\r\n indent--;\r\n indentStr(str, indent);\r\n str.append(\"}\");\r\n return str;\r\n}\r\n\r\nsp<Rule> Rule::simplify(sp<Rule> rule) {\r\n if (rule->op != AND_SUBRULES && rule->op != OR_SUBRULES) {\r\n return rule;\r\n }\r\n\r\n Vector<sp<Rule> > newSubrules;\r\n newSubrules.setCapacity(rule->subrules.size());\r\n const size_t subruleCount = rule->subrules.size();\r\n for (size_t i = 0; i < subruleCount; i++) {\r\n sp<Rule> simplifiedRule = simplify(rule->subrules.editItemAt(i));\r\n if (simplifiedRule != NULL) {\r\n if (simplifiedRule->op == rule->op) {\r\n newSubrules.appendVector(simplifiedRule->subrules);\r\n } else {\r\n newSubrules.add(simplifiedRule);\r\n }\r\n }\r\n }\r\n\r\n // Use the post-flattening count here: a single original subrule can expand\r\n // into several flattened subrules, and keying on the old count would drop all but one.\r\n const size_t newSubruleCount = newSubrules.size();\r\n if (newSubruleCount == 0) {\r\n return NULL;\r\n } else if (newSubruleCount == 1) {\r\n return newSubrules.editTop();\r\n }\r\n rule->subrules = newSubrules;\r\n return rule;\r\n}\r\n\r\n} // namespace split\r\n" }, { "alpha_fraction": 0.7542761564254761, "alphanum_fraction": 
0.7596996426582336, "avg_line_length": 37.95000076293945, "blob_id": "b526a6cc723fdd5e5c40716a24bc60c49a50495c", "content_id": "a0b9ffaa45b60119c639dfa368d75f721b489c25", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 2397, "license_type": "permissive", "max_line_length": 79, "num_lines": 60, "path": "/tests/Camera2Tests/SmartCamera/README.txt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "Copyright 2013 The Android Open Source Project\r\n\r\nLicensed under the Apache License, Version 2.0 (the \"License\");\r\nyou may not use this file except in compliance with the License.\r\nYou may obtain a copy of the License at\r\n\r\n http://www.apache.org/licenses/LICENSE-2.0\r\n\r\nUnless required by applicable law or agreed to in writing, software\r\ndistributed under the License is distributed on an \"AS IS\" BASIS,\r\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\nSee the License for the specific language governing permissions and\r\nlimitations under the License.\r\n\r\n\r\nSmart Camera / Auto Snapshot (formerly named SimpleCamera) ReadMe\r\n\r\nCreated by: Benjamin W Hendricks\r\n\r\nHow to build the application:\r\nFrom the root of the tree, make SmartCamera will build the apk for the\r\ngeneric target. Otherwise, to build the application for a specific device,\r\nlunch to that device and then run mm while in the SimpleCamera directory.\r\nThen take the given install path (out/target/.../SmartCamera.apk)\r\nand run adb install on that apk. The application should\r\nthen appear in the launcher of your device.\r\nYou might also need to run adb sync after building to sync the\r\nlibsmartcamera_jni library.\r\nIn summary:\r\n make SmartCamera\r\n adb remount\r\n adb sync\r\n adb install -r $ANDROID_PRODUCT_OUT/data/app/SmartCamera.apk\r\n\r\nHow to run the application:\r\nOn a Nexus 7, open the application from the launcher and the camera preview\r\nshould appear. From there, you can go to the gallery with the gallery button or\r\npress start to begin capturing images. You can also change the number of images\r\nto be captured by changing the number on the spinner (between 1 and 10).\r\n\r\nWhat does it do:\r\nThe application tries to take good pictures for you automatically while in\r\nstart mode. On stop, the application will capture whatever images are in the\r\nbottom preview and save them to the Gallery. 
It does this by looking at the\r\nfollowing image features:\r\n - Sharpness\r\n - Brightness\r\n - Motion of the device\r\n - Colorfulness\r\n - Contrast\r\n - Exposure (over/under)\r\n\r\nBy comparing each of these features frame by frame, a score is calculated to\r\ndetermine whether an image is better or worse than the previous few frames,\r\nand from that score the application can separate the good images from the bad\r\nones.\r\n\r\nWhat libraries does it use:\r\n- Mobile Filter Framework (MFF)\r\n- Camera2 API\r\n- Renderscript\r\n" }, { "alpha_fraction": 0.7013487219810486, "alphanum_fraction": 0.7013487219810486, "avg_line_length": 20.565217971801758, "blob_id": "3ffe64d5b2a2d5f06ffb7dcf64191ebde1cbb738", "content_id": "4a01f7b220497345c3a7c8263787e4613d184ae4", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 519, "license_type": "permissive", "max_line_length": 68, "num_lines": 23, "path": "/tests/RollbackTest/README.txt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "This directory contains tests for the rollback manager service.\r\n\r\nDirectory structure\r\n===================\r\nRollbackTest\r\n - device-driven test for rollbacks not involving staged rollbacks.\r\n\r\nStagedRollbackTest\r\n - device-driven test for staged rollbacks.\r\n\r\nTestApp\r\n - source for dummy apks used in testing.\r\n\r\nTestApex\r\n - source for dummy apex modules used in testing.\r\n\r\nRunning the tests\r\n=================\r\n\r\nYou can manually run the tests as follows:\r\n\r\n atest RollbackTest\r\n atest StagedRollbackTest\r\n" }, { "alpha_fraction": 0.6081113219261169, "alphanum_fraction": 0.6135345697402954, "avg_line_length": 35.86606979370117, "blob_id": "77db50d3275826e03186ec2e72ca689ee3c203c9", "content_id": "1e0f56cc1dc7b5a7ae7c70e6c2930db134ae45e0", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 4241, "license_type": "permissive", "max_line_length": 87, "num_lines": 112, "path": "/services/core/java/com/android/server/pm/ProcessLoggingHandler.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2016 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.server.pm;\r\n\r\nimport android.app.admin.SecurityLog;\r\nimport android.content.Intent;\r\nimport android.os.Bundle;\r\nimport android.os.Handler;\r\nimport android.os.Message;\r\n\r\nimport com.android.internal.os.BackgroundThread;\r\n\r\nimport java.io.File;\r\nimport java.io.FileInputStream;\r\nimport java.io.IOException;\r\nimport java.security.MessageDigest;\r\nimport java.security.NoSuchAlgorithmException;\r\nimport java.util.HashMap;\r\nimport android.util.Slog;\r\n\r\npublic final class ProcessLoggingHandler extends Handler {\r\n\r\n private static final String 
TAG = \"ProcessLoggingHandler\";\r\n static final int LOG_APP_PROCESS_START_MSG = 1;\r\n static final int INVALIDATE_BASE_APK_HASH_MSG = 2;\r\n\r\n private final HashMap<String, String> mProcessLoggingBaseApkHashes = new HashMap();\r\n\r\n ProcessLoggingHandler() {\r\n super(BackgroundThread.getHandler().getLooper());\r\n }\r\n\r\n @Override\r\n public void handleMessage(Message msg) {\r\n switch (msg.what) {\r\n case LOG_APP_PROCESS_START_MSG: {\r\n Bundle bundle = msg.getData();\r\n String processName = bundle.getString(\"processName\");\r\n int uid = bundle.getInt(\"uid\");\r\n String seinfo = bundle.getString(\"seinfo\");\r\n String apkFile = bundle.getString(\"apkFile\");\r\n int pid = bundle.getInt(\"pid\");\r\n long startTimestamp = bundle.getLong(\"startTimestamp\");\r\n String apkHash = computeStringHashOfApk(apkFile);\r\n SecurityLog.writeEvent(SecurityLog.TAG_APP_PROCESS_START, processName,\r\n startTimestamp, uid, pid, seinfo, apkHash);\r\n break;\r\n }\r\n case INVALIDATE_BASE_APK_HASH_MSG: {\r\n Bundle bundle = msg.getData();\r\n mProcessLoggingBaseApkHashes.remove(bundle.getString(\"apkFile\"));\r\n break;\r\n }\r\n }\r\n }\r\n\r\n void invalidateProcessLoggingBaseApkHash(String apkPath) {\r\n Bundle data = new Bundle();\r\n data.putString(\"apkFile\", apkPath);\r\n Message msg = obtainMessage(INVALIDATE_BASE_APK_HASH_MSG);\r\n msg.setData(data);\r\n sendMessage(msg);\r\n }\r\n\r\n private String computeStringHashOfApk(String apkFile) {\r\n if (apkFile == null) {\r\n return \"No APK\";\r\n }\r\n String apkHash = mProcessLoggingBaseApkHashes.get(apkFile);\r\n if (apkHash == null) {\r\n try {\r\n byte[] hash = computeHashOfApkFile(apkFile);\r\n StringBuilder sb = new StringBuilder();\r\n for (int i = 0; i < hash.length; i++) {\r\n sb.append(String.format(\"%02x\", hash[i]));\r\n }\r\n apkHash = sb.toString();\r\n mProcessLoggingBaseApkHashes.put(apkFile, apkHash);\r\n } catch (IOException | NoSuchAlgorithmException e) {\r\n Slog.w(TAG, \"computeStringHashOfApk() failed\", e);\r\n }\r\n }\r\n return apkHash != null ? 
apkHash : \"Failed to count APK hash\";\r\n }\r\n\r\n private byte[] computeHashOfApkFile(String packageArchiveLocation)\r\n throws IOException, NoSuchAlgorithmException {\r\n MessageDigest md = MessageDigest.getInstance(\"SHA-256\");\r\n FileInputStream input = new FileInputStream(new File(packageArchiveLocation));\r\n byte[] buffer = new byte[65536];\r\n int size;\r\n while ((size = input.read(buffer)) > 0) {\r\n md.update(buffer, 0, size);\r\n }\r\n input.close();\r\n return md.digest();\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.5847760438919067, "alphanum_fraction": 0.5899972319602966, "avg_line_length": 33.33009719848633, "blob_id": "74f7b00f330aac6fbe737c2d6210e855780b3e49", "content_id": "d2c83ac251145d493d510661a79087985e1e415a", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 3639, "license_type": "permissive", "max_line_length": 100, "num_lines": 103, "path": "/tests/TransitionTests/src/com/android/transitiontests/ListViewAddRemoveNoTransition.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2013 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\npackage com.android.transitiontests;\r\n\r\nimport android.app.Activity;\r\nimport android.content.Context;\r\nimport android.os.Bundle;\r\nimport android.view.View;\r\nimport android.widget.AdapterView;\r\nimport android.widget.ArrayAdapter;\r\nimport android.widget.LinearLayout;\r\nimport android.widget.ListView;\r\nimport android.widget.TextView;\r\n\r\n\r\nimport java.util.ArrayList;\r\nimport java.util.HashMap;\r\nimport java.util.List;\r\n\r\npublic class ListViewAddRemoveNoTransition extends Activity {\r\n\r\n final ArrayList<String> numList = new ArrayList<String>();\r\n\r\n @Override\r\n public void onCreate(Bundle savedInstanceState) {\r\n super.onCreate(savedInstanceState);\r\n setContentView(R.layout.list_view_add_remove);\r\n\r\n final LinearLayout container = (LinearLayout) findViewById(R.id.container);\r\n\r\n final ListView listview = (ListView) findViewById(R.id.listview);\r\n for (int i = 0; i < 200; ++i) {\r\n numList.add(Integer.toString(i));\r\n }\r\n final StableArrayAdapter adapter = new StableArrayAdapter(this,\r\n android.R.layout.simple_list_item_1, numList);\r\n listview.setAdapter(adapter);\r\n\r\n listview.setOnItemClickListener(new AdapterView.OnItemClickListener() {\r\n\r\n @Override\r\n public void onItemClick(AdapterView<?> parent, final View view, int position, long id) {\r\n String item = (String) parent.getItemAtPosition(position);\r\n for (int i = 0; i < listview.getChildCount(); ++i) {\r\n TextView v = (TextView) listview.getChildAt(i);\r\n if (!item.equals(v.getText())) {\r\n v.setHasTransientState(true);\r\n }\r\n }\r\n numList.remove(item);\r\n adapter.notifyDataSetChanged();\r\n view.postDelayed(new Runnable() {\r\n @Override\r\n public void run() {\r\n for (int i = 
0; i < listview.getChildCount(); ++i) {\r\n TextView v = (TextView) listview.getChildAt(i);\r\n v.setHasTransientState(false);\r\n }\r\n }\r\n }, 200);\r\n }\r\n\r\n });\r\n }\r\n\r\n private class StableArrayAdapter extends ArrayAdapter<String> {\r\n\r\n HashMap<String, Integer> mIdMap = new HashMap<String, Integer>();\r\n\r\n public StableArrayAdapter(Context context, int textViewResourceId,\r\n List<String> objects) {\r\n super(context, textViewResourceId, objects);\r\n for (int i = 0; i < objects.size(); ++i) {\r\n mIdMap.put(objects.get(i), i);\r\n }\r\n }\r\n\r\n @Override\r\n public long getItemId(int position) {\r\n String item = getItem(position);\r\n return mIdMap.get(item);\r\n }\r\n\r\n @Override\r\n public boolean hasStableIds() {\r\n return true;\r\n }\r\n\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6912698149681091, "alphanum_fraction": 0.6984127163887024, "avg_line_length": 36.181819915771484, "blob_id": "45317300f5f044d048db8e555a40c7afd57d4916", "content_id": "0c52762c3d8d9c04b79419ac2c51a8314609d38c", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1260, "license_type": "permissive", "max_line_length": 90, "num_lines": 33, "path": "/core/java/android/hardware/camera2/impl/GetCommand.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2014 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage android.hardware.camera2.impl;\r\n\r\n/**\r\n * Getter interface for use with Command pattern metadata value getters.\r\n */\r\npublic interface GetCommand {\r\n\r\n /**\r\n * Get the value from the given {@link CameraMetadataNative} object.\r\n *\r\n * @param metadata the {@link CameraMetadataNative} object to get the value from.\r\n * @param key the {@link CameraMetadataNative.Key} to look up.\r\n * @param <T> the type of the value.\r\n * @return the value for a given {@link CameraMetadataNative.Key}.\r\n */\r\n public <T> T getValue(CameraMetadataNative metadata, CameraMetadataNative.Key<T> key);\r\n}\r\n" }, { "alpha_fraction": 0.677190899848938, "alphanum_fraction": 0.6825022101402283, "avg_line_length": 34.440860748291016, "blob_id": "946ce9c0f4b06f3cd36f992a877766a8ebe0c3c1", "content_id": "4ed175aff099106cf7386073f3abc29521dc0c49", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 3389, "license_type": "permissive", "max_line_length": 89, "num_lines": 93, "path": "/packages/SystemUI/tests/src/com/android/keyguard/clock/SettingsWrapperTest.kt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2019 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain 
a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License\r\n */\r\n\r\npackage com.android.keyguard.clock\r\n\r\nimport android.testing.AndroidTestingRunner\r\nimport androidx.test.filters.SmallTest\r\nimport com.android.systemui.SysuiTestCase\r\nimport com.google.common.truth.Truth.assertThat\r\nimport org.json.JSONObject\r\nimport org.junit.Before\r\nimport org.junit.Test\r\nimport org.junit.runner.RunWith\r\nimport org.mockito.Mockito.mock\r\nimport org.mockito.Mockito.never\r\nimport org.mockito.Mockito.verify\r\n\r\nprivate const val PACKAGE = \"com.android.keyguard.clock.Clock\"\r\nprivate const val CLOCK_FIELD = \"clock\"\r\nprivate const val TIMESTAMP_FIELD = \"_applied_timestamp\"\r\nprivate const val USER_ID = 0\r\n\r\n@RunWith(AndroidTestingRunner::class)\r\n@SmallTest\r\nclass SettingsWrapperTest : SysuiTestCase() {\r\n\r\n private lateinit var wrapper: SettingsWrapper\r\n private lateinit var migration: SettingsWrapper.Migration\r\n\r\n @Before\r\n fun setUp() {\r\n migration = mock(SettingsWrapper.Migration::class.java)\r\n wrapper = SettingsWrapper(getContext().contentResolver, migration)\r\n }\r\n\r\n @Test\r\n fun testDecodeUnnecessary() {\r\n // GIVEN a settings value that doesn't need to be decoded\r\n val value = PACKAGE\r\n // WHEN the value is decoded\r\n val decoded = wrapper.decode(value, USER_ID)\r\n // THEN the same value is returned, because decoding isn't necessary.\r\n // TODO(b/135674383): Null should be returned when the migration code in removed.\r\n assertThat(decoded).isEqualTo(value)\r\n // AND the value is migrated to JSON format\r\n verify(migration).migrate(value, USER_ID)\r\n }\r\n\r\n @Test\r\n fun testDecodeJSON() {\r\n // GIVEN a settings value that is encoded in JSON\r\n val json: JSONObject = JSONObject()\r\n json.put(CLOCK_FIELD, PACKAGE)\r\n json.put(TIMESTAMP_FIELD, System.currentTimeMillis())\r\n val value = json.toString()\r\n // WHEN the value is decoded\r\n val decoded = wrapper.decode(value, USER_ID)\r\n // THEN the clock field should have been extracted\r\n assertThat(decoded).isEqualTo(PACKAGE)\r\n }\r\n\r\n @Test\r\n fun testDecodeJSONWithoutClockField() {\r\n // GIVEN a settings value that doesn't contain the CLOCK_FIELD\r\n val json: JSONObject = JSONObject()\r\n json.put(TIMESTAMP_FIELD, System.currentTimeMillis())\r\n val value = json.toString()\r\n // WHEN the value is decoded\r\n val decoded = wrapper.decode(value, USER_ID)\r\n // THEN null is returned\r\n assertThat(decoded).isNull()\r\n // AND the value is not migrated to JSON format\r\n verify(migration, never()).migrate(value, USER_ID)\r\n }\r\n\r\n @Test\r\n fun testDecodeNullJSON() {\r\n assertThat(wrapper.decode(null, USER_ID)).isNull()\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6570122241973877, "alphanum_fraction": 0.6768292784690857, "avg_line_length": 33.4054069519043, "blob_id": "72186e5f17b7327f8d9492e542ebaaf072f46329", "content_id": "da85a7b61e0c11729351e72b92083e2710a434ac", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1312, "license_type": "permissive", "max_line_length": 99, "num_lines": 37, "path": 
"/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/pixelutils.cpp", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2013 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#include \"pixelutils.h\"\r\n\r\n#include <stdint.h>\r\n\r\ntypedef uint32_t uint32;\r\n\r\nvoid JNI_PIXELUTILS_METHOD(nativeCopyPixels)(\r\n JNIEnv* env, jclass clazz, jobject input, jobject output, jint width, jint height, jint offset,\r\n jint pixStride, jint rowStride) {\r\n uint32* pInPix = static_cast<uint32*>(env->GetDirectBufferAddress(input));\r\n uint32* pOutput = static_cast<uint32*>(env->GetDirectBufferAddress(output));\r\n uint32* pOutRow = pOutput + offset;\r\n for (int y = 0; y < height; ++y) {\r\n uint32* pOutPix = pOutRow;\r\n for (int x = 0; x < width; ++x) {\r\n *pOutPix = *(pInPix++);\r\n pOutPix += pixStride;\r\n }\r\n pOutRow += rowStride;\r\n }\r\n}\r\n\r\n" }, { "alpha_fraction": 0.618950366973877, "alphanum_fraction": 0.6211501955986023, "avg_line_length": 36.569698333740234, "blob_id": "f0e861011b958b32d333eb033af0bd5a9fe3c0e6", "content_id": "8dd326fceafbdff33ae187b0c6535cefae22428e", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 6364, "license_type": "permissive", "max_line_length": 97, "num_lines": 165, "path": "/packages/PrintSpooler/src/com/android/printspooler/util/MediaSizeUtils.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2013 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.printspooler.util;\r\n\r\nimport android.annotation.NonNull;\r\nimport android.content.Context;\r\nimport android.content.pm.ActivityInfo;\r\nimport android.content.res.Configuration;\r\nimport android.print.PrintAttributes.MediaSize;\r\n\r\nimport com.android.printspooler.R;\r\n\r\nimport java.util.Comparator;\r\nimport java.util.HashMap;\r\nimport java.util.Map;\r\n\r\n/**\r\n * Utility functions and classes for dealing with media sizes.\r\n */\r\npublic final class MediaSizeUtils {\r\n\r\n private static Map<MediaSize, Integer> sMediaSizeToStandardMap;\r\n\r\n /** The media size standard for all media sizes no standard is defined for */\r\n private static int sMediaSizeStandardIso;\r\n\r\n private MediaSizeUtils() {\r\n /* do 
nothing - hide constructor */\r\n }\r\n\r\n /**\r\n * Gets the default media size for the current locale.\r\n *\r\n * @param context Context for accessing resources.\r\n * @return The default media size.\r\n */\r\n public static MediaSize getDefault(Context context) {\r\n String mediaSizeId = context.getString(R.string.mediasize_default);\r\n return MediaSize.getStandardMediaSizeById(mediaSizeId);\r\n }\r\n\r\n /**\r\n * Get the standard the {@link MediaSize} belongs to.\r\n *\r\n * @param context The context of the caller\r\n * @param mediaSize The {@link MediaSize} to be resolved\r\n *\r\n * @return The standard the {@link MediaSize} belongs to\r\n */\r\n private static int getStandardForMediaSize(Context context, MediaSize mediaSize) {\r\n if (sMediaSizeToStandardMap == null) {\r\n sMediaSizeStandardIso = Integer.parseInt(context.getString(\r\n R.string.mediasize_standard_iso));\r\n\r\n sMediaSizeToStandardMap = new HashMap<>();\r\n String[] mediaSizeToStandardMapValues = context.getResources()\r\n .getStringArray(R.array.mediasize_to_standard_map);\r\n final int mediaSizeToStandardCount = mediaSizeToStandardMapValues.length;\r\n for (int i = 0; i < mediaSizeToStandardCount; i += 2) {\r\n String mediaSizeId = mediaSizeToStandardMapValues[i];\r\n MediaSize key = MediaSize.getStandardMediaSizeById(mediaSizeId);\r\n int value = Integer.parseInt(mediaSizeToStandardMapValues[i + 1]);\r\n sMediaSizeToStandardMap.put(key, value);\r\n }\r\n }\r\n Integer standard = sMediaSizeToStandardMap.get(mediaSize);\r\n return (standard != null) ? standard : sMediaSizeStandardIso;\r\n }\r\n\r\n /**\r\n * Comparator for ordering standard media sizes. The ones for the current\r\n * standard go to the top and the ones for the other standards follow grouped\r\n * by standard. 
Media sizes of the same standard are ordered alphabetically.\r\n */\r\n public static final class MediaSizeComparator implements Comparator<MediaSize> {\r\n private final Context mContext;\r\n\r\n /** Current configuration */\r\n private Configuration mCurrentConfig;\r\n\r\n /** The standard to use for the current locale */\r\n private int mCurrentStandard;\r\n\r\n /** Mapping from media size to label */\r\n private final @NonNull Map<MediaSize, String> mMediaSizeToLabel;\r\n\r\n public MediaSizeComparator(Context context) {\r\n mContext = context;\r\n mMediaSizeToLabel = new HashMap<>();\r\n mCurrentStandard = Integer.parseInt(mContext.getString(R.string.mediasize_standard));\r\n }\r\n\r\n /**\r\n * Handle a configuration change by reloading all resources.\r\n *\r\n * @param newConfig The new configuration that will be applied.\r\n */\r\n public void onConfigurationChanged(@NonNull Configuration newConfig) {\r\n if (mCurrentConfig == null ||\r\n (newConfig.diff(mCurrentConfig) & ActivityInfo.CONFIG_LOCALE) != 0) {\r\n mCurrentStandard = Integer\r\n .parseInt(mContext.getString(R.string.mediasize_standard));\r\n mMediaSizeToLabel.clear();\r\n\r\n mCurrentConfig = newConfig;\r\n }\r\n }\r\n\r\n /**\r\n * Get the label for a {@link MediaSize}.\r\n *\r\n * @param context The context the label should be loaded for\r\n * @param mediaSize The {@link MediaSize} to resolve\r\n *\r\n * @return The label for the media size\r\n */\r\n public @NonNull String getLabel(@NonNull Context context, @NonNull MediaSize mediaSize) {\r\n String label = mMediaSizeToLabel.get(mediaSize);\r\n\r\n if (label == null) {\r\n label = mediaSize.getLabel(context.getPackageManager());\r\n mMediaSizeToLabel.put(mediaSize, label);\r\n }\r\n\r\n return label;\r\n }\r\n\r\n @Override\r\n public int compare(MediaSize lhs, MediaSize rhs) {\r\n int lhsStandard = getStandardForMediaSize(mContext, lhs);\r\n int rhsStandard = getStandardForMediaSize(mContext, rhs);\r\n\r\n // The current standard always wins.\r\n if (lhsStandard == mCurrentStandard) {\r\n if (rhsStandard != mCurrentStandard) {\r\n return -1;\r\n }\r\n } else if (rhsStandard == mCurrentStandard) {\r\n return 1;\r\n }\r\n\r\n if (lhsStandard != rhsStandard) {\r\n // Different standards - use the standard ordering.\r\n return Integer.valueOf(lhsStandard).compareTo(rhsStandard);\r\n } else {\r\n // Same standard - sort alphabetically by label.\r\n return getLabel(mContext, lhs).compareTo(getLabel(mContext, rhs));\r\n }\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.7854251265525818, "alphanum_fraction": 0.7854251265525818, "avg_line_length": 47, "blob_id": "ff13850537b70bad45d59cb5aeb1b1ffd1165fa8", "content_id": "f01bab2b2f4b4a7da691567131d1f2319f9b2ada", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 247, "license_type": "permissive", "max_line_length": 87, "num_lines": 5, "path": "/core/java/android/ddm/README.txt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "Some classes that handle DDM traffic.\r\n\r\nIt's not necessary to put all DDM-related code in this package; this just\r\nhas the essentials. 
Subclass org.apache.harmony.dalvik.ddmc.ChunkHandler and add a new\r\nregistration call in DdmRegister.java.\r\n\r\n" }, { "alpha_fraction": 0.618468165397644, "alphanum_fraction": 0.6238368153572083, "avg_line_length": 29.393259048461914, "blob_id": "3cd99d17048ab953a44321c0404b7b48392fb25a", "content_id": "f987198b4af3b41cfa193ad3c2cfef0a4d8f1742", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2794, "license_type": "permissive", "max_line_length": 97, "num_lines": 89, "path": "/services/core/java/com/android/server/timezone/PackageStatus.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2017 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.server.timezone;\r\n\r\nimport android.annotation.IntDef;\r\n\r\nimport java.lang.annotation.Retention;\r\nimport java.lang.annotation.RetentionPolicy;\r\n\r\n/**\r\n * Information about the status of the time zone update / data packages that are persisted by the\r\n * Android system.\r\n */\r\nfinal class PackageStatus {\r\n\r\n @Retention(RetentionPolicy.SOURCE)\r\n @IntDef({ CHECK_STARTED, CHECK_COMPLETED_SUCCESS, CHECK_COMPLETED_FAILURE })\r\n @interface CheckStatus {}\r\n\r\n /** A time zone update check has been started but not yet completed. */\r\n static final int CHECK_STARTED = 1;\r\n /** A time zone update check has been completed and succeeded. */\r\n static final int CHECK_COMPLETED_SUCCESS = 2;\r\n /** A time zone update check has been completed and failed. 
*/\r\n static final int CHECK_COMPLETED_FAILURE = 3;\r\n\r\n @CheckStatus\r\n final int mCheckStatus;\r\n\r\n // Non-null\r\n final PackageVersions mVersions;\r\n\r\n PackageStatus(@CheckStatus int checkStatus, PackageVersions versions) {\r\n this.mCheckStatus = checkStatus;\r\n if (checkStatus < 1 || checkStatus > 3) {\r\n throw new IllegalArgumentException(\"Unknown checkStatus \" + checkStatus);\r\n }\r\n if (versions == null) {\r\n throw new NullPointerException(\"versions == null\");\r\n }\r\n this.mVersions = versions;\r\n }\r\n\r\n @Override\r\n public boolean equals(Object o) {\r\n if (this == o) {\r\n return true;\r\n }\r\n if (o == null || getClass() != o.getClass()) {\r\n return false;\r\n }\r\n\r\n PackageStatus that = (PackageStatus) o;\r\n\r\n if (mCheckStatus != that.mCheckStatus) {\r\n return false;\r\n }\r\n return mVersions.equals(that.mVersions);\r\n }\r\n\r\n @Override\r\n public int hashCode() {\r\n int result = mCheckStatus;\r\n result = 31 * result + mVersions.hashCode();\r\n return result;\r\n }\r\n\r\n @Override\r\n public String toString() {\r\n return \"PackageStatus{\" +\r\n \"mCheckStatus=\" + mCheckStatus +\r\n \", mVersions=\" + mVersions +\r\n '}';\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6988636255264282, "alphanum_fraction": 0.6988636255264282, "avg_line_length": 27, "blob_id": "b6b13edf2a573fd23d351b5a81ef744ad351e31d", "content_id": "da97475ea211f9f182ed8f1c7b3dfd812e677b97", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 176, "license_type": "permissive", "max_line_length": 58, "num_lines": 6, "path": "/cmds/vr/vr", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "#!/system/bin/sh\r\n# Script to start \"vr\" on the device\r\n#\r\nbase=/system\r\nexport CLASSPATH=$base/framework/vr.jar\r\nexec app_process $base/bin com.android.commands.vr.Vr \"$@\"\r\n\r\n" }, { "alpha_fraction": 0.5806045532226562, "alphanum_fraction": 0.5887909531593323, "avg_line_length": 36.73170852661133, "blob_id": "2d21ea515ff273ebf58c18c31b7e2943991d754b", "content_id": "3cadc826816b97331921444402547bbf3b608704", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1588, "license_type": "permissive", "max_line_length": 96, "num_lines": 41, "path": "/tests/FrameworkPerf/src/com/android/frameworkperf/FrameworkPerfTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "package com.android.frameworkperf;\r\n\r\nimport android.app.Activity;\r\nimport android.os.Bundle;\r\nimport android.test.ActivityInstrumentationTestCase2;\r\n\r\npublic class FrameworkPerfTest extends ActivityInstrumentationTestCase2<FrameworkPerfActivity> {\r\n\r\n private static final int TEST_TIMEOUT = 15 * 60 * 1000; //15 minutes\r\n\r\n public FrameworkPerfTest() {\r\n super(\"com.android.frameworkperf\", FrameworkPerfActivity.class);\r\n }\r\n\r\n public void testFrameworkPerf() {\r\n final FrameworkPerfActivity activity = getActivity();\r\n synchronized (activity.mResultNotifier) {\r\n getInstrumentation().runOnMainSync(new Runnable() {\r\n @Override\r\n public void run() {\r\n activity.startRunning();\r\n }\r\n });\r\n try {\r\n activity.mResultNotifier.wait(TEST_TIMEOUT);\r\n } catch (InterruptedException e) {\r\n fail(\"test interrupted.\");\r\n }\r\n }\r\n Bundle testResult = new Bundle();\r\n synchronized (activity.mResults) {\r\n assertTrue(\"test 
results were empty.\", activity.mResults.size() > 0);\r\n            for (RunResult result : activity.mResults) {\r\n                testResult.putString(result.name, String.format(\"%f,%d,%d,%f,%d,%d\",\r\n                        result.getFgMsPerOp(), result.fgOps, result.fgTime,\r\n                        result.getBgMsPerOp(), result.bgOps, result.bgTime));\r\n            }\r\n        }\r\n        getInstrumentation().sendStatus(Activity.RESULT_OK, testResult);\r\n    }\r\n}\r\n" }, { "alpha_fraction": 0.7861635088920593, "alphanum_fraction": 0.7861635088920593, "avg_line_length": 51, "blob_id": "cd50f922e5709b9b3ec0f44b669b54277f8abb14", "content_id": "3b6ecf5301b33c71b78a13cb34db9e5f33a2d46a", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 1908, "license_type": "permissive", "max_line_length": 85, "num_lines": 36, "path": "/media/lib/remotedisplay/README.txt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "There are two libraries defined in this directory:\r\nFirst, com.android.media.remotedisplay.jar is a shared java library\r\ncontaining classes required by unbundled remote display providers.\r\nSecond, com.android.media.remotedisplay.stubs.jar is a stub for the shared\r\nlibrary which provides build-time APIs to the unbundled clients.\r\n\r\nAt runtime, the shared library is added to the classloader of the app via the\r\n<uses-library> tag. And since Java always tries to load a class from the\r\nparent classloader, regardless of whether the stub library is linked to the\r\napp statically or dynamically, the real classes are loaded from the shared\r\nlibrary.\r\n\r\n--- Rules of this library ---\r\no The stub library is effectively a PUBLIC API for unbundled remote display providers\r\n   that may be distributed outside the system image. So it MUST BE API STABLE.\r\n   You can add but not remove. The rules are the same as for the\r\n   public platform SDK API.\r\no This library can see and instantiate internal platform classes, but it must not\r\n   expose them in any public method (or by extending them via inheritance). This would\r\n   break clients of the library because they cannot see the internal platform classes.\r\n\r\nThis library is distributed in the system image, and loaded as\r\na shared library. So you can change the implementation, but not\r\nthe interface. In this way it is like framework.jar.\r\n\r\n--- Why does this library exist? ---\r\n\r\nUnbundled remote display providers (such as Cast) cannot use internal\r\nplatform classes.\r\n\r\nThis library will eventually be replaced when the media route provider\r\ninfrastructure that is currently defined in the support library is reintegrated\r\nwith the framework in a new API. 
That API isn't ready yet so this\r\nlibrary is a compromise to make new capabilities available to the system\r\nwithout exposing the full surface area of the support library media\r\nroute provider protocol.\r\n" }, { "alpha_fraction": 0.5945945978164673, "alphanum_fraction": 0.642642617225647, "avg_line_length": 18.8125, "blob_id": "128218357822192f18715fdc5d55bb4f10ce11f9", "content_id": "38c31963d1e5c3a5738918d0f9bd905a8dcfa224", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 333, "license_type": "permissive", "max_line_length": 58, "num_lines": 16, "path": "/startop/scripts/app_startup/lib/adb_utils_test.py", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "import adb_utils\r\n\r\n# pip imports\r\nimport pytest\r\n\r\ndef test_parse_time_to_milliseconds():\r\n # Act\r\n result1 = adb_utils.parse_time_to_milliseconds('+1s7ms')\r\n result2 = adb_utils.parse_time_to_milliseconds('+523ms')\r\n\r\n # Assert\r\n assert result1 == 1007\r\n assert result2 == 523\r\n\r\nif __name__ == '__main__':\r\n pytest.main()\r\n" }, { "alpha_fraction": 0.6190476417541504, "alphanum_fraction": 0.6190476417541504, "avg_line_length": 19, "blob_id": "fc2fe0f7447b1c0698cb3f7c4c9f7ac9863f6665", "content_id": "450dcf2395b77a29e374d4450138604718164d42", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 42, "license_type": "permissive", "max_line_length": 22, "num_lines": 2, "path": "/cmds/device_config/device_config", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "#!/system/bin/sh\r\ncmd device_config \"$@\"\r\n" }, { "alpha_fraction": 0.7653061151504517, "alphanum_fraction": 0.7653061151504517, "avg_line_length": 27.399999618530273, "blob_id": "5d28e3171c4583086a21237d8e81996eb621aa6d", "content_id": "26c25c313decd081f3a3499770a1b3872d3dcb3e", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 294, "license_type": "permissive", "max_line_length": 60, "num_lines": 10, "path": "/packages/SettingsLib/search/common.mk", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "# Include this file to generate SearchIndexableResourcesImpl\r\n\r\nLOCAL_ANNOTATION_PROCESSORS += \\\r\n SettingsLib-annotation-processor\r\n\r\nLOCAL_ANNOTATION_PROCESSOR_CLASSES += \\\r\n com.android.settingslib.search.IndexableProcessor\r\n\r\nLOCAL_STATIC_JAVA_LIBRARIES += \\\r\n SettingsLib-search\r\n" }, { "alpha_fraction": 0.71417635679245, "alphanum_fraction": 0.7157073020935059, "avg_line_length": 35.54022979736328, "blob_id": "56df0651a9de994d84c7d1ef439adfb53fd8260b", "content_id": "ca3aa42b39b6b123b26b5ff6d3d2c4dfeee5cebf", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 6532, "license_type": "permissive", "max_line_length": 96, "num_lines": 174, "path": "/packages/SystemUI/tests/src/com/android/systemui/doze/DozeUiTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2017 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n 
*\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.systemui.doze;\r\n\r\nimport static com.android.systemui.doze.DozeMachine.State.DOZE;\r\nimport static com.android.systemui.doze.DozeMachine.State.DOZE_AOD;\r\nimport static com.android.systemui.doze.DozeMachine.State.DOZE_AOD_PAUSED;\r\nimport static com.android.systemui.doze.DozeMachine.State.INITIALIZED;\r\nimport static com.android.systemui.doze.DozeMachine.State.UNINITIALIZED;\r\n\r\nimport static org.mockito.ArgumentMatchers.any;\r\nimport static org.mockito.ArgumentMatchers.anyInt;\r\nimport static org.mockito.ArgumentMatchers.anyLong;\r\nimport static org.mockito.ArgumentMatchers.eq;\r\nimport static org.mockito.Mockito.clearInvocations;\r\nimport static org.mockito.Mockito.never;\r\nimport static org.mockito.Mockito.reset;\r\nimport static org.mockito.Mockito.verify;\r\nimport static org.mockito.Mockito.when;\r\n\r\nimport android.app.AlarmManager;\r\nimport android.os.Handler;\r\nimport android.os.HandlerThread;\r\n\r\nimport androidx.test.filters.SmallTest;\r\nimport androidx.test.runner.AndroidJUnit4;\r\n\r\nimport com.android.keyguard.KeyguardUpdateMonitor;\r\nimport com.android.systemui.SysuiTestCase;\r\nimport com.android.systemui.statusbar.phone.DozeParameters;\r\nimport com.android.systemui.util.wakelock.WakeLockFake;\r\n\r\nimport org.junit.After;\r\nimport org.junit.Before;\r\nimport org.junit.Test;\r\nimport org.junit.runner.RunWith;\r\nimport org.mockito.Mock;\r\nimport org.mockito.MockitoAnnotations;\r\n\r\n@RunWith(AndroidJUnit4.class)\r\n@SmallTest\r\npublic class DozeUiTest extends SysuiTestCase {\r\n\r\n @Mock\r\n private AlarmManager mAlarmManager;\r\n @Mock\r\n private DozeMachine mMachine;\r\n @Mock\r\n private DozeParameters mDozeParameters;\r\n @Mock\r\n private KeyguardUpdateMonitor mKeyguardUpdateMonitor;\r\n @Mock\r\n private DozeHost mHost;\r\n @Mock\r\n private DozeLog mDozeLog;\r\n private WakeLockFake mWakeLock;\r\n private Handler mHandler;\r\n private HandlerThread mHandlerThread;\r\n private DozeUi mDozeUi;\r\n\r\n @Before\r\n public void setUp() throws Exception {\r\n MockitoAnnotations.initMocks(this);\r\n\r\n mHandlerThread = new HandlerThread(\"DozeUiTest\");\r\n mHandlerThread.start();\r\n mWakeLock = new WakeLockFake();\r\n mHandler = mHandlerThread.getThreadHandler();\r\n\r\n mDozeUi = new DozeUi(mContext, mAlarmManager, mMachine, mWakeLock, mHost, mHandler,\r\n mDozeParameters, mKeyguardUpdateMonitor, mDozeLog);\r\n }\r\n\r\n @After\r\n public void tearDown() throws Exception {\r\n mHandlerThread.quit();\r\n mHandler = null;\r\n mHandlerThread = null;\r\n }\r\n\r\n @Test\r\n public void pausingAndUnpausingAod_registersTimeTickAfterUnpausing() throws Exception {\r\n mDozeUi.transitionTo(UNINITIALIZED, INITIALIZED);\r\n mDozeUi.transitionTo(INITIALIZED, DOZE_AOD);\r\n mDozeUi.transitionTo(DOZE_AOD, DOZE_AOD_PAUSED);\r\n\r\n clearInvocations(mAlarmManager);\r\n\r\n mDozeUi.transitionTo(DOZE_AOD_PAUSED, DOZE_AOD);\r\n\r\n verify(mAlarmManager).setExact(anyInt(), anyLong(), eq(\"doze_time_tick\"), any(), any());\r\n }\r\n\r\n @Test\r\n public void propagatesAnimateScreenOff_noAlwaysOn() {\r\n reset(mHost);\r\n 
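// Clear interactions recorded against mHost during setUp() so the verify() below only sees this test's call.\r\n        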
when(mDozeParameters.getAlwaysOn()).thenReturn(false);\r\n when(mDozeParameters.getDisplayNeedsBlanking()).thenReturn(false);\r\n\r\n mDozeUi.getKeyguardCallback().onKeyguardVisibilityChanged(false);\r\n verify(mHost).setAnimateScreenOff(eq(false));\r\n }\r\n\r\n @Test\r\n public void propagatesAnimateScreenOff_alwaysOn() {\r\n reset(mHost);\r\n when(mDozeParameters.getAlwaysOn()).thenReturn(true);\r\n when(mDozeParameters.getDisplayNeedsBlanking()).thenReturn(false);\r\n\r\n // Take over when the keyguard is visible.\r\n mDozeUi.getKeyguardCallback().onKeyguardVisibilityChanged(true);\r\n verify(mHost).setAnimateScreenOff(eq(true));\r\n\r\n // Do not animate screen-off when keyguard isn't visible - PowerManager will do it.\r\n mDozeUi.getKeyguardCallback().onKeyguardVisibilityChanged(false);\r\n verify(mHost).setAnimateScreenOff(eq(false));\r\n }\r\n\r\n @Test\r\n public void neverAnimateScreenOff_whenNotSupported() {\r\n // Re-initialize DozeParameters saying that the display requires blanking.\r\n reset(mDozeParameters);\r\n reset(mHost);\r\n when(mDozeParameters.getDisplayNeedsBlanking()).thenReturn(true);\r\n mDozeUi = new DozeUi(mContext, mAlarmManager, mMachine, mWakeLock, mHost, mHandler,\r\n mDozeParameters, mKeyguardUpdateMonitor, mDozeLog);\r\n\r\n // Never animate if display doesn't support it.\r\n mDozeUi.getKeyguardCallback().onKeyguardVisibilityChanged(true);\r\n mDozeUi.getKeyguardCallback().onKeyguardVisibilityChanged(false);\r\n verify(mHost, never()).setAnimateScreenOff(eq(false));\r\n }\r\n\r\n @Test\r\n public void transitionSetsAnimateWakeup_alwaysOn() {\r\n when(mDozeParameters.getAlwaysOn()).thenReturn(true);\r\n when(mDozeParameters.getDisplayNeedsBlanking()).thenReturn(false);\r\n mDozeUi.transitionTo(UNINITIALIZED, DOZE);\r\n verify(mHost).setAnimateWakeup(eq(true));\r\n }\r\n\r\n @Test\r\n public void keyguardVisibility_changesControlScreenOffAnimation() {\r\n // Pre-condition\r\n reset(mDozeParameters);\r\n when(mDozeParameters.getAlwaysOn()).thenReturn(true);\r\n when(mDozeParameters.getDisplayNeedsBlanking()).thenReturn(false);\r\n\r\n mDozeUi.getKeyguardCallback().onKeyguardVisibilityChanged(false);\r\n verify(mDozeParameters).setControlScreenOffAnimation(eq(false));\r\n mDozeUi.getKeyguardCallback().onKeyguardVisibilityChanged(true);\r\n verify(mDozeParameters).setControlScreenOffAnimation(eq(true));\r\n }\r\n\r\n @Test\r\n public void transitionSetsAnimateWakeup_noAlwaysOn() {\r\n mDozeUi.transitionTo(UNINITIALIZED, DOZE);\r\n verify(mHost).setAnimateWakeup(eq(false));\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6377440094947815, "alphanum_fraction": 0.6377440094947815, "avg_line_length": 17.20833396911621, "blob_id": "76bf9c7ffc3a1b694947c46fb6ffd9d14359af9b", "content_id": "40fda35dc6e6e586bf8622f146b8cb2bbe0eef13", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 922, "license_type": "permissive", "max_line_length": 85, "num_lines": 48, "path": "/tools/streaming_proto/Errors.h", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "#include <stdio.h>\r\n\r\n#include <string>\r\n#include <vector>\r\n\r\nnamespace android {\r\nnamespace stream_proto {\r\n\r\nusing namespace std;\r\n\r\nstruct Error\r\n{\r\n Error();\r\n Error(const Error& that);\r\n Error(const string& filename, int lineno, const char* message);\r\n\r\n string filename;\r\n int lineno;\r\n string message;\r\n};\r\n\r\nclass Errors\r\n{\r\npublic:\r\n Errors();\r\n 
~Errors();\r\n\r\n // Add an error\r\n void Add(const string& filename, int lineno, const char* format, ...);\r\n\r\n // Print the errors to stderr if there are any.\r\n void Print() const;\r\n\r\n bool HasErrors() const;\r\n\r\nprivate:\r\n // The errors that have been added\r\n vector<Error> m_errors;\r\n void AddImpl(const string& filename, int lineno, const char* format, va_list ap);\r\n};\r\n\r\nextern Errors ERRORS;\r\nextern const string UNKNOWN_FILE;\r\nextern const int UNKNOWN_LINE;\r\n\r\n\r\n} // namespace stream_proto\r\n} // namespace android\r\n" }, { "alpha_fraction": 0.5972759127616882, "alphanum_fraction": 0.6006389856338501, "avg_line_length": 34.042423248291016, "blob_id": "4e6d280ef24d81c0fa62cdd6ff550d6a2c4ec2e2", "content_id": "f9bcb7ec391b8800d38483f2fafe0d2323d9fc25", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 5947, "license_type": "permissive", "max_line_length": 89, "num_lines": 165, "path": "/tests/UsbHostExternalManagmentTest/AoapTestHost/src/com/android/hardware/usb/aoaphosttest/UsbAoapHostTestActivity.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2016 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\npackage com.android.hardware.usb.aoaphosttest;\r\n\r\nimport android.app.Activity;\r\nimport android.content.BroadcastReceiver;\r\nimport android.content.Context;\r\nimport android.content.Intent;\r\nimport android.content.IntentFilter;\r\nimport android.hardware.usb.UsbAccessory;\r\nimport android.hardware.usb.UsbConstants;\r\nimport android.hardware.usb.UsbDevice;\r\nimport android.hardware.usb.UsbDeviceConnection;\r\nimport android.hardware.usb.UsbEndpoint;\r\nimport android.hardware.usb.UsbInterface;\r\nimport android.hardware.usb.UsbManager;\r\nimport android.os.Bundle;\r\nimport android.os.Handler;\r\nimport android.os.HandlerThread;\r\nimport android.os.Process;\r\nimport android.text.TextUtils;\r\nimport android.util.Log;\r\n\r\nimport libcore.io.IoUtils;\r\n\r\npublic class UsbAoapHostTestActivity extends Activity {\r\n\r\n private static final String TAG = UsbAoapHostTestActivity.class.getSimpleName();\r\n\r\n private UsbManager mUsbManager;\r\n private UsbStateReceiver mReceiver;\r\n private UsbDevice mUsbDevice;\r\n private UsbDeviceConnection mUsbConnection;\r\n private ReaderThread mReaderThread;\r\n\r\n @Override\r\n protected void onCreate(Bundle savedInstanceState) {\r\n super.onCreate(savedInstanceState);\r\n\r\n setContentView(R.layout.host);\r\n\r\n mUsbManager = (UsbManager)getSystemService(Context.USB_SERVICE);\r\n IntentFilter filter = new IntentFilter();\r\n filter.addAction(UsbManager.ACTION_USB_DEVICE_DETACHED);\r\n mReceiver = new UsbStateReceiver();\r\n registerReceiver(mReceiver, filter);\r\n\r\n Intent intent = getIntent();\r\n if (intent.getAction().equals(UsbManager.ACTION_USB_DEVICE_ATTACHED)) 
{\r\n mUsbDevice = intent.getParcelableExtra(UsbManager.EXTRA_DEVICE);\r\n mUsbConnection = mUsbManager.openDevice(mUsbDevice);\r\n mReaderThread = new ReaderThread(mUsbDevice, mUsbConnection);\r\n mReaderThread.start();\r\n } else {\r\n finish();\r\n }\r\n }\r\n\r\n @Override\r\n protected void onDestroy() {\r\n super.onDestroy();\r\n unregisterReceiver(mReceiver);\r\n if (mUsbConnection != null) {\r\n mUsbConnection.close();\r\n }\r\n if (mReaderThread != null) {\r\n mReaderThread.requestToQuit();\r\n try {\r\n mReaderThread.join(1000);\r\n } catch (InterruptedException e) {\r\n }\r\n if (mReaderThread.isAlive()) { // reader thread stuck\r\n throw new RuntimeException(\"ReaderThread still alive\");\r\n }\r\n }\r\n }\r\n\r\n private static boolean isDevicesMatching(UsbDevice l, UsbDevice r) {\r\n if (l.getVendorId() == r.getVendorId() && l.getProductId() == r.getProductId() &&\r\n TextUtils.equals(l.getSerialNumber(), r.getSerialNumber())) {\r\n return true;\r\n }\r\n return false;\r\n }\r\n\r\n private class ReaderThread extends Thread {\r\n private boolean mShouldQuit = false;\r\n private final UsbDevice mDevice;\r\n private final UsbDeviceConnection mUsbConnection;\r\n private final UsbEndpoint mBulkIn;\r\n private final UsbEndpoint mBulkOut;\r\n private final byte[] mBuffer = new byte[16384];\r\n\r\n private ReaderThread(UsbDevice device, UsbDeviceConnection conn) {\r\n super(\"AOAP\");\r\n mDevice = device;\r\n mUsbConnection = conn;\r\n UsbInterface iface = mDevice.getInterface(0);\r\n // Setup bulk endpoints.\r\n UsbEndpoint bulkIn = null;\r\n UsbEndpoint bulkOut = null;\r\n for (int i = 0; i < iface.getEndpointCount(); i++) {\r\n UsbEndpoint ep = iface.getEndpoint(i);\r\n if (ep.getDirection() == UsbConstants.USB_DIR_IN) {\r\n if (bulkIn == null) {\r\n bulkIn = ep;\r\n }\r\n } else {\r\n if (bulkOut == null) {\r\n bulkOut = ep;\r\n }\r\n }\r\n }\r\n if (bulkIn == null || bulkOut == null) {\r\n throw new IllegalStateException(\"Unable to find bulk endpoints\");\r\n }\r\n mBulkIn = bulkIn;\r\n mBulkOut = bulkOut;\r\n }\r\n\r\n private synchronized void requestToQuit() {\r\n mShouldQuit = true;\r\n }\r\n\r\n private synchronized boolean shouldQuit() {\r\n return mShouldQuit;\r\n }\r\n\r\n @Override\r\n public void run() {\r\n while (!shouldQuit()) {\r\n int read = mUsbConnection.bulkTransfer(mBulkIn, mBuffer, mBuffer.length,\r\n Integer.MAX_VALUE);\r\n if (read < 0) {\r\n throw new RuntimeException(\"bulkTransfer failed, read = \" + read);\r\n }\r\n }\r\n }\r\n }\r\n\r\n private class UsbStateReceiver extends BroadcastReceiver {\r\n @Override\r\n public void onReceive(Context context, Intent intent) {\r\n if (UsbManager.ACTION_USB_DEVICE_DETACHED.equals(intent.getAction())) {\r\n UsbDevice device = intent.getParcelableExtra(UsbManager.EXTRA_DEVICE);\r\n if (isDevicesMatching(mUsbDevice, device)) {\r\n finish();\r\n }\r\n }\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6190662980079651, "alphanum_fraction": 0.6320125460624695, "avg_line_length": 36.044776916503906, "blob_id": "56b3b42456e47c3882a23722ee44dcca4d36cb8e", "content_id": "0d2b6b3a8c704c01865c74c8e7d5c1f6aa338037", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2549, "license_type": "permissive", "max_line_length": 90, "num_lines": 67, "path": "/libs/storage/IMountServiceListener.cpp", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2010 The Android Open Source Project\r\n 
*\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#include <storage/IMountServiceListener.h>\r\n#include <binder/Parcel.h>\r\n\r\nnamespace android {\r\n\r\nenum {\r\n TRANSACTION_onUsbMassStorageConnectionChanged = IBinder::FIRST_CALL_TRANSACTION,\r\n TRANSACTION_onStorageStateChanged,\r\n};\r\n\r\nclass BpMountServiceListener: public BpInterface<IMountServiceListener> {\r\npublic:\r\n explicit BpMountServiceListener(const sp<IBinder>& impl)\r\n : BpInterface<IMountServiceListener>(impl) { }\r\n\r\n virtual void onUsbMassStorageConnectionChanged(const bool /* connected */) { }\r\n virtual void onStorageStateChanged(const String16& /* path */,\r\n const String16& /* oldState */, const String16& /* newState */) { }\r\n};\r\n\r\nIMPLEMENT_META_INTERFACE(MountServiceListener, \"android.os.storage.IStorageEventListener\")\r\n\r\n// ----------------------------------------------------------------------\r\n\r\nstatus_t BnMountServiceListener::onTransact(\r\n uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)\r\n{\r\n switch(code) {\r\n case TRANSACTION_onUsbMassStorageConnectionChanged: {\r\n CHECK_INTERFACE(IMountServiceListener, data, reply);\r\n bool connected = (data.readInt32() != 0);\r\n onUsbMassStorageConnectionChanged(connected);\r\n reply->writeNoException();\r\n return NO_ERROR;\r\n }\r\n case TRANSACTION_onStorageStateChanged: {\r\n CHECK_INTERFACE(IMountServiceListener, data, reply);\r\n String16 path = data.readString16();\r\n String16 oldState = data.readString16();\r\n String16 newState = data.readString16();\r\n onStorageStateChanged(path, oldState, newState);\r\n reply->writeNoException();\r\n return NO_ERROR;\r\n }\r\n default:\r\n return BBinder::onTransact(code, data, reply, flags);\r\n }\r\n}\r\n// ----------------------------------------------------------------------\r\n\r\n}\r\n" }, { "alpha_fraction": 0.7037037014961243, "alphanum_fraction": 0.7222222089767456, "avg_line_length": 16.33333396911621, "blob_id": "b756839da54f64f04f3bb04df9119503c137ae80", "content_id": "9e4f13e36e39eb849f653acd0d332c12f98411a7", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 108, "license_type": "permissive", "max_line_length": 50, "num_lines": 6, "path": "/packages/SettingsLib/tests/robotests/readme.md", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "Unit test suite for SettingsLib using Robolectric.\r\n\r\n```\r\n$ croot\r\n$ make RunSettingsLibRoboTests -j40\r\n```" }, { "alpha_fraction": 0.673355758190155, "alphanum_fraction": 0.6760745644569397, "avg_line_length": 36.23762512207031, "blob_id": "6bf9c038c7c9fa60b20bb53a898cf95ddfd63d03", "content_id": "ce357747fa9cfd671f75eab4ca0e71f8db778092", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": true, "language": "Java", "length_bytes": 7724, "license_type": "permissive", "max_line_length": 
99, "num_lines": 202, "path": "/packages/SystemUI/tests/src/com/android/systemui/qs/external/TileServicesTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2015 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License\r\n */\r\npackage com.android.systemui.qs.external;\r\n\r\nimport static junit.framework.Assert.assertEquals;\r\nimport static junit.framework.Assert.assertTrue;\r\n\r\nimport static org.mockito.ArgumentMatchers.any;\r\nimport static org.mockito.ArgumentMatchers.anyLong;\r\nimport static org.mockito.ArgumentMatchers.eq;\r\nimport static org.mockito.Mockito.mock;\r\nimport static org.mockito.Mockito.never;\r\nimport static org.mockito.Mockito.times;\r\nimport static org.mockito.Mockito.verify;\r\nimport static org.mockito.Mockito.when;\r\n\r\nimport android.content.ComponentName;\r\nimport android.content.IntentFilter;\r\nimport android.os.Handler;\r\nimport android.os.Looper;\r\nimport android.os.UserHandle;\r\nimport android.service.quicksettings.Tile;\r\nimport android.service.quicksettings.TileService;\r\nimport android.test.suitebuilder.annotation.SmallTest;\r\nimport android.testing.AndroidTestingRunner;\r\nimport android.testing.TestableLooper;\r\nimport android.testing.TestableLooper.RunWithLooper;\r\n\r\nimport com.android.internal.logging.UiEventLogger;\r\nimport com.android.systemui.SysuiTestCase;\r\nimport com.android.systemui.broadcast.BroadcastDispatcher;\r\nimport com.android.systemui.dump.DumpManager;\r\nimport com.android.systemui.qs.QSTileHost;\r\nimport com.android.systemui.qs.logging.QSLogger;\r\nimport com.android.systemui.qs.tileimpl.QSFactoryImpl;\r\nimport com.android.systemui.shared.plugins.PluginManager;\r\nimport com.android.systemui.statusbar.phone.AutoTileManager;\r\nimport com.android.systemui.statusbar.phone.StatusBar;\r\nimport com.android.systemui.statusbar.phone.StatusBarIconController;\r\nimport com.android.systemui.statusbar.policy.BluetoothController;\r\nimport com.android.systemui.tuner.TunerService;\r\n\r\nimport org.junit.After;\r\nimport org.junit.Before;\r\nimport org.junit.Test;\r\nimport org.junit.runner.RunWith;\r\nimport org.mockito.ArgumentCaptor;\r\nimport org.mockito.Mock;\r\nimport org.mockito.MockitoAnnotations;\r\n\r\nimport java.util.ArrayList;\r\nimport java.util.Optional;\r\n\r\n@SmallTest\r\n@RunWith(AndroidTestingRunner.class)\r\n@RunWithLooper\r\npublic class TileServicesTest extends SysuiTestCase {\r\n private static int NUM_FAKES = TileServices.DEFAULT_MAX_BOUND * 2;\r\n\r\n private TileServices mTileService;\r\n private ArrayList<TileServiceManager> mManagers;\r\n @Mock\r\n private BroadcastDispatcher mBroadcastDispatcher;\r\n @Mock\r\n private StatusBarIconController mStatusBarIconController;\r\n @Mock\r\n private QSFactoryImpl mQSFactory;\r\n @Mock\r\n private PluginManager mPluginManager;\r\n @Mock\r\n private TunerService mTunerService;\r\n @Mock\r\n private AutoTileManager mAutoTileManager;\r\n 
@Mock\r\n    private DumpManager mDumpManager;\r\n    @Mock\r\n    private StatusBar mStatusBar;\r\n    @Mock\r\n    private QSLogger mQSLogger;\r\n    @Mock\r\n    private UiEventLogger mUiEventLogger;\r\n\r\n    @Before\r\n    public void setUp() throws Exception {\r\n        MockitoAnnotations.initMocks(this);\r\n        mDependency.injectMockDependency(BluetoothController.class);\r\n        mManagers = new ArrayList<>();\r\n        QSTileHost host = new QSTileHost(mContext,\r\n                mStatusBarIconController,\r\n                mQSFactory,\r\n                new Handler(),\r\n                Looper.myLooper(),\r\n                mPluginManager,\r\n                mTunerService,\r\n                () -> mAutoTileManager,\r\n                mDumpManager,\r\n                mock(BroadcastDispatcher.class),\r\n                Optional.of(mStatusBar),\r\n                mQSLogger,\r\n                mUiEventLogger);\r\n        mTileService = new TestTileServices(host, Looper.getMainLooper(), mBroadcastDispatcher);\r\n    }\r\n\r\n    @After\r\n    public void tearDown() throws Exception {\r\n        mTileService.getHost().destroy();\r\n        TestableLooper.get(this).processAllMessages();\r\n    }\r\n\r\n    @Test\r\n    public void testActiveTileListenerRegisteredOnAllUsers() {\r\n        ArgumentCaptor<IntentFilter> captor = ArgumentCaptor.forClass(IntentFilter.class);\r\n        verify(mBroadcastDispatcher).registerReceiver(any(), captor.capture(), any(), eq(\r\n                UserHandle.ALL));\r\n        assertTrue(captor.getValue().hasAction(TileService.ACTION_REQUEST_LISTENING));\r\n    }\r\n\r\n    @Test\r\n    public void testRecalculateBindAllowance() {\r\n        // Add some fake tiles.\r\n        for (int i = 0; i < NUM_FAKES; i++) {\r\n            mTileService.getTileWrapper(mock(CustomTile.class));\r\n        }\r\n        assertEquals(NUM_FAKES, mManagers.size());\r\n\r\n        for (int i = 0; i < NUM_FAKES; i++) {\r\n            when(mManagers.get(i).getBindPriority()).thenReturn(i);\r\n        }\r\n        mTileService.recalculateBindAllowance();\r\n        for (int i = 0; i < NUM_FAKES; i++) {\r\n            verify(mManagers.get(i), times(1)).calculateBindPriority(anyLong());\r\n            ArgumentCaptor<Boolean> captor = ArgumentCaptor.forClass(Boolean.class);\r\n            verify(mManagers.get(i), times(1)).setBindAllowed(captor.capture());\r\n\r\n            assertEquals(\"\" + i + \"th service\", i >= (NUM_FAKES - TileServices.DEFAULT_MAX_BOUND),\r\n                    (boolean) captor.getValue());\r\n        }\r\n    }\r\n\r\n    @Test\r\n    public void testSetMemoryPressure() {\r\n        testRecalculateBindAllowance();\r\n        mTileService.setMemoryPressure(true);\r\n\r\n        for (int i = 0; i < NUM_FAKES; i++) {\r\n            ArgumentCaptor<Boolean> captor = ArgumentCaptor.forClass(Boolean.class);\r\n            verify(mManagers.get(i), times(2)).setBindAllowed(captor.capture());\r\n\r\n            assertEquals(\"\" + i + \"th service\", i >= (NUM_FAKES - TileServices.REDUCED_MAX_BOUND),\r\n                    (boolean) captor.getValue());\r\n        }\r\n    }\r\n\r\n    @Test\r\n    public void testCalcFew() {\r\n        for (int i = 0; i < TileServices.DEFAULT_MAX_BOUND - 1; i++) {\r\n            mTileService.getTileWrapper(mock(CustomTile.class));\r\n        }\r\n        mTileService.recalculateBindAllowance();\r\n\r\n        for (int i = 0; i < TileServices.DEFAULT_MAX_BOUND - 1; i++) {\r\n            // Bind priorities shouldn't be calculated when there are fewer than the max services.\r\n            verify(mManagers.get(i), never()).calculateBindPriority(\r\n                    anyLong());\r\n\r\n            // All should be bound since there are fewer than the max services.\r\n            ArgumentCaptor<Boolean> captor = ArgumentCaptor.forClass(Boolean.class);\r\n            verify(mManagers.get(i), times(1)).setBindAllowed(captor.capture());\r\n\r\n            assertTrue(captor.getValue());\r\n        }\r\n    }\r\n\r\n    private class TestTileServices extends TileServices {\r\n        TestTileServices(QSTileHost host, Looper looper,\r\n                BroadcastDispatcher broadcastDispatcher) {\r\n            super(host, looper, broadcastDispatcher);\r\n        
}\r\n\r\n @Override\r\n protected TileServiceManager onCreateTileService(ComponentName component, Tile qsTile,\r\n BroadcastDispatcher broadcastDispatcher) {\r\n TileServiceManager manager = mock(TileServiceManager.class);\r\n mManagers.add(manager);\r\n when(manager.isLifecycleStarted()).thenReturn(true);\r\n return manager;\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.7830650806427002, "alphanum_fraction": 0.7830650806427002, "avg_line_length": 36.621620178222656, "blob_id": "d95419c62c068ea7c84ec748558b2090ab91d3d9", "content_id": "0160c6da4a6142abb4f21d421c20ebe01d1eca5c", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1429, "license_type": "permissive", "max_line_length": 97, "num_lines": 37, "path": "/startop/apps/test/README.md", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "This directory contains a simple Android app that is meant to help in doing\r\ncontrolled startup performance experiments.\r\n\r\nThis app is structured as a number of activities that each are useful for a\r\ndifferent aspect of startup testing.\r\n\r\n# Activities\r\n\r\n## EmptyActivity\r\n\r\nThis is the simplest possible Android activity. Starting this exercises only the\r\nsystem parts of startup without any app-specific behavior.\r\n\r\n adb shell am start -n com.android.startop.test/.EmptyActivity\r\n\r\n## LayoutInflation\r\n\r\nThis activity inflates a reasonably complex layout to see the impact of layout\r\ninflation. The layout is supported by the viewcompiler, so this can be used for\r\ntesting precompiled layout performance.\r\n\r\nThe activity adds an `inflate#activity_main` slice to atrace around the time\r\nspent in view inflation to make it easier to focus on the time spent in view\r\ninflation.\r\n\r\n adb shell am start -n com.android.startop.test/.ComplexLayoutInflationActivity\r\n\r\n## NonInteractiveSystemServerBenchmark\r\n\r\nThis activity is for running microbenchmarks from the command line. 
Run as follows:\r\n\r\n    adb shell am start -W -n com.android.startop.test/.NonInteractiveSystemServerBenchmarkActivity\r\n\r\nIt takes a while (and there's currently no automated way to make sure it's done),\r\nbut when it finishes, you can get the results like this:\r\n\r\n    adb shell cat /sdcard/Android/data/com.android.startop.test/files/benchmark.csv\r\n" }, { "alpha_fraction": 0.648019015789032, "alphanum_fraction": 0.6557844877243042, "avg_line_length": 43.72463607788086, "blob_id": "915e4ce6991919b7f23f1a6c3c48bfd80424d8a9", "content_id": "00aa6d95e41277ca660c1c3dd649cd23188a2ad7", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 6310, "license_type": "permissive", "max_line_length": 108, "num_lines": 138, "path": "/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/MediaMetadataTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2008 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.mediaframeworktest.functional;\r\n\r\nimport android.media.MediaMetadataRetriever;\r\nimport android.test.AndroidTestCase;\r\nimport android.test.suitebuilder.annotation.MediumTest;\r\nimport android.test.suitebuilder.annotation.Suppress;\r\nimport android.util.Log;\r\n\r\nimport com.android.mediaframeworktest.MediaNames;\r\nimport com.android.mediaframeworktest.MediaProfileReader;\r\n/**\r\n * This metadata test suite tests the basic functionality of the \r\n * MediaMetadataRetriever\r\n * \r\n */\r\npublic class MediaMetadataTest extends AndroidTestCase {\r\n    \r\n    private static final String TAG = \"MediaMetadataTest\";\r\n\r\n    public static enum METADATA_EXPECTEDRESULT{\r\n        FILE_PATH,CD_TRACK, ALBUM,\r\n        ARTIST, AUTHOR, COMPOSER,\r\n        DATE, GENRE, TITLE,\r\n        YEAR, DURATION, NUM_TRACKS, WRITER\r\n    }\r\n    \r\n    public static enum MP3_TEST_FILE{\r\n        ID3V1V2, ID3V2, ID3V1\r\n    }\r\n    \r\n    public static METADATA_EXPECTEDRESULT meta;\r\n    public static MP3_TEST_FILE mp3_test_file;\r\n    \r\n    @MediumTest\r\n    public static void testID3V1V2Metadata() throws Exception {\r\n        validateMetadata(mp3_test_file.ID3V1V2.ordinal(), MediaNames.META_DATA_MP3);\r\n    }\r\n    \r\n    @MediumTest\r\n    public static void testID3V2Metadata() throws Exception {\r\n        validateMetadata(mp3_test_file.ID3V2.ordinal(), MediaNames.META_DATA_MP3);\r\n    }\r\n    \r\n    @MediumTest\r\n    public static void testID3V1Metadata() throws Exception {\r\n        validateMetadata(mp3_test_file.ID3V1.ordinal(), MediaNames.META_DATA_MP3);\r\n    }\r\n\r\n    private static void validateMetadata(int fileIndex, String meta_data_file[][]) {\r\n        Log.v(TAG, \"filePath = \"+ meta_data_file[fileIndex][0]);\r\n        if ((meta_data_file[fileIndex][0].endsWith(\"wma\") && !MediaProfileReader.getWMAEnable()) ||\r\n            (meta_data_file[fileIndex][0].endsWith(\"wmv\") && !MediaProfileReader.getWMVEnable())) {\r\n            
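// Skip WMA/WMV test files on builds where those codecs are disabled.\r\n            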
return;\r\n        }\r\n        String value = null;\r\n        MediaMetadataRetriever retriever = new MediaMetadataRetriever();\r\n        try {\r\n            retriever.setDataSource(meta_data_file[fileIndex][0]);\r\n        } catch(Exception e) {\r\n            Log.v(TAG, \"Failed: \"+meta_data_file[fileIndex][0] + \" \" + e.toString());\r\n            //Fail the test case whenever setDataSource fails\r\n            assertTrue(\"Failed to setDataSource \", false);\r\n        }\r\n        \r\n        //METADATA_KEY_CD_TRACK_NUMBER should return the TCRK value\r\n        value = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_CD_TRACK_NUMBER);\r\n        Log.v(TAG, \"CD_TRACK_NUMBER : \" + value);\r\n        assertEquals(TAG, meta_data_file[fileIndex][meta.CD_TRACK.ordinal()], value);\r\n        \r\n        value = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_ALBUM);\r\n        Log.v(TAG, \"Album : \"+ value);\r\n        assertEquals(TAG, meta_data_file[fileIndex][meta.ALBUM.ordinal()], value); \r\n        \r\n        value = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_ARTIST);\r\n        Log.v(TAG, \"Artist : \"+ value);\r\n        assertEquals(TAG, meta_data_file[fileIndex][meta.ARTIST.ordinal()], value);\r\n        \r\n        value = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_AUTHOR);\r\n        Log.v(TAG, \"Author : \"+ value);\r\n        assertEquals(TAG, meta_data_file[fileIndex][meta.AUTHOR.ordinal()], value);\r\n        \r\n        value = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_COMPOSER);\r\n        Log.v(TAG, \"Composer : \"+ value);\r\n        assertEquals(TAG, meta_data_file[fileIndex][meta.COMPOSER.ordinal()], value);\r\n        \r\n        value = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DATE);\r\n        Log.v(TAG, \"Date : \"+ value);\r\n        assertEquals(TAG, meta_data_file[fileIndex][meta.DATE.ordinal()], value);\r\n        \r\n        value = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_GENRE);\r\n        Log.v(TAG, \"Genre : \"+ value);\r\n        assertEquals(TAG, meta_data_file[fileIndex][meta.GENRE.ordinal()], value);\r\n        \r\n        value = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_TITLE);\r\n        Log.v(TAG, \"Title : \"+ value);\r\n        assertEquals(TAG, meta_data_file[fileIndex][meta.TITLE.ordinal()], value);\r\n        \r\n        value = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_YEAR);\r\n        Log.v(TAG, \"Year : \"+ value);\r\n        assertEquals(TAG, meta_data_file[fileIndex][meta.YEAR.ordinal()], value);\r\n        \r\n        value = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION);\r\n        Log.v(TAG, \"Expected = \" + meta_data_file[fileIndex][meta.DURATION.ordinal()] + \" result = \" + value);\r\n        // Only require that the returned duration is within 100ms of the expected\r\n        // one as PV and stagefright differ slightly in their implementation.\r\n        assertTrue(TAG, Math.abs(Integer.parseInt(\r\n                meta_data_file[fileIndex][meta.DURATION.ordinal()])\r\n                - Integer.parseInt(value)) < 100);\r\n        \r\n        //METADATA_KEY_NUM_TRACKS should return the total number of tracks in the media,\r\n        //including both video and audio\r\n        value = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_NUM_TRACKS);\r\n        Log.v(TAG, \"Track : \"+ value);\r\n        assertEquals(TAG,meta_data_file[fileIndex][meta.NUM_TRACKS.ordinal()], value);\r\n        \r\n        value = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_WRITER);\r\n        Log.v(TAG, \"Writer : \"+ value);\r\n        assertEquals(TAG,meta_data_file[fileIndex][meta.WRITER.ordinal()], value);\r\n\r\n        retriever.release(); \r\n    }\r\n}\r\n" }, { "alpha_fraction": 0.6592920422554016, "alphanum_fraction": 0.6643489003181458, "avg_line_length": 23.516128540039062, "blob_id": 
"997417205c82f23cf17d7ac37baccd6dcb75d9e6", "content_id": "1d85bf6ba95be488ec943e20d400a7d96e4029a8", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1582, "license_type": "permissive", "max_line_length": 86, "num_lines": 62, "path": "/tools/aapt2/link/XmlNamespaceRemover.cpp", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2016 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#include \"link/Linkers.h\"\r\n\r\n#include <algorithm>\r\n\r\n#include \"ResourceTable.h\"\r\n\r\nnamespace aapt {\r\n\r\nnamespace {\r\n\r\n// Visits each xml Node, removing URI references and nested namespaces.\r\nclass XmlVisitor : public xml::Visitor {\r\n public:\r\n explicit XmlVisitor(bool keep_uris) : keep_uris_(keep_uris) {}\r\n\r\n void Visit(xml::Element* el) override {\r\n el->namespace_decls.clear();\r\n\r\n if (!keep_uris_) {\r\n for (xml::Attribute& attr : el->attributes) {\r\n attr.namespace_uri.clear();\r\n }\r\n el->namespace_uri.clear();\r\n }\r\n xml::Visitor::Visit(el);\r\n }\r\n\r\n private:\r\n DISALLOW_COPY_AND_ASSIGN(XmlVisitor);\r\n\r\n bool keep_uris_;\r\n};\r\n\r\n} // namespace\r\n\r\nbool XmlNamespaceRemover::Consume(IAaptContext* context, xml::XmlResource* resource) {\r\n if (!resource->root) {\r\n return false;\r\n }\r\n\r\n XmlVisitor visitor(keep_uris_);\r\n resource->root->Accept(&visitor);\r\n return true;\r\n}\r\n\r\n} // namespace aapt\r\n" }, { "alpha_fraction": 0.6902465224266052, "alphanum_fraction": 0.6948910355567932, "avg_line_length": 42.42856979370117, "blob_id": "ba0d537fa687c604936223feb1fdfbf3b2c7359a", "content_id": "122df30a4c653da97203ac7e373a89fb334be9e6", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2799, "license_type": "permissive", "max_line_length": 99, "num_lines": 63, "path": "/core/java/android/hardware/camera2/marshal/MarshalQueryable.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2014 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\npackage android.hardware.camera2.marshal;\r\n\r\nimport android.hardware.camera2.utils.TypeReference;\r\n\r\n/**\r\n * Query if a marshaler can marshal 
to/from a particular native and managed type; if it supports\r\n * the combination, allow creating a marshaler instance to do the serialization.\r\n *\r\n * <p>Not all queryable instances will support exactly one combination. Some, such as the\r\n * primitive queryable, will support all primitive to/from managed mappings (as long as they are\r\n * 1:1). Others, such as the rectangle queryable, will only support integer to rectangle mappings.\r\n * </p>\r\n *\r\n * <p>Yet some others are codependent on other queryables; e.g. array queryables might only support\r\n * a type map for {@code T[]} if another queryable exists with support for the component type\r\n * {@code T}.</p>\r\n */\r\npublic interface MarshalQueryable<T> {\r\n    /**\r\n     * Create a marshaler between the selected managed and native type.\r\n     *\r\n     * <p>This marshaler instance is only good for that specific type mapping; and will refuse\r\n     * to map other managed types, other native types, or any other combination that isn't\r\n     * this exact one.</p>\r\n     *\r\n     * @param managedType a managed type reference\r\n     * @param nativeType the native type, e.g.\r\n     *          {@link android.hardware.camera2.impl.CameraMetadataNative#TYPE_BYTE TYPE_BYTE}\r\n     * @return a marshaler instance for the requested type mapping\r\n     *\r\n     * @throws UnsupportedOperationException\r\n     *          if {@link #isTypeMappingSupported} returns {@code false}\r\n     */\r\n    public Marshaler<T> createMarshaler(\r\n            TypeReference<T> managedType, int nativeType);\r\n\r\n    /**\r\n     * Determine whether or not this query marshal is able to create a marshaler that will\r\n     * support the managed type and native type mapping.\r\n     *\r\n     * <p>If this returns {@code true}, then a marshaler can be instantiated by\r\n     * {@link #createMarshaler} that will marshal data to/from the native type\r\n     * from/to the managed type.</p>\r\n     *\r\n     * <p>Most marshalers are likely to only support one type map.</p>\r\n     */\r\n    public boolean isTypeMappingSupported(TypeReference<T> managedType, int nativeType);\r\n}\r\n" }, { "alpha_fraction": 0.7862281799316406, "alphanum_fraction": 0.7862281799316406, "avg_line_length": 52, "blob_id": "6079ee435b646d3da26ce93dc7a7937d88fbe461", "content_id": "168dd94c2f8d75ea15f4f2f678828e67f6907a8b", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 1946, "license_type": "permissive", "max_line_length": 85, "num_lines": 36, "path": "/media/lib/signer/README.txt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "There are two libraries defined in this directory:\r\nFirst, com.android.mediadrm.signer.jar is a shared java library\r\ncontaining classes required by unbundled apps running on devices that use\r\nthe certificate provisioning and private key signing capabilities provided\r\nby the MediaDrm API.\r\nSecond, com.android.mediadrm.signer.stubs.jar is a stub for the shared library\r\nwhich provides build-time APIs to the unbundled clients.\r\n\r\nAt runtime, the shared library is added to the classloader of the app via the\r\n<uses-library> tag. And since Java always tries to load a class from the\r\nparent classloader, regardless of whether the stub library is linked to the\r\napp statically or dynamically, the real classes are loaded from the shared\r\nlibrary.\r\n\r\n--- Rules of this library ---\r\no The stub library is effectively a PUBLIC API for unbundled CAST receivers\r\n   that may be distributed outside the system image. So it MUST BE API STABLE.\r\n   You can add but not remove. 
The rules are the same as for the\r\n public platform SDK API.\r\no This library can see and instantiate internal platform classes, but it must not\r\n expose them in any public method (or by extending them via inheritance). This would\r\n break clients of the library because they cannot see the internal platform classes.\r\n\r\nThis library is distributed in the system image, and loaded as\r\na shared library. So you can change the implementation, but not\r\nthe interface. In this way it is like framework.jar.\r\n\r\n--- Why does this library exist? ---\r\n\r\nUnbundled apps cannot use internal platform classes.\r\n\r\nThis library will eventually be replaced when the provisioned certificate-\r\nbased signing infrastructure that is currently defined in the support library\r\nis reintegrated with the framework in a new API. That API isn't ready yet so\r\nthis library is a compromise to make new capabilities available to the system\r\nwithout exposing the full surface area of the support library.\r\n\r\n" }, { "alpha_fraction": 0.6140719056129456, "alphanum_fraction": 0.6172659397125244, "avg_line_length": 36.04861068725586, "blob_id": "ba0af417347787394229a48b1c59f90cbc5a8d95", "content_id": "2c9d6bf4bead138a535310659e55a2d41dca90a7", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 10958, "license_type": "permissive", "max_line_length": 92, "num_lines": 288, "path": "/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/SmartCamera.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2013 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage androidx.media.filterfw.samples.simplecamera;\r\n\r\nimport android.app.Activity;\r\nimport android.content.Intent;\r\nimport android.graphics.Bitmap;\r\nimport android.graphics.drawable.BitmapDrawable;\r\nimport android.graphics.drawable.Drawable;\r\nimport android.os.Bundle;\r\nimport android.os.Handler;\r\nimport android.provider.MediaStore;\r\nimport android.util.Log;\r\nimport android.view.LayoutInflater;\r\nimport android.view.SurfaceView;\r\nimport android.view.View;\r\nimport android.view.View.OnClickListener;\r\nimport android.widget.AdapterView;\r\nimport android.widget.Button;\r\nimport android.widget.ImageView;\r\nimport android.widget.LinearLayout;\r\nimport android.widget.Spinner;\r\nimport android.widget.TextView;\r\nimport androidx.media.filterfw.FilterGraph;\r\nimport androidx.media.filterfw.GraphReader;\r\nimport androidx.media.filterfw.GraphRunner;\r\nimport androidx.media.filterfw.MffContext;\r\n\r\nimport java.io.IOException;\r\nimport java.text.SimpleDateFormat;\r\nimport java.util.ArrayList;\r\nimport java.util.Calendar;\r\n\r\n\r\npublic class SmartCamera extends Activity {\r\n\r\n private SurfaceView mCameraView;\r\n private TextView 
mGoodBadTextView;\r\n private TextView mFPSTextView;\r\n private TextView mEyesTextView;\r\n private TextView mSmilesTextView;\r\n private TextView mScoreTextView;\r\n private static ImageView mImageView1;\r\n private static ImageView mImageView2;\r\n private static ImageView mImageView3;\r\n private static ImageView mImageView4;\r\n private static ImageView mImageView5;\r\n private Button mStartStopButton;\r\n private TextView mImagesSavedTextView;\r\n private Spinner mSpinner;\r\n private LinearLayout mLinearLayout;\r\n\r\n private MffContext mContext;\r\n private FilterGraph mGraph;\r\n private GraphRunner mRunner;\r\n private Handler mHandler = new Handler();\r\n\r\n private static final String TAG = \"SmartCamera\";\r\n private static final boolean sUseFacialExpression = false;\r\n private boolean isPendingRunGraph = false;\r\n\r\n private static ArrayList<ImageView> mImages;\r\n private static int count = -1;\r\n private static boolean countHasReachedMax = false;\r\n private static int numImages = 0;\r\n\r\n // Function to return the correct image view to display the current bitmap\r\n public static ImageView getImageView() {\r\n if (count == numImages-1) countHasReachedMax = true;\r\n count = (count+1) % numImages;\r\n return mImages.get(count);\r\n }\r\n\r\n // Function used to run images through the graph, mainly for CSV data generation\r\n public void runGraphOnImage(String filePath, String fileName) {\r\n if(fileName.endsWith(\".jpg\") == false) {\r\n return;\r\n }\r\n mGraph.getVariable(\"gallerySource\").setValue(filePath + \"/\" + fileName);\r\n Log.v(TAG, \"runGraphOnImage : : \" + filePath + \" name: \" + fileName);\r\n mGraph.getVariable(\"imageName\").setValue(fileName);\r\n mGraph.getVariable(\"filePath\").setValue(filePath); // wrong\r\n try {\r\n Thread.sleep(400);\r\n } catch (InterruptedException e) {\r\n // TODO Auto-generated catch block\r\n e.printStackTrace();\r\n }\r\n }\r\n\r\n // Function to clear the \"Images Saved\" text off the screen\r\n private void clearImagesSavedTextView() {\r\n mImagesSavedTextView.setText(\"\");\r\n }\r\n\r\n // Function to capture the images in the current imageviews and save them to the gallery\r\n private void captureImages() {\r\n ((WaveTriggerFilter) mGraph.getFilter(\"snapEffect\")).trigger();\r\n mGraph.getVariable(\"startCapture\").setValue(false);\r\n Bitmap bitmap = null;\r\n Drawable res = getResources().getDrawable(R.drawable.black_screen);\r\n Calendar cal = Calendar.getInstance();\r\n SimpleDateFormat sdf = new SimpleDateFormat(\"yyyy-MM-dd_HH:mm:ss\");\r\n\r\n Log.v(TAG, \"numImages: \" + numImages + \" count: \" + count +\r\n \" hasReachedMax: \" + countHasReachedMax);\r\n int maxI = countHasReachedMax ? 
numImages : count+1;\r\n if(maxI != 0) {\r\n if (maxI == 1) mImagesSavedTextView.setText(\"Image Saved\");\r\n else {\r\n mImagesSavedTextView.setText(\"\" + maxI + \" Images Saved\");\r\n }\r\n }\r\n for (int i = 0; i < maxI; i++) {\r\n bitmap = ((BitmapDrawable)mImages.get(i).getDrawable()).getBitmap();\r\n mImages.get(i).setImageDrawable(res);\r\n MediaStore.Images.Media.insertImage(getContentResolver(), bitmap,\r\n sdf.format(cal.getTime()) + \"_image\" + i + \".jpg\", \"image \" + i);\r\n }\r\n mStartStopButton.setText(\"Start\");\r\n count = -1;\r\n countHasReachedMax = false;\r\n mSpinner.setEnabled(true);\r\n mHandler.postDelayed(new Runnable() {\r\n public void run() {\r\n clearImagesSavedTextView();\r\n }\r\n }, 5000);\r\n }\r\n\r\n @Override\r\n public void onCreate(Bundle savedInstanceState) {\r\n super.onCreate(savedInstanceState);\r\n setContentView(R.layout.simplecamera);\r\n setTitle(\"Smart Camera\");\r\n\r\n mContext = new MffContext(this);\r\n\r\n mCameraView = (SurfaceView) findViewById(R.id.cameraView);\r\n mGoodBadTextView = (TextView) findViewById(R.id.goodOrBadTextView);\r\n mFPSTextView = (TextView) findViewById(R.id.fpsTextView);\r\n mScoreTextView = (TextView) findViewById(R.id.scoreTextView);\r\n mStartStopButton = (Button) findViewById(R.id.startButton);\r\n mImagesSavedTextView = (TextView) findViewById(R.id.imagesSavedTextView);\r\n mImagesSavedTextView.setText(\"\");\r\n mSpinner = (Spinner) findViewById(R.id.spinner);\r\n mLinearLayout = (LinearLayout) findViewById(R.id.scrollViewLinearLayout);\r\n mImages = new ArrayList<ImageView>();\r\n\r\n // Spinner is used to determine how many image views are displayed at the bottom\r\n // of the screen. Based on the item position that is selected, we inflate that\r\n // many imageviews into the bottom linear layout.\r\n mSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {\r\n @Override\r\n public void onItemSelected(AdapterView<?> parentView, View selectedItemView,\r\n int position, long id) {\r\n mLinearLayout.removeViews(0,numImages);\r\n numImages = position+1;\r\n mImages.clear();\r\n LayoutInflater inflater = getLayoutInflater();\r\n for (int i = 0; i < numImages; i++) {\r\n ImageView tmp = (ImageView) inflater.inflate(R.layout.imageview, null);\r\n mImages.add(tmp);\r\n mLinearLayout.addView(tmp);\r\n }\r\n }\r\n\r\n @Override\r\n public void onNothingSelected(AdapterView<?> parentView) {\r\n }\r\n });\r\n\r\n numImages = mSpinner.getSelectedItemPosition()+1;\r\n mImages.clear();\r\n LayoutInflater inflater = getLayoutInflater();\r\n for (int i = 0; i < numImages; i++) {\r\n ImageView tmp = (ImageView) inflater.inflate(R.layout.imageview, null);\r\n mImages.add(tmp);\r\n mLinearLayout.addView(tmp);\r\n\r\n }\r\n\r\n // Button used to start and stop the capture of images when they are deemed great\r\n mStartStopButton.setOnClickListener(new OnClickListener() {\r\n @Override\r\n public void onClick(View v) {\r\n if (mStartStopButton.getText().equals(\"Start\")) {\r\n mGraph.getVariable(\"startCapture\").setValue(true);\r\n mStartStopButton.setText(\"Stop\");\r\n mSpinner.setEnabled(false);\r\n } else {\r\n boolean tmp = (Boolean) mGraph.getVariable(\"startCapture\").getValue();\r\n if (tmp == false) {\r\n return;\r\n }\r\n if (count == numImages-1) countHasReachedMax = true;\r\n captureImages();\r\n }\r\n }\r\n });\r\n\r\n // Button to open the gallery to show the images in there\r\n Button galleryOpen = (Button) findViewById(R.id.galleryOpenButton);\r\n 
galleryOpen.setOnClickListener(new OnClickListener() {\r\n @Override\r\n public void onClick(View v) {\r\n Intent openGalleryIntent = new Intent(Intent.ACTION_MAIN);\r\n openGalleryIntent.addCategory(Intent.CATEGORY_APP_GALLERY);\r\n startActivity(openGalleryIntent);\r\n }\r\n });\r\n\r\n loadGraph();\r\n mGraph.getVariable(\"startCapture\").setValue(false);\r\n runGraph();\r\n }\r\n\r\n @Override\r\n public void onPause() {\r\n super.onPause();\r\n Log.i(TAG, \"onPause\");\r\n if (mContext != null) {\r\n mContext.onPause();\r\n }\r\n }\r\n\r\n @Override\r\n public void onResume() {\r\n super.onResume();\r\n Log.i(TAG, \"onResume\");\r\n if (mContext != null) {\r\n mContext.onResume();\r\n }\r\n if (isPendingRunGraph) {\r\n isPendingRunGraph = false;\r\n runGraph();\r\n }\r\n }\r\n\r\n @Override\r\n public void onStop() {\r\n super.onStop();\r\n Log.i(TAG, \"onStop\");\r\n }\r\n\r\n // Build the Filtergraph for Camera\r\n private void loadGraph() {\r\n try {\r\n mGraph = GraphReader.readXmlGraphResource(mContext, R.raw.camera_graph);\r\n mRunner = mGraph.getRunner();\r\n\r\n // Connect views\r\n mGraph.bindFilterToView(\"camViewTarget\", mCameraView);\r\n mGraph.bindFilterToView(\"goodOrBadTextView\", mGoodBadTextView);\r\n mGraph.bindFilterToView(\"fpsTextView\", mFPSTextView);\r\n mGraph.bindFilterToView(\"scoreTextView\", mScoreTextView);\r\n\r\n // Used for Facial Expressions\r\n if (sUseFacialExpression) {\r\n mGraph.bindFilterToView(\"eyesTextView\", mEyesTextView);\r\n mGraph.bindFilterToView(\"smilesTextView\", mSmilesTextView);\r\n }\r\n\r\n } catch (IOException e) {\r\n e.printStackTrace();\r\n }\r\n }\r\n\r\n // Asynchronously run the filtergraph\r\n private void runGraph() {\r\n mRunner.setIsVerbose(true);\r\n mRunner.start(mGraph);\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.8099290728569031, "alphanum_fraction": 0.8553191423416138, "avg_line_length": 115.5, "blob_id": "ae43d7657e66240fccb8a673e447be623f4eacc9", "content_id": "1fcf0221e356376ddd1b7915ff019759614549f3", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 705, "license_type": "permissive", "max_line_length": 135, "num_lines": 6, "path": "/wifi/tests/assets/hsr1/README.txt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "HSR1ProfileWithCACert.conf - unencoded installation file that contains a Passpoint profile and a CA Certificate\r\nHSR1ProfileWithCACert.base64 - base64 encoded of the data contained in HSR1ProfileWithCAWith.conf\r\nHSR1ProfileWithNonBase64Part.base64 - base64 encoded installation file that contains a part of non-base64 encoding type\r\nHSR1ProfileWithMissingBoundary.base64 - base64 encoded installation file with missing end-boundary in the MIME data\r\nHSR1ProfileWithInvalidContentType.base64 - base64 encoded installation file with that contains a MIME part with an invalid content type\r\nHSR1ProfileWithUpdateIdentifier.base64 - base64 encoded installation file with that contains an R2 update identifier\r\n" }, { "alpha_fraction": 0.6617963314056396, "alphanum_fraction": 0.6678126454353333, "avg_line_length": 33.79999923706055, "blob_id": "ddab5b8b5d38cb22bb85799790ddead35e4796eb", "content_id": "12ac303d0aeb39e3297b08fab2fd23ab72d9fa70", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 2327, "license_type": "permissive", "max_line_length": 97, "num_lines": 65, "path": 
"/tests/net/common/java/android/net/metrics/DhcpErrorEventTest.kt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "package android.net.metrics\r\n\r\nimport android.net.metrics.DhcpErrorEvent.DHCP_INVALID_OPTION_LENGTH\r\nimport android.net.metrics.DhcpErrorEvent.errorCodeWithOption\r\nimport androidx.test.filters.SmallTest\r\nimport androidx.test.runner.AndroidJUnit4\r\nimport com.android.testutils.parcelingRoundTrip\r\nimport java.lang.reflect.Modifier\r\nimport org.junit.Assert.assertEquals\r\nimport org.junit.Assert.assertNotNull\r\nimport org.junit.Assert.assertTrue\r\nimport org.junit.Test\r\nimport org.junit.runner.RunWith\r\n\r\nprivate const val TEST_ERROR_CODE = 12345\r\n//DHCP Optional Type: DHCP Subnet Mask (Copy from DhcpPacket.java due to it's protected)\r\nprivate const val DHCP_SUBNET_MASK = 1\r\n\r\n@RunWith(AndroidJUnit4::class)\r\n@SmallTest\r\nclass DhcpErrorEventTest {\r\n\r\n @Test\r\n fun testConstructor() {\r\n val event = DhcpErrorEvent(TEST_ERROR_CODE)\r\n assertEquals(TEST_ERROR_CODE, event.errorCode)\r\n }\r\n\r\n @Test\r\n fun testParcelUnparcel() {\r\n val event = DhcpErrorEvent(TEST_ERROR_CODE)\r\n val parceled = parcelingRoundTrip(event)\r\n assertEquals(TEST_ERROR_CODE, parceled.errorCode)\r\n }\r\n\r\n @Test\r\n fun testErrorCodeWithOption() {\r\n val errorCode = errorCodeWithOption(DHCP_INVALID_OPTION_LENGTH, DHCP_SUBNET_MASK);\r\n assertTrue((DHCP_INVALID_OPTION_LENGTH and errorCode) == DHCP_INVALID_OPTION_LENGTH);\r\n assertTrue((DHCP_SUBNET_MASK and errorCode) == DHCP_SUBNET_MASK);\r\n }\r\n\r\n @Test\r\n fun testToString() {\r\n val names = listOf(\"L2_ERROR\", \"L3_ERROR\", \"L4_ERROR\", \"DHCP_ERROR\", \"MISC_ERROR\")\r\n val errorFields = DhcpErrorEvent::class.java.declaredFields.filter {\r\n it.type == Int::class.javaPrimitiveType\r\n && Modifier.isPublic(it.modifiers) && Modifier.isStatic(it.modifiers)\r\n && it.name !in names\r\n }\r\n\r\n errorFields.forEach {\r\n val intValue = it.getInt(null)\r\n val stringValue = DhcpErrorEvent(intValue).toString()\r\n assertTrue(\"Invalid string for error 0x%08X (field %s): %s\".format(intValue, it.name,\r\n stringValue),\r\n stringValue.contains(it.name))\r\n }\r\n }\r\n\r\n @Test\r\n fun testToString_InvalidErrorCode() {\r\n assertNotNull(DhcpErrorEvent(TEST_ERROR_CODE).toString())\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.7560975551605225, "alphanum_fraction": 0.7560975551605225, "avg_line_length": 25.33333396911621, "blob_id": "ebf37fcda5f636b039ba7fe9041e0b2b005b4cd3", "content_id": "721ce983f460633692011d5f722e07cb34eb1486", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 82, "license_type": "permissive", "max_line_length": 52, "num_lines": 3, "path": "/docs/downloads/partner/audio/README.txt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "A/V sync and latency PCB\r\n\r\nfabrication drawing, EAGLE CAD files, schematic, BOM\r\n" }, { "alpha_fraction": 0.655572772026062, "alphanum_fraction": 0.6695046424865723, "avg_line_length": 32, "blob_id": "20fc20bdd2e6733c6fb39b2699541ac1513098fd", "content_id": "dbf500556360a58e55667a84e4a7ff2c50c5d23e", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1292, "license_type": "permissive", "max_line_length": 75, "num_lines": 38, "path": 
"/core/tests/coretests/src/android/widget/scroll/arrowscroll/MultiPageTextWithPadding.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2011 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage android.widget.scroll.arrowscroll;\r\n\r\nimport android.util.ScrollViewScenario;\r\n\r\n/**\r\n * One TextView with a text covering several pages. Padding is added\r\n * above and below the ScrollView.\r\n */\r\npublic class MultiPageTextWithPadding extends ScrollViewScenario {\r\n\r\n @Override\r\n protected void init(Params params) {\r\n\r\n String text = \"This is a long text.\";\r\n String longText = \"First text.\";\r\n for (int i = 0; i < 300; i++) {\r\n longText = longText + \" \" + text;\r\n }\r\n longText = longText + \" Last text.\";\r\n params.addTextView(longText, -1.0f).addPaddingToScrollView(50, 50);\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.5643371939659119, "alphanum_fraction": 0.5737659335136414, "avg_line_length": 32.67307662963867, "blob_id": "59e32c0d535afbbfed96eab01dbabccdffddf637", "content_id": "a26b3d08f7674ecd8e0969b4cb6764e206e35d28", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 3606, "license_type": "permissive", "max_line_length": 88, "num_lines": 104, "path": "/tests/LargeAssetTest/src/com/android/largeassettest/LargeAssetTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2006 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.largeassettest;\r\n\r\nimport android.app.Activity;\r\nimport android.content.Context;\r\nimport android.content.res.AssetManager;\r\nimport android.os.AsyncTask;\r\nimport android.os.Bundle;\r\nimport android.util.Log;\r\nimport android.view.View;\r\nimport android.widget.Button;\r\nimport android.widget.TextView;\r\n\r\nimport java.io.InputStream;\r\nimport java.io.IOException;\r\n\r\n/**\r\n * Skeleton to test large-asset handling. 
The asset in question is one million\r\n * four-byte integers, in ascending numeric order.\r\n */\r\npublic class LargeAssetTest extends Activity {\r\n Button mValidateButton;\r\n TextView mResultText;\r\n Validator mValidateThread;\r\n\r\n @Override\r\n protected void onCreate(Bundle icicle) {\r\n super.onCreate(icicle);\r\n setContentView(R.layout.lat);\r\n\r\n mResultText = findViewById(R.id.result);\r\n mValidateButton = findViewById(R.id.validate);\r\n\r\n mValidateButton.setOnClickListener(mClickListener);\r\n }\r\n\r\n View.OnClickListener mClickListener = new View.OnClickListener() {\r\n public void onClick(View v) {\r\n mValidateButton.setEnabled(false);\r\n mValidateThread = new Validator();\r\n mValidateThread.execute(LargeAssetTest.this.getAssets());\r\n }\r\n };\r\n\r\n /**\r\n * Validation happens in a separate thread\r\n */\r\n class Validator extends AsyncTask<AssetManager, Integer, Boolean> {\r\n static final String TAG = \"Validator\";\r\n\r\n @Override\r\n protected Boolean doInBackground(AssetManager... params) {\r\n AssetManager am = params[0];\r\n try {\r\n InputStream is = am.open(\"million-ints\", AssetManager.ACCESS_STREAMING);\r\n byte[] buf = new byte[4];\r\n\r\n for (int i = 0; i < 1000000; i++) {\r\n int num = is.read(buf, 0, 4);\r\n if (num != 4) {\r\n Log.e(TAG, \"Wanted 4 bytes but read \" + num);\r\n return false;\r\n }\r\n // the byte array is stored in the asset in little-endian order\r\n int value = (buf[3] << 24) + ((buf[2] & 0xFF) << 16)\r\n + ((buf[1] & 0xFF) << 8) + (buf[0] & 0xFF);\r\n if (value != i) {\r\n Log.e(TAG, \"Mismatch: index \" + i + \" : value \" + value);\r\n return false;\r\n }\r\n }\r\n\r\n is.close();\r\n } catch (IOException e) {\r\n Log.w(TAG, \"Couldn't open asset\", e);\r\n return false;\r\n }\r\n Log.i(TAG, \"Finished, reporting valid\");\r\n return true;\r\n }\r\n\r\n @Override\r\n protected void onPostExecute(Boolean result) {\r\n CharSequence text = (result) ? 
\"Valid!\" : \"NOT VALID\";\r\n mResultText.setText(text);\r\n mValidateButton.setEnabled(true);\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.5814636945724487, "alphanum_fraction": 0.5929487347602844, "avg_line_length": 41.546512603759766, "blob_id": "5a2aadb2dc3c006c79acc01eace021c71b063f16", "content_id": "7e4bcb0d9670e8a616e90627af7444fccb6cdbc2", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 7488, "license_type": "permissive", "max_line_length": 98, "num_lines": 172, "path": "/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/samples/simplecamera/FaceSquareFilterTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright 2013 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage androidx.media.filterfw.samples.simplecamera;\r\n\r\n\r\nimport android.content.res.AssetManager;\r\nimport android.graphics.Bitmap;\r\nimport android.graphics.BitmapFactory;\r\nimport android.net.Uri;\r\nimport android.provider.MediaStore;\r\n\r\nimport androidx.media.filterfw.Filter;\r\nimport androidx.media.filterfw.FrameImage2D;\r\nimport androidx.media.filterfw.FrameType;\r\nimport androidx.media.filterfw.FrameValues;\r\nimport androidx.media.filterfw.MffContext;\r\nimport androidx.media.filterfw.MffFilterTestCase;\r\n\r\nimport java.io.FileNotFoundException;\r\nimport java.io.IOException;\r\nimport java.util.concurrent.ExecutionException;\r\nimport java.util.concurrent.TimeoutException;\r\n\r\nimport android.hardware.Camera;\r\nimport android.hardware.Camera.Face;\r\nimport android.graphics.Rect;\r\n\r\n\r\npublic class FaceSquareFilterTest extends MffFilterTestCase {\r\n\r\n private AssetManager assetMgr = null;\r\n @Override\r\n protected Filter createFilter(MffContext mffContext) {\r\n assetMgr = mffContext.getApplicationContext().getAssets();\r\n return new FaceSquareFilter(mffContext, \"faceSquareFilter\");\r\n }\r\n\r\n public void testFaceSquareFilter() throws Exception{\r\n final int INPUT_WIDTH = 1536;\r\n final int INPUT_HEIGHT = 2048;\r\n FrameImage2D image =\r\n createFrame(FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_CPU),\r\n new int[] {INPUT_WIDTH,INPUT_HEIGHT}).asFrameImage2D();\r\n\r\n FrameValues facesFrame = createFrame(FrameType.array(Camera.Face.class), new int[] {1,1}).\r\n asFrameValues();\r\n\r\n Bitmap bitmap = BitmapFactory.decodeStream(assetMgr.open(\"XZZ019.jpg\"));\r\n image.setBitmap(bitmap);\r\n injectInputFrame(\"image\", image);\r\n\r\n Face face = new Face();\r\n Rect faceRect = new Rect();\r\n // These are the values for image 141 with 1 face\r\n faceRect.set(-533, -453, 369, 224);\r\n face.rect = faceRect;\r\n Face[] faces = new Face[1];\r\n faces[0] = face;\r\n facesFrame.setValue(faces);\r\n injectInputFrame(\"faces\", facesFrame);\r\n process();\r\n\r\n // ensure the output image has the 
rectangle in the right place\r\n FrameImage2D outputImage = getOutputFrame(\"image\").asFrameImage2D();\r\n int[] pixels = new int[bitmap.getByteCount()];\r\n bitmap.getPixels(pixels, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(),\r\n bitmap.getHeight());\r\n\r\n final int FACE_X_RANGE = 2000;\r\n final int WIDTH_OFFSET = 1000;\r\n final int HEIGHT_OFFSET = 1000;\r\n\r\n int top = (faceRect.top+HEIGHT_OFFSET)*bitmap.getHeight()/FACE_X_RANGE;\r\n int bottom = (faceRect.bottom+HEIGHT_OFFSET)*bitmap.getHeight()/FACE_X_RANGE;\r\n int left = (faceRect.left+WIDTH_OFFSET)*bitmap.getWidth()/FACE_X_RANGE;\r\n int right = (faceRect.right+WIDTH_OFFSET)*bitmap.getWidth()/FACE_X_RANGE;\r\n\r\n if (top < 0) {\r\n top = 0;\r\n } else if (top > bitmap.getHeight()) {\r\n top = bitmap.getHeight();\r\n }\r\n if (left < 0) {\r\n left = 0;\r\n } else if (left > bitmap.getWidth()) {\r\n left = bitmap.getWidth();\r\n }\r\n if (bottom > bitmap.getHeight()) {\r\n bottom = bitmap.getHeight();\r\n } else if (bottom < 0) {\r\n bottom = 0;\r\n }\r\n if (right > bitmap.getWidth()) {\r\n right = bitmap.getWidth();\r\n } else if (right < 0) {\r\n right = 0;\r\n }\r\n\r\n for (int j = 0; j < (bottom - top); j++) {\r\n // Left edge\r\n if (left > 0 && top > 0) {\r\n pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * (top + j) + left) +\r\n ImageConstants.RED_OFFSET] = (byte) ImageConstants.MAX_BYTE;\r\n pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * (top + j) + left) +\r\n ImageConstants.GREEN_OFFSET] = (byte) ImageConstants.MAX_BYTE;\r\n pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * (top + j) + left) +\r\n ImageConstants.BLUE_OFFSET] = (byte) ImageConstants.MAX_BYTE;\r\n }\r\n\r\n // Right edge\r\n if (right > 0 && top > 0) {\r\n pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * (top + j) + right) +\r\n ImageConstants.RED_OFFSET] = (byte) ImageConstants.MAX_BYTE;\r\n pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * (top + j) + right) +\r\n ImageConstants.GREEN_OFFSET] = (byte) ImageConstants.MAX_BYTE;\r\n pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * (top + j) + right) +\r\n ImageConstants.BLUE_OFFSET] = (byte) ImageConstants.MAX_BYTE;\r\n }\r\n\r\n }\r\n for (int k = 0; k < (right - left); k++) {\r\n // Top edge\r\n if (top < bitmap.getHeight()) {\r\n pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * top + left + k) +\r\n ImageConstants.RED_OFFSET] = (byte) ImageConstants.MAX_BYTE;\r\n pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * top + left + k) +\r\n ImageConstants.GREEN_OFFSET] = (byte) ImageConstants.MAX_BYTE;\r\n pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * top + left + k) +\r\n ImageConstants.BLUE_OFFSET] = (byte) ImageConstants.MAX_BYTE;\r\n\r\n }\r\n // Bottom edge\r\n if (bottom < bitmap.getHeight()) {\r\n pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * bottom + left + k) +\r\n ImageConstants.RED_OFFSET] = (byte) ImageConstants.MAX_BYTE;\r\n pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * bottom + left + k) +\r\n ImageConstants.GREEN_OFFSET] = (byte) ImageConstants.MAX_BYTE;\r\n pixels[ImageConstants.PIX_CHANNELS * (bitmap.getWidth() * bottom + left + k) +\r\n ImageConstants.BLUE_OFFSET] = (byte) ImageConstants.MAX_BYTE;\r\n }\r\n }\r\n\r\n Bitmap outputBitmap = outputImage.toBitmap();\r\n int[] outputPixels = new int[outputBitmap.getByteCount()];\r\n outputBitmap.getPixels(outputPixels, 0, outputBitmap.getWidth(), 0, 0,\r\n outputBitmap.getWidth(), outputBitmap.getHeight());\r\n int equalCount = 
0;\r\n for ( int i = 0; i < outputBitmap.getByteCount(); i++) {\r\n if (pixels[i] == outputPixels[i])\r\n equalCount++;\r\n }\r\n\r\n if (equalCount + (0.05f*outputBitmap.getByteCount()) < outputBitmap.getByteCount()) {\r\n // Assertion will fail if condition is true\r\n assertEquals(equalCount, outputBitmap.getByteCount());\r\n }\r\n }\r\n}" }, { "alpha_fraction": 0.7443946003913879, "alphanum_fraction": 0.7690582871437073, "avg_line_length": 32.30769348144531, "blob_id": "7f5845e29a5d4c0630a43f02f1048df16ab7585b", "content_id": "c7a281d68ef516cce2086c170deaaf2629a1bc43", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 446, "license_type": "permissive", "max_line_length": 80, "num_lines": 13, "path": "/samples/demo/haptic-assessment/README.txt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "README\r\n======\r\n\r\nThis haptic assessment sample app allows the user to play with three different\r\ntypes of VibrationEffects:\r\n\r\n1) Predefined click effect with default strength.\r\n\r\n2) One shot vibration with 20ms duration at max amplitude.\r\n\r\n3) Waveform vibration with 500ms duration at half amplitude, then 500ms duration\r\n at max amplitude. This will pass if the device supports vibration amplitude\r\n control, and visibly fail otherwise.\r\n" }, { "alpha_fraction": 0.6154752373695374, "alphanum_fraction": 0.620581328868866, "avg_line_length": 27.976470947265625, "blob_id": "42c0180941043149a03d68779614911fe93bac9b", "content_id": "e05d3a5a7b03ece784de2c8d8ea50f03dfa41164", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2546, "license_type": "permissive", "max_line_length": 75, "num_lines": 85, "path": "/tests/FrameworkPerf/src/com/android/frameworkperf/RunResult.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2011 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.frameworkperf;\r\n\r\nimport android.os.Parcel;\r\nimport android.os.Parcelable;\r\n\r\npublic class RunResult implements Parcelable {\r\n final String name;\r\n final String fgLongName;\r\n final String bgLongName;\r\n final long fgTime;\r\n final long fgOps;\r\n final long bgTime;\r\n final long bgOps;\r\n\r\n RunResult(TestService.TestRunner op) {\r\n name = op.getName();\r\n fgLongName = op.getForegroundLongName();\r\n bgLongName = op.getBackgroundLongName();\r\n fgTime = op.getForegroundTime();\r\n fgOps = op.getForegroundOps();\r\n bgTime = op.getBackgroundTime();\r\n bgOps = op.getBackgroundOps();\r\n }\r\n\r\n RunResult(Parcel source) {\r\n name = source.readString();\r\n fgLongName = source.readString();\r\n bgLongName = source.readString();\r\n fgTime = source.readLong();\r\n fgOps = source.readLong();\r\n bgTime = 
source.readLong();\r\n bgOps = source.readLong();\r\n }\r\n\r\n float getFgMsPerOp() {\r\n return fgOps != 0 ? (fgTime / (float)fgOps) : 0;\r\n }\r\n\r\n float getBgMsPerOp() {\r\n return bgOps != 0 ? (bgTime / (float)bgOps) : 0;\r\n }\r\n\r\n @Override\r\n public int describeContents() {\r\n return 0;\r\n }\r\n\r\n @Override\r\n public void writeToParcel(Parcel dest, int flags) {\r\n dest.writeString(name);\r\n dest.writeString(fgLongName);\r\n dest.writeString(bgLongName);\r\n dest.writeLong(fgTime);\r\n dest.writeLong(fgOps);\r\n dest.writeLong(bgTime);\r\n dest.writeLong(bgOps);\r\n }\r\n\r\n public static final Parcelable.Creator<RunResult> CREATOR\r\n = new Parcelable.Creator<RunResult>() {\r\n public RunResult createFromParcel(Parcel in) {\r\n return new RunResult(in);\r\n }\r\n\r\n public RunResult[] newArray(int size) {\r\n return new RunResult[size];\r\n }\r\n };\r\n}" }, { "alpha_fraction": 0.8096885681152344, "alphanum_fraction": 0.8131487965583801, "avg_line_length": 70.5, "blob_id": "fac03195a401ba5d89d24728389084a901ecfdc8", "content_id": "ffdb904fc22d31e4dbefbfc9cb1a5185fce066fb", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 578, "license_type": "permissive", "max_line_length": 142, "num_lines": 8, "path": "/apct-tests/perftests/textclassifier/run.sh", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "set -e\r\nbuild/soong/soong_ui.bash --make-mode TextClassifierPerfTests perf-setup.sh\r\nadb install ${OUT}/testcases/TextClassifierPerfTests/arm64/TextClassifierPerfTests.apk\r\nadb shell cmd package compile -m speed -f com.android.perftests.textclassifier\r\nadb push ${OUT}/obj/EXECUTABLES/perf-setup.sh_intermediates/perf-setup.sh /data/local/tmp/\r\nadb shell chmod +x /data/local/tmp/perf-setup.sh\r\nadb shell /data/local/tmp/perf-setup.sh\r\nadb shell am instrument -w -e package android.view.textclassifier com.android.perftests.textclassifier/androidx.test.runner.AndroidJUnitRunner" }, { "alpha_fraction": 0.6654929518699646, "alphanum_fraction": 0.6767605543136597, "avg_line_length": 24.79245376586914, "blob_id": "065a8fe85b55089c325e05c1f0a76659d92ff64c", "content_id": "a4a9b9b0e2d727eac3115e7861a7c0f23e90070a", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 1420, "license_type": "permissive", "max_line_length": 75, "num_lines": 53, "path": "/cmds/interrupter/interrupter.c", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright 2012, The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n\r\n/**\r\n * The probability of a syscall failing from 0.0 to 1.0\r\n */\r\n#define PROBABILITY 0.9\r\n\r\n\r\n\r\n#include <stdio.h>\r\n#include <stdlib.h>\r\n#include <errno.h>\r\n\r\n/* for various intercepted calls */\r\n#include 
<sys/types.h>\r\n#include <sys/socket.h>\r\n#include <sys/stat.h>\r\n#include <fcntl.h>\r\n\r\n/* For builds on glibc */\r\n#define __USE_GNU\r\n#include <dlfcn.h>\r\n\r\n#include \"interrupter.h\"\r\n\r\nstatic int probability = PROBABILITY * RAND_MAX;\r\n\r\nstatic int maybe_interrupt() {\r\n if (rand() < probability) {\r\n return 1;\r\n }\r\n return 0;\r\n}\r\n\r\nDEFINE_INTERCEPT(read, ssize_t, int, void*, size_t);\r\nDEFINE_INTERCEPT(write, ssize_t, int, const void*, size_t);\r\nDEFINE_INTERCEPT(accept, int, int, struct sockaddr*, socklen_t*);\r\nDEFINE_INTERCEPT(creat, int, const char*, mode_t);\r\n" }, { "alpha_fraction": 0.5280026793479919, "alphanum_fraction": 0.529689610004425, "avg_line_length": 27.346534729003906, "blob_id": "0f3f8ea5af94bc916f6f73b7e3e4e96826952909", "content_id": "24f3c27e0543e0a9c87f0bff1ad2900b5b164339", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2964, "license_type": "permissive", "max_line_length": 92, "num_lines": 101, "path": "/tests/HierarchyViewerTest/src/com/android/test/hierarchyviewer/Decoder.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "package com.android.test.hierarchyviewer;\r\n\r\nimport java.nio.ByteBuffer;\r\nimport java.nio.charset.Charset;\r\nimport java.util.HashMap;\r\nimport java.util.Map;\r\n\r\npublic class Decoder {\r\n // Prefixes for simple primitives. These match the JNI definitions.\r\n public static final byte SIG_BOOLEAN = 'Z';\r\n public static final byte SIG_BYTE = 'B';\r\n public static final byte SIG_SHORT = 'S';\r\n public static final byte SIG_INT = 'I';\r\n public static final byte SIG_LONG = 'J';\r\n public static final byte SIG_FLOAT = 'F';\r\n public static final byte SIG_DOUBLE = 'D';\r\n\r\n // Prefixes for some commonly used objects\r\n public static final byte SIG_STRING = 'R';\r\n\r\n public static final byte SIG_MAP = 'M'; // a map with a short key\r\n public static final short SIG_END_MAP = 0;\r\n\r\n private final ByteBuffer mBuf;\r\n\r\n public Decoder(byte[] buf) {\r\n this(ByteBuffer.wrap(buf));\r\n }\r\n\r\n public Decoder(ByteBuffer buf) {\r\n mBuf = buf;\r\n }\r\n\r\n public boolean hasRemaining() {\r\n return mBuf.hasRemaining();\r\n }\r\n\r\n public Object readObject() {\r\n byte sig = mBuf.get();\r\n\r\n switch (sig) {\r\n case SIG_BOOLEAN:\r\n return mBuf.get() == 0 ? 
Boolean.FALSE : Boolean.TRUE;\r\n case SIG_BYTE:\r\n return mBuf.get();\r\n case SIG_SHORT:\r\n return mBuf.getShort();\r\n case SIG_INT:\r\n return mBuf.getInt();\r\n case SIG_LONG:\r\n return mBuf.getLong();\r\n case SIG_FLOAT:\r\n return mBuf.getFloat();\r\n case SIG_DOUBLE:\r\n return mBuf.getDouble();\r\n case SIG_STRING:\r\n return readString();\r\n case SIG_MAP:\r\n return readMap();\r\n default:\r\n throw new DecoderException(sig, mBuf.position() - 1);\r\n }\r\n }\r\n\r\n private String readString() {\r\n short len = mBuf.getShort();\r\n byte[] b = new byte[len];\r\n mBuf.get(b, 0, len);\r\n return new String(b, Charset.forName(\"utf-8\"));\r\n }\r\n\r\n private Map<Short, Object> readMap() {\r\n Map<Short, Object> m = new HashMap<Short, Object>();\r\n\r\n while (true) {\r\n Object o = readObject();\r\n if (!(o instanceof Short)) {\r\n throw new DecoderException(\"Expected short key, got \" + o.getClass());\r\n }\r\n\r\n Short key = (Short)o;\r\n if (key == SIG_END_MAP) {\r\n break;\r\n }\r\n\r\n m.put(key, readObject());\r\n }\r\n\r\n return m;\r\n }\r\n\r\n public static class DecoderException extends RuntimeException {\r\n public DecoderException(byte seen, int pos) {\r\n super(String.format(\"Unexpected byte %c seen at position %d\", (char)seen, pos));\r\n }\r\n\r\n public DecoderException(String msg) {\r\n super(msg);\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.55859375, "alphanum_fraction": 0.5662715435028076, "avg_line_length": 29.324893951416016, "blob_id": "625c15c26b5e9a175f70d208dcdc2017501528c7", "content_id": "d905f8c6ccbf9b010f298d19360ca70ee9239e5d", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 7424, "license_type": "permissive", "max_line_length": 96, "num_lines": 237, "path": "/core/java/com/android/internal/util/ProgressReporter.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2016 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.internal.util;\r\n\r\nimport android.annotation.Nullable;\r\nimport android.content.Intent;\r\nimport android.os.Bundle;\r\nimport android.os.IProgressListener;\r\nimport android.os.RemoteCallbackList;\r\nimport android.os.RemoteException;\r\nimport android.util.MathUtils;\r\n\r\nimport com.android.internal.annotations.GuardedBy;\r\n\r\n/**\r\n * Tracks and reports progress of a single task to a {@link IProgressListener}.\r\n * The reported progress of a task ranges from 0-100, but the task can be\r\n * segmented into smaller pieces using {@link #startSegment(int)} and\r\n * {@link #endSegment(int[])}, and segments can be nested.\r\n * <p>\r\n * Here's an example in action; when finished the overall task progress will be\r\n * at 60.\r\n *\r\n * <pre>\r\n * prog.setProgress(20);\r\n * {\r\n * final int[] restore = prog.startSegment(40);\r\n * for (int i = 0; i < N; i++) {\r\n * 
prog.setProgress(i, N);\r\n * ...\r\n * }\r\n * prog.endSegment(restore);\r\n * }\r\n * </pre>\r\n *\r\n * @hide\r\n */\r\npublic class ProgressReporter {\r\n private static final int STATE_INIT = 0;\r\n private static final int STATE_STARTED = 1;\r\n private static final int STATE_FINISHED = 2;\r\n\r\n private final int mId;\r\n\r\n @GuardedBy(\"this\")\r\n private final RemoteCallbackList<IProgressListener> mListeners = new RemoteCallbackList<>();\r\n\r\n @GuardedBy(\"this\")\r\n private int mState = STATE_INIT;\r\n @GuardedBy(\"this\")\r\n private int mProgress = 0;\r\n @GuardedBy(\"this\")\r\n private Bundle mExtras = new Bundle();\r\n\r\n /**\r\n * Current segment range: first element is starting progress of this\r\n * segment, second element is length of segment.\r\n */\r\n @GuardedBy(\"this\")\r\n private int[] mSegmentRange = new int[] { 0, 100 };\r\n\r\n /**\r\n * Create a new task with the given identifier. Progress is reported to\r\n * the listeners added via {@link #addListener(IProgressListener)}.\r\n */\r\n public ProgressReporter(int id) {\r\n mId = id;\r\n }\r\n\r\n /**\r\n * Add given listener to watch for progress events. The current state will\r\n * be immediately dispatched to the given listener.\r\n */\r\n public void addListener(@Nullable IProgressListener listener) {\r\n if (listener == null) return;\r\n synchronized (this) {\r\n mListeners.register(listener);\r\n switch (mState) {\r\n case STATE_INIT:\r\n // Nothing has happened yet\r\n break;\r\n case STATE_STARTED:\r\n try {\r\n listener.onStarted(mId, null);\r\n listener.onProgress(mId, mProgress, mExtras);\r\n } catch (RemoteException ignored) {\r\n }\r\n break;\r\n case STATE_FINISHED:\r\n try {\r\n listener.onFinished(mId, null);\r\n } catch (RemoteException ignored) {\r\n }\r\n break;\r\n }\r\n }\r\n }\r\n\r\n /**\r\n * Set the progress of the currently active segment.\r\n *\r\n * @param progress Segment progress between 0-100.\r\n */\r\n public void setProgress(int progress) {\r\n setProgress(progress, 100, null);\r\n }\r\n\r\n /**\r\n * Set the progress of the currently active segment.\r\n *\r\n * @param progress Segment progress between 0-100.\r\n */\r\n public void setProgress(int progress, @Nullable CharSequence title) {\r\n setProgress(progress, 100, title);\r\n }\r\n\r\n /**\r\n * Set the fractional progress of the currently active segment.\r\n */\r\n public void setProgress(int n, int m) {\r\n setProgress(n, m, null);\r\n }\r\n\r\n /**\r\n * Set the fractional progress of the currently active segment.\r\n */\r\n public void setProgress(int n, int m, @Nullable CharSequence title) {\r\n synchronized (this) {\r\n if (mState != STATE_STARTED) {\r\n throw new IllegalStateException(\"Must be started to change progress\");\r\n }\r\n mProgress = mSegmentRange[0]\r\n + MathUtils.constrain((n * mSegmentRange[1]) / m, 0, mSegmentRange[1]);\r\n if (title != null) {\r\n mExtras.putCharSequence(Intent.EXTRA_TITLE, title);\r\n }\r\n notifyProgress(mId, mProgress, mExtras);\r\n }\r\n }\r\n\r\n /**\r\n * Start a new inner segment that will contribute the given range towards\r\n * the currently active segment. 
You must pass the returned value to\r\n * {@link #endSegment(int[])} when finished.\r\n */\r\n public int[] startSegment(int size) {\r\n synchronized (this) {\r\n final int[] lastRange = mSegmentRange;\r\n mSegmentRange = new int[] { mProgress, (size * mSegmentRange[1] / 100) };\r\n return lastRange;\r\n }\r\n }\r\n\r\n /**\r\n * End the current segment.\r\n */\r\n public void endSegment(int[] lastRange) {\r\n synchronized (this) {\r\n mProgress = mSegmentRange[0] + mSegmentRange[1];\r\n mSegmentRange = lastRange;\r\n }\r\n }\r\n\r\n int getProgress() {\r\n return mProgress;\r\n }\r\n\r\n int[] getSegmentRange() {\r\n return mSegmentRange;\r\n }\r\n\r\n /**\r\n * Report this entire task as being started.\r\n */\r\n public void start() {\r\n synchronized (this) {\r\n mState = STATE_STARTED;\r\n notifyStarted(mId, null);\r\n notifyProgress(mId, mProgress, mExtras);\r\n }\r\n }\r\n\r\n /**\r\n * Report this entire task as being finished.\r\n */\r\n public void finish() {\r\n synchronized (this) {\r\n mState = STATE_FINISHED;\r\n notifyFinished(mId, null);\r\n mListeners.kill();\r\n }\r\n }\r\n\r\n private void notifyStarted(int id, Bundle extras) {\r\n for (int i = mListeners.beginBroadcast() - 1; i >= 0; i--) {\r\n try {\r\n mListeners.getBroadcastItem(i).onStarted(id, extras);\r\n } catch (RemoteException ignored) {\r\n }\r\n }\r\n mListeners.finishBroadcast();\r\n }\r\n\r\n private void notifyProgress(int id, int progress, Bundle extras) {\r\n for (int i = mListeners.beginBroadcast() - 1; i >= 0; i--) {\r\n try {\r\n mListeners.getBroadcastItem(i).onProgress(id, progress, extras);\r\n } catch (RemoteException ignored) {\r\n }\r\n }\r\n mListeners.finishBroadcast();\r\n }\r\n\r\n private void notifyFinished(int id, Bundle extras) {\r\n for (int i = mListeners.beginBroadcast() - 1; i >= 0; i--) {\r\n try {\r\n mListeners.getBroadcastItem(i).onFinished(id, extras);\r\n } catch (RemoteException ignored) {\r\n }\r\n }\r\n mListeners.finishBroadcast();\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.5845208764076233, "alphanum_fraction": 0.5884429216384888, "avg_line_length": 32.30044937133789, "blob_id": "03f42e4697a053421c118d6b809e16b5f8474278", "content_id": "3add2430b71011777f73c44d6d32c3a04ab1ee3d", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 7649, "license_type": "permissive", "max_line_length": 100, "num_lines": 223, "path": "/core/java/android/hardware/camera2/params/RggbChannelVector.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2014 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage android.hardware.camera2.params;\r\n\r\nimport static com.android.internal.util.Preconditions.*;\r\n\r\n/**\r\n * Immutable class to store a 4-element vector of floats indexable by a bayer RAW 2x2 pixel block.\r\n */\r\npublic final class RggbChannelVector {\r\n 
/**\r\n * The number of color channels in this vector.\r\n */\r\n public static final int COUNT = 4;\r\n\r\n /** Red color channel in a bayer Raw pattern. */\r\n public static final int RED = 0;\r\n\r\n /** Green color channel in a bayer Raw pattern used by the even rows. */\r\n public static final int GREEN_EVEN = 1;\r\n\r\n /** Green color channel in a bayer Raw pattern used by the odd rows. */\r\n public static final int GREEN_ODD = 2;\r\n\r\n /** Blue color channel in a bayer Raw pattern. */\r\n public static final int BLUE = 3;\r\n\r\n /**\r\n * Create a new {@link RggbChannelVector} from an RGGB 2x2 pixel.\r\n *\r\n * <p>All pixel values are considered normalized within {@code [0.0f, 1.0f]}\r\n * (i.e. {@code 1.0f} could be linearized to {@code 255} if converting to a\r\n * non-floating point pixel representation).</p>\r\n *\r\n * <p>All arguments must be finite; NaN and infinity are not allowed.</p>\r\n *\r\n * @param red red pixel\r\n * @param greenEven green pixel (even row)\r\n * @param greenOdd green pixel (odd row)\r\n * @param blue blue pixel\r\n *\r\n * @throws IllegalArgumentException if any of the arguments were not finite\r\n */\r\n public RggbChannelVector(final float red, final float greenEven, final float greenOdd,\r\n final float blue) {\r\n mRed = checkArgumentFinite(red, \"red\");\r\n mGreenEven = checkArgumentFinite(greenEven, \"greenEven\");\r\n mGreenOdd = checkArgumentFinite(greenOdd, \"greenOdd\");\r\n mBlue = checkArgumentFinite(blue, \"blue\");\r\n }\r\n\r\n /**\r\n * Get the red component.\r\n *\r\n * @return a floating point value (guaranteed to be finite)\r\n */\r\n public final float getRed() {\r\n return mRed;\r\n }\r\n\r\n /**\r\n * Get the green (even rows) component.\r\n *\r\n * @return a floating point value (guaranteed to be finite)\r\n */\r\n public float getGreenEven() {\r\n return mGreenEven;\r\n }\r\n\r\n /**\r\n * Get the green (odd rows) component.\r\n *\r\n * @return a floating point value (guaranteed to be finite)\r\n */\r\n public float getGreenOdd() {\r\n return mGreenOdd;\r\n }\r\n\r\n /**\r\n * Get the blue component.\r\n *\r\n * @return a floating point value (guaranteed to be finite)\r\n */\r\n public float getBlue() {\r\n return mBlue;\r\n }\r\n\r\n /**\r\n * Get the component by the color channel index.\r\n *\r\n * <p>{@code colorChannel} must be one of {@link #RED}, {@link #GREEN_EVEN}, {@link #GREEN_ODD},\r\n * {@link #BLUE}.</p>\r\n *\r\n * @param colorChannel greater than or equal to {@code 0} and less than {@link #COUNT}\r\n * @return a floating point value (guaranteed to be finite)\r\n *\r\n * @throws IllegalArgumentException if {@code colorChannel} was out of range\r\n */\r\n public float getComponent(final int colorChannel) {\r\n if (colorChannel < 0 || colorChannel >= COUNT) {\r\n throw new IllegalArgumentException(\"Color channel out of range\");\r\n }\r\n\r\n switch (colorChannel) {\r\n case RED:\r\n return mRed;\r\n case GREEN_EVEN:\r\n return mGreenEven;\r\n case GREEN_ODD:\r\n return mGreenOdd;\r\n case BLUE:\r\n return mBlue;\r\n default:\r\n throw new AssertionError(\"Unhandled case \" + colorChannel);\r\n }\r\n }\r\n\r\n /**\r\n * Copy the vector into the destination in the order {@code [R, Geven, Godd, B]}.\r\n *\r\n * @param destination\r\n * an array big enough to hold at least {@value #COUNT} elements after the\r\n * {@code offset}\r\n * @param offset\r\n * a non-negative offset into the array\r\n *\r\n * @throws NullPointerException\r\n * If {@code destination} was {@code null}\r\n * @throws 
ArrayIndexOutOfBoundsException\r\n * If there's not enough room to write the elements at the specified destination and\r\n * offset.\r\n */\r\n public void copyTo(final float[] destination, final int offset) {\r\n checkNotNull(destination, \"destination must not be null\");\r\n if (destination.length - offset < COUNT) {\r\n throw new ArrayIndexOutOfBoundsException(\"destination too small to fit elements\");\r\n }\r\n\r\n destination[offset + RED] = mRed;\r\n destination[offset + GREEN_EVEN] = mGreenEven;\r\n destination[offset + GREEN_ODD] = mGreenOdd;\r\n destination[offset + BLUE] = mBlue;\r\n }\r\n\r\n /**\r\n * Check if this {@link RggbChannelVector} is equal to another {@link RggbChannelVector}.\r\n *\r\n * <p>Two vectors are equal if and only if each of the respective elements is equal.</p>\r\n *\r\n * @return {@code true} if the objects were equal, {@code false} otherwise\r\n */\r\n @Override\r\n public boolean equals(final Object obj) {\r\n if (obj == null) {\r\n return false;\r\n } else if (this == obj) {\r\n return true;\r\n } else if (obj instanceof RggbChannelVector) {\r\n final RggbChannelVector other = (RggbChannelVector) obj;\r\n return mRed == other.mRed &&\r\n mGreenEven == other.mGreenEven &&\r\n mGreenOdd == other.mGreenOdd &&\r\n mBlue == other.mBlue;\r\n }\r\n return false;\r\n }\r\n\r\n /**\r\n * {@inheritDoc}\r\n */\r\n @Override\r\n public int hashCode() {\r\n return Float.floatToIntBits(mRed) ^\r\n Float.floatToIntBits(mGreenEven) ^\r\n Float.floatToIntBits(mGreenOdd) ^\r\n Float.floatToIntBits(mBlue);\r\n }\r\n\r\n /**\r\n * Return the RggbChannelVector as a string representation.\r\n *\r\n * <p> {@code \"RggbChannelVector{R:%f, G_even:%f, G_odd:%f, B:%f}\"}, where each\r\n * {@code %f} respectively represents one of the four color channels. </p>\r\n *\r\n * @return string representation of {@link RggbChannelVector}\r\n */\r\n @Override\r\n public String toString() {\r\n return String.format(\"RggbChannelVector%s\", toShortString());\r\n }\r\n\r\n /**\r\n * Return the RggbChannelVector as a string in compact form.\r\n *\r\n * <p> {@code \"{R:%f, G_even:%f, G_odd:%f, B:%f}\"}, where each {@code %f}\r\n * respectively represents one of the four color channels. 
</p>\r\n *\r\n * @return compact string representation of {@link RggbChannelVector}\r\n */\r\n private String toShortString() {\r\n return String.format(\"{R:%f, G_even:%f, G_odd:%f, B:%f}\",\r\n mRed, mGreenEven, mGreenOdd, mBlue);\r\n }\r\n\r\n private final float mRed;\r\n private final float mGreenEven;\r\n private final float mGreenOdd;\r\n private final float mBlue;\r\n}\r\n" }, { "alpha_fraction": 0.7638888955116272, "alphanum_fraction": 0.8055555820465088, "avg_line_length": 22.66666603088379, "blob_id": "681f9e9ab2bcad1078c28077d74220da1d4c91bb", "content_id": "4a8ca60c602189fd9ffdf8bb0d3d37231fd4213a", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 72, "license_type": "permissive", "max_line_length": 38, "num_lines": 3, "path": "/docs/source.properties", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "Pkg.Revision=24.0\r\nPkg.Desc=Android offline API reference\r\nPkg.Path=docs" }, { "alpha_fraction": 0.7281022071838379, "alphanum_fraction": 0.7281022071838379, "avg_line_length": 25.399999618530273, "blob_id": "ae6f67f9d1966d34529e65becb8cc4f6181f45a9", "content_id": "54fd1f53fbabac9a31e8dc18e7923fedd213f6df", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 548, "license_type": "permissive", "max_line_length": 84, "num_lines": 20, "path": "/core/tests/privacytests/Android.mk", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "LOCAL_PATH:= $(call my-dir)\r\ninclude $(CLEAR_VARS)\r\n\r\n# We only want this apk build for tests.\r\nLOCAL_MODULE_TAGS := tests\r\n\r\n# Include all test java files.\r\nLOCAL_SRC_FILES := \\\r\n $(call all-java-files-under, src)\r\n\r\nLOCAL_STATIC_JAVA_LIBRARIES := junit rappor-tests androidx.test.rules truth-prebuilt\r\n\r\nLOCAL_JAVA_LIBRARIES := android.test.runner\r\nLOCAL_PACKAGE_NAME := FrameworksPrivacyLibraryTests\r\nLOCAL_PRIVATE_PLATFORM_APIS := true\r\n\r\nLOCAL_CERTIFICATE := platform\r\nLOCAL_COMPATIBILITY_SUITE := device-tests\r\n\r\ninclude $(BUILD_PACKAGE)\r\n" }, { "alpha_fraction": 0.7202572226524353, "alphanum_fraction": 0.7459806799888611, "avg_line_length": 26.930233001708984, "blob_id": "9a7448f9e771bcf11a6bcb09813697e96e859167", "content_id": "830169ae80a07d6a34d8a9b45bb51c6e057a0371", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 1244, "license_type": "permissive", "max_line_length": 80, "num_lines": 43, "path": "/data/sounds/README.txt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "This README describes the audio assets, and how they relate to each other.\r\n\r\nThe product .mk references one of the AudioPackage*.mk,\r\nwhich installs the appropriate assets into the destination directory.\r\n\r\nFor UI sound effects,\r\nframeworks/base/media/java/android/media/AudioService.java array\r\nSOUND_EFFECT_FILES contains a hard-coded list of asset filenames, stored\r\nin directory SOUND_EFFECTS_PATH.\r\n\r\nTouch sounds\r\n------------\r\n\r\neffects/Effect_Tick.ogg\r\n old, referenced by AudioPackage[2345].mk OriginalAudio.mk\r\n\r\neffects/ogg/Effect_Tick.ogg\r\n new, referenced by AudioPackage[6789].mk AudioPackage7alt.mk AudioPackage10.mk\r\n\r\neffects/ogg/Effect_Tick_48k.ogg\r\n oggdec -o temp.wav ogg/Effect_Tick.ogg\r\n sox temp.wav -r 48000 
temp48k.wav\r\n oggenc -b 80 -o ogg/Effect_Tick_48k.ogg temp48k.wav\r\n\r\neffects/wav/Effect_Tick.wav\r\n does not appear to be related to the other files in any obvious way\r\n\r\nVideo recording\r\n---------------\r\n\r\n./effects/ogg/VideoStop_48k.ogg\r\n unused\r\n\r\nNFC\r\n---\r\n\r\n./effects/ogg/NFCFailure.ogg\r\n./effects/ogg/NFCInitiated.ogg\r\n./effects/ogg/NFCSuccess.ogg\r\n./effects/ogg/NFCTransferComplete.ogg\r\n./effects/ogg/NFCTransferInitiated.ogg\r\n\r\nreferenced in AudioPackage14.mk (= AudioPackage13.mk + NFC sounds).\r\n" }, { "alpha_fraction": 0.576138436794281, "alphanum_fraction": 0.6118397116661072, "avg_line_length": 35.86206817626953, "blob_id": "2abe096ca454f9cfaf7983fce084ff4c70622dbd", "content_id": "c4aebbaba42679523debc8ac003d4fb32f9e49c9", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 5490, "license_type": "permissive", "max_line_length": 100, "num_lines": 145, "path": "/tests/HwAccelerationTest/src/com/android/test/hwui/PathsActivity.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2010 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.test.hwui;\r\n\r\nimport android.app.Activity;\r\nimport android.content.Context;\r\nimport android.graphics.Bitmap;\r\nimport android.graphics.BitmapFactory;\r\nimport android.graphics.BitmapShader;\r\nimport android.graphics.Canvas;\r\nimport android.graphics.Paint;\r\nimport android.graphics.Path;\r\nimport android.graphics.RectF;\r\nimport android.os.Bundle;\r\nimport android.view.View;\r\n\r\n@SuppressWarnings({\"UnusedDeclaration\"})\r\npublic class PathsActivity extends Activity {\r\n @Override\r\n protected void onCreate(Bundle savedInstanceState) {\r\n super.onCreate(savedInstanceState);\r\n final PathsView view = new PathsView(this);\r\n setContentView(view);\r\n }\r\n\r\n public static class PathsView extends View {\r\n private final Bitmap mBitmap1;\r\n private final Paint mSmallPaint;\r\n private final Paint mMediumPaint;\r\n private final Paint mLargePaint;\r\n private final BitmapShader mShader;\r\n private final Path mPath;\r\n private final RectF mPathBounds;\r\n private final Paint mBoundsPaint;\r\n private final Bitmap mBitmap;\r\n private final float mOffset;\r\n private final Paint mLinePaint;\r\n\r\n public PathsView(Context c) {\r\n super(c);\r\n\r\n mBitmap1 = BitmapFactory.decodeResource(c.getResources(), R.drawable.sunset1);\r\n\r\n mSmallPaint = new Paint();\r\n mSmallPaint.setAntiAlias(true);\r\n mSmallPaint.setColor(0xffff0000);\r\n mSmallPaint.setStrokeWidth(1.0f);\r\n mSmallPaint.setStyle(Paint.Style.STROKE);\r\n\r\n mLinePaint = new Paint();\r\n mLinePaint.setAntiAlias(true);\r\n mLinePaint.setColor(0xffff00ff);\r\n mLinePaint.setStrokeWidth(1.0f);\r\n mLinePaint.setStyle(Paint.Style.STROKE);\r\n\r\n mMediumPaint = new Paint();\r\n 
mMediumPaint.setAntiAlias(true);\r\n mMediumPaint.setColor(0xe00000ff);\r\n mMediumPaint.setStrokeWidth(10.0f);\r\n mMediumPaint.setStyle(Paint.Style.STROKE);\r\n\r\n mLargePaint = new Paint();\r\n mLargePaint.setAntiAlias(true);\r\n mLargePaint.setColor(0x7f00ff00);\r\n mLargePaint.setStrokeWidth(15.0f);\r\n mLargePaint.setStyle(Paint.Style.FILL);\r\n\r\n mShader = new BitmapShader(mBitmap1, BitmapShader.TileMode.MIRROR,\r\n BitmapShader.TileMode.MIRROR);\r\n\r\n mPath = new Path();\r\n mPath.moveTo(0.0f, 0.0f);\r\n mPath.cubicTo(0.0f, 0.0f, 100.0f, 150.0f, 100.0f, 200.0f);\r\n mPath.cubicTo(100.0f, 200.0f, 50.0f, 300.0f, -80.0f, 200.0f);\r\n mPath.cubicTo(-80.0f, 200.0f, 100.0f, 200.0f, 200.0f, 0.0f);\r\n\r\n mPathBounds = new RectF();\r\n mPath.computeBounds(mPathBounds, true);\r\n\r\n mBoundsPaint = new Paint();\r\n mBoundsPaint.setColor(0x4000ff00);\r\n\r\n mOffset = mMediumPaint.getStrokeWidth();\r\n final int width = (int) (mPathBounds.width() + mOffset * 3.0f + 0.5f);\r\n final int height = (int) (mPathBounds.height() + mOffset * 3.0f + 0.5f);\r\n mBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ALPHA_8);\r\n Canvas canvas = new Canvas(mBitmap);\r\n canvas.translate(-mPathBounds.left + mOffset * 1.5f, -mPathBounds.top + mOffset * 1.5f);\r\n canvas.drawPath(mPath, mMediumPaint);\r\n canvas.setBitmap(null);\r\n }\r\n\r\n @Override\r\n protected void onDraw(Canvas canvas) {\r\n super.onDraw(canvas);\r\n\r\n canvas.drawARGB(255, 255, 255, 255);\r\n\r\n canvas.save();\r\n canvas.translate(200.0f, 60.0f);\r\n canvas.drawPath(mPath, mSmallPaint);\r\n\r\n canvas.translate(350.0f, 0.0f);\r\n canvas.drawPath(mPath, mMediumPaint);\r\n\r\n mLargePaint.setShader(mShader);\r\n canvas.translate(350.0f, 0.0f);\r\n canvas.drawPath(mPath, mLargePaint);\r\n mLargePaint.setShader(null);\r\n canvas.restore();\r\n\r\n canvas.save();\r\n canvas.translate(200.0f, 360.0f);\r\n canvas.drawPath(mPath, mSmallPaint);\r\n canvas.drawRect(mPathBounds, mBoundsPaint);\r\n\r\n canvas.translate(350.0f, 0.0f);\r\n canvas.drawBitmap(mBitmap, mPathBounds.left - mOffset * 1.5f,\r\n mPathBounds.top - mOffset * 1.5f, null);\r\n canvas.drawRect(mPathBounds, mBoundsPaint);\r\n canvas.drawLine(0.0f, -360.0f, 0.0f, 500.0f, mLinePaint);\r\n\r\n mLargePaint.setShader(mShader);\r\n canvas.translate(350.0f, 0.0f);\r\n canvas.drawPath(mPath, mLargePaint);\r\n canvas.drawRect(mPathBounds, mBoundsPaint);\r\n mLargePaint.setShader(null);\r\n canvas.restore();\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.710991382598877, "alphanum_fraction": 0.7133620977401733, "avg_line_length": 38, "blob_id": "b01e6f5fa3106240f810a8057ef90406dee8f642", "content_id": "ce4e744d0529054326817ffd53a78fc8d1b98479", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 4640, "license_type": "permissive", "max_line_length": 97, "num_lines": 116, "path": "/core/tests/coretests/src/com/android/internal/policy/DecorContextTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2019 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS 
IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.internal.policy;\r\n\r\nimport static android.view.Display.DEFAULT_DISPLAY;\r\n\r\nimport static org.junit.Assert.assertEquals;\r\nimport static org.junit.Assert.assertTrue;\r\n\r\nimport android.app.Activity;\r\nimport android.app.EmptyActivity;\r\nimport android.content.Context;\r\nimport android.hardware.display.DisplayManagerGlobal;\r\nimport android.platform.test.annotations.Presubmit;\r\nimport android.view.Display;\r\nimport android.view.DisplayAdjustments;\r\nimport android.view.DisplayInfo;\r\nimport android.view.WindowManager;\r\nimport android.view.WindowManagerImpl;\r\n\r\nimport androidx.test.core.app.ApplicationProvider;\r\nimport androidx.test.filters.SmallTest;\r\nimport androidx.test.rule.ActivityTestRule;\r\nimport androidx.test.runner.AndroidJUnit4;\r\n\r\nimport org.junit.Before;\r\nimport org.junit.Rule;\r\nimport org.junit.Test;\r\nimport org.junit.runner.RunWith;\r\n\r\n/**\r\n * Tests {@link DecorContext}.\r\n */\r\n@SmallTest\r\n@Presubmit\r\n@RunWith(AndroidJUnit4.class)\r\npublic final class DecorContextTest {\r\n private Context mContext;\r\n private static final int EXTERNAL_DISPLAY = DEFAULT_DISPLAY + 1;\r\n\r\n @Rule\r\n public ActivityTestRule<EmptyActivity> mActivityRule =\r\n new ActivityTestRule<>(EmptyActivity.class);\r\n\r\n @Before\r\n public void setUp() {\r\n mContext = ApplicationProvider.getApplicationContext();\r\n }\r\n\r\n @Test\r\n public void testDecorContextWithDefaultDisplay() {\r\n Display defaultDisplay = new Display(DisplayManagerGlobal.getInstance(), DEFAULT_DISPLAY,\r\n new DisplayInfo(), DisplayAdjustments.DEFAULT_DISPLAY_ADJUSTMENTS);\r\n final Context defaultDisplayContext = mContext.createDisplayContext(defaultDisplay);\r\n final PhoneWindow window = new PhoneWindow(defaultDisplayContext);\r\n DecorContext context = new DecorContext(mContext.getApplicationContext(), window);\r\n\r\n assertDecorContextDisplay(DEFAULT_DISPLAY, context);\r\n }\r\n\r\n @Test\r\n public void testDecorContextWithExternalDisplay() {\r\n Display display = new Display(DisplayManagerGlobal.getInstance(), EXTERNAL_DISPLAY,\r\n new DisplayInfo(), DisplayAdjustments.DEFAULT_DISPLAY_ADJUSTMENTS);\r\n final Context defaultDisplayContext = mContext.createDisplayContext(display);\r\n final PhoneWindow window = new PhoneWindow(defaultDisplayContext);\r\n DecorContext context = new DecorContext(mContext.getApplicationContext(), window);\r\n\r\n assertDecorContextDisplay(EXTERNAL_DISPLAY, context);\r\n }\r\n\r\n private static void assertDecorContextDisplay(int expectedDisplayId,\r\n DecorContext decorContext) {\r\n Display associatedDisplay = decorContext.getDisplay();\r\n assertEquals(expectedDisplayId, associatedDisplay.getDisplayId());\r\n }\r\n\r\n @Test\r\n public void testGetWindowManagerFromVisualDecorContext() throws Throwable {\r\n mActivityRule.runOnUiThread(() -> {\r\n Activity activity = mActivityRule.getActivity();\r\n final DecorContext decorContext = new DecorContext(mContext.getApplicationContext(),\r\n (PhoneWindow) activity.getWindow());\r\n WindowManagerImpl actualWm = (WindowManagerImpl)\r\n decorContext.getSystemService(WindowManager.class);\r\n WindowManagerImpl expectedWm = (WindowManagerImpl)\r\n activity.getSystemService(WindowManager.class);\r\n // Verify that window manager is from activity not application 
context.\r\n assertEquals(expectedWm.mContext, actualWm.mContext);\r\n });\r\n }\r\n\r\n @Test\r\n public void testIsUiContextFromVisualDecorContext() throws Throwable {\r\n mActivityRule.runOnUiThread(() -> {\r\n Activity activity = mActivityRule.getActivity();\r\n final DecorContext decorContext = new DecorContext(mContext.getApplicationContext(),\r\n (PhoneWindow) activity.getWindow());\r\n assertTrue(decorContext.isUiContext());\r\n });\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.7351778745651245, "alphanum_fraction": 0.7430830001831055, "avg_line_length": 29.625, "blob_id": "f7dbf0042d0a063ed133e8ce068647cd0ed4d506", "content_id": "e7bba1136612be534b74aa1bbadf1eb94536b34f", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 253, "license_type": "permissive", "max_line_length": 96, "num_lines": 8, "path": "/libs/androidfw/tests/data/styles/build", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "#!/bin/bash\r\n\r\nset -e\r\n\r\nPATH_TO_FRAMEWORK_RES=${ANDROID_BUILD_TOP}/prebuilts/sdk/current/public/android.jar\r\n\r\naapt2 compile -o compiled.flata --dir res\r\naapt2 link -o styles.apk --manifest AndroidManifest.xml -I $PATH_TO_FRAMEWORK_RES compiled.flata\r\n" }, { "alpha_fraction": 0.7355642914772034, "alphanum_fraction": 0.740813672542572, "avg_line_length": 37.07692337036133, "blob_id": "d34cb6fc84a985f1d7b7b246a2464727f1dcbee5", "content_id": "306baaf35dd7ca57b1773b50006a67dc421b83b2", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1524, "license_type": "permissive", "max_line_length": 100, "num_lines": 39, "path": "/test-base/src/android/test/UiThreadTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2008 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage android.test;\r\n\r\nimport java.lang.annotation.Retention;\r\nimport java.lang.annotation.RetentionPolicy;\r\nimport java.lang.annotation.Target;\r\nimport java.lang.annotation.ElementType;\r\n\r\n/**\r\n * This annotation can be used on an {@link InstrumentationTestCase}'s test methods.\r\n * When the annotation is present, the test method is executed on the application's\r\n * main thread (or UI thread.) Note that instrumentation methods may not be used\r\n * when this annotation is present.\r\n *\r\n * @deprecated Use\r\n * <a href=\"{@docRoot}reference/android/support/test/annotation/UiThreadTest.html\">\r\n * UiThreadTest</a> instead. 
New tests should be written using the\r\n * <a href=\"{@docRoot}tools/testing-support-library/index.html\">Android Testing Support Library</a>.\r\n */\r\n@Deprecated\r\n@Target(ElementType.METHOD)\r\n@Retention(RetentionPolicy.RUNTIME)\r\npublic @interface UiThreadTest {\r\n}\r\n" }, { "alpha_fraction": 0.6726039052009583, "alphanum_fraction": 0.6853265762329102, "avg_line_length": 34.84375, "blob_id": "99b0c140e3cf84e2c6ac066fd5ce0c15da4eb2f5", "content_id": "6959207d1d066b0eb0a196c4c0951d8b3bbe7a8e", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 1179, "license_type": "permissive", "max_line_length": 90, "num_lines": 32, "path": "/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/imgprocutil.h", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2012 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n// Some native low-level image processing functions.\r\n\r\n\r\n#ifndef ANDROID_FILTERFW_JNI_IMGPROCUTIL_H\r\n#define ANDROID_FILTERFW_JNI_IMGPROCUTIL_H\r\n\r\ninline int getIntensityFast(int R, int G, int B) {\r\n return (R + R + R + B + G + G + G + G) >> 3; // see http://stackoverflow.com/a/596241\r\n}\r\n\r\ninline int clamp(int min, int val, int max) {\r\n return val < min ? min : (val > max ? 
max : val);\r\n // Note that for performance reasons, this function does *not* check if min < max!\r\n}\r\n\r\n#endif // ANDROID_FILTERFW_JNI_IMGPROCUTIL_H\r\n" }, { "alpha_fraction": 0.6883543729782104, "alphanum_fraction": 0.6955600380897522, "avg_line_length": 34.87109375, "blob_id": "ab95983e08f3c617dc14c12ebdc9c51e897804e5", "content_id": "6cc638f47d4ee6a6c17221c472b1852cf0e2f383", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 9437, "license_type": "permissive", "max_line_length": 99, "num_lines": 256, "path": "/packages/SystemUI/tests/src/com/android/systemui/broadcast/ActionReceiverTest.kt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2020 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.systemui.broadcast\r\n\r\nimport android.content.BroadcastReceiver\r\nimport android.content.Context\r\nimport android.content.Intent\r\nimport android.content.IntentFilter\r\nimport android.os.UserHandle\r\nimport android.test.suitebuilder.annotation.SmallTest\r\nimport android.testing.AndroidTestingRunner\r\nimport android.testing.TestableLooper\r\nimport com.android.systemui.SysuiTestCase\r\nimport com.android.systemui.broadcast.logging.BroadcastDispatcherLogger\r\nimport com.android.systemui.util.concurrency.FakeExecutor\r\nimport com.android.systemui.util.mockito.any\r\nimport com.android.systemui.util.mockito.capture\r\nimport com.android.systemui.util.mockito.eq\r\nimport com.android.systemui.util.time.FakeSystemClock\r\nimport org.junit.Assert.assertFalse\r\nimport org.junit.Assert.assertTrue\r\nimport org.junit.Before\r\nimport org.junit.Test\r\nimport org.junit.runner.RunWith\r\nimport org.mockito.ArgumentCaptor\r\nimport org.mockito.ArgumentMatchers.anyInt\r\nimport org.mockito.ArgumentMatchers.anyString\r\nimport org.mockito.Captor\r\nimport org.mockito.Mock\r\nimport org.mockito.Mockito\r\nimport org.mockito.Mockito.mock\r\nimport org.mockito.Mockito.never\r\nimport org.mockito.Mockito.verify\r\nimport org.mockito.MockitoAnnotations\r\nimport java.lang.IllegalArgumentException\r\nimport java.lang.IllegalStateException\r\nimport java.util.concurrent.Executor\r\n\r\n@RunWith(AndroidTestingRunner::class)\r\[email protected]\r\n@SmallTest\r\nclass ActionReceiverTest : SysuiTestCase() {\r\n\r\n companion object {\r\n private const val ACTION1 = \"TEST_ACTION1\"\r\n private const val ACTION2 = \"TEST_ACTION2\"\r\n private const val CATEGORY = \"TEST_CATEGORY\"\r\n private val USER = UserHandle.of(0)\r\n private fun <T : Any> sameNotNull(arg: T): T = Mockito.same(arg) ?: arg\r\n\r\n fun IntentFilter.matchesOther(it: IntentFilter): Boolean {\r\n val actions = actionsIterator()?.asSequence()?.toSet() ?: emptySet()\r\n val categories = categoriesIterator()?.asSequence()?.toSet() ?: emptySet()\r\n return 
(it.actionsIterator()?.asSequence()?.toSet() ?: emptySet()) == actions &&\r\n (it.categoriesIterator()?.asSequence()?.toSet() ?: emptySet()) == categories &&\r\n it.countDataAuthorities() == 0 &&\r\n it.countDataPaths() == 0 &&\r\n it.countDataSchemes() == 0 &&\r\n it.countDataTypes() == 0 &&\r\n it.countMimeGroups() == 0 &&\r\n it.priority == 0\r\n }\r\n }\r\n\r\n @Mock\r\n private lateinit var registerFunction: BroadcastReceiver.(IntentFilter) -> Unit\r\n @Mock\r\n private lateinit var unregisterFunction: BroadcastReceiver.() -> Unit\r\n @Mock\r\n private lateinit var receiver1: BroadcastReceiver\r\n @Mock\r\n private lateinit var receiver2: BroadcastReceiver\r\n @Mock\r\n private lateinit var logger: BroadcastDispatcherLogger\r\n @Captor\r\n private lateinit var intentFilterCaptor: ArgumentCaptor<IntentFilter>\r\n\r\n private lateinit var executor: FakeExecutor\r\n private lateinit var actionReceiver: ActionReceiver\r\n private val directExecutor = Executor { it.run() }\r\n\r\n @Before\r\n fun setUp() {\r\n MockitoAnnotations.initMocks(this)\r\n executor = FakeExecutor(FakeSystemClock())\r\n\r\n actionReceiver = ActionReceiver(\r\n ACTION1,\r\n USER.identifier,\r\n registerFunction,\r\n unregisterFunction,\r\n executor,\r\n logger\r\n )\r\n }\r\n\r\n @Test\r\n fun testStartsUnregistered() {\r\n assertFalse(actionReceiver.registered)\r\n verify(registerFunction, never()).invoke(sameNotNull(actionReceiver),\r\n any(IntentFilter::class.java))\r\n }\r\n\r\n @Test\r\n fun testRegistersOnFirstAdd() {\r\n val receiverData = ReceiverData(receiver1, IntentFilter(ACTION1), directExecutor, USER)\r\n\r\n actionReceiver.addReceiverData(receiverData)\r\n\r\n assertTrue(actionReceiver.registered)\r\n verify(registerFunction).invoke(sameNotNull(actionReceiver), capture(intentFilterCaptor))\r\n\r\n assertTrue(IntentFilter(ACTION1).matchesOther(intentFilterCaptor.value))\r\n }\r\n\r\n @Test\r\n fun testRegistersOnlyOnce() {\r\n val receiverData1 = ReceiverData(receiver1, IntentFilter(ACTION1), directExecutor, USER)\r\n val receiverData2 = ReceiverData(receiver2, IntentFilter(ACTION1), directExecutor, USER)\r\n\r\n actionReceiver.addReceiverData(receiverData1)\r\n actionReceiver.addReceiverData(receiverData2)\r\n\r\n verify(registerFunction).invoke(sameNotNull(actionReceiver), any(IntentFilter::class.java))\r\n }\r\n\r\n @Test\r\n fun testRemovingLastReceiverUnregisters() {\r\n val receiverData = ReceiverData(receiver1, IntentFilter(ACTION1), directExecutor, USER)\r\n\r\n actionReceiver.addReceiverData(receiverData)\r\n\r\n actionReceiver.removeReceiver(receiver1)\r\n\r\n assertFalse(actionReceiver.registered)\r\n verify(unregisterFunction).invoke(sameNotNull(actionReceiver))\r\n }\r\n\r\n @Test\r\n fun testRemovingWhileOtherReceiversDoesntUnregister() {\r\n val receiverData1 = ReceiverData(receiver1, IntentFilter(ACTION1), directExecutor, USER)\r\n val receiverData2 = ReceiverData(receiver2, IntentFilter(ACTION1), directExecutor, USER)\r\n\r\n actionReceiver.addReceiverData(receiverData1)\r\n actionReceiver.addReceiverData(receiverData2)\r\n\r\n actionReceiver.removeReceiver(receiver1)\r\n\r\n assertTrue(actionReceiver.registered)\r\n verify(unregisterFunction, never()).invoke(any(BroadcastReceiver::class.java))\r\n }\r\n\r\n @Test\r\n fun testReceiverHasCategories() {\r\n val filter = IntentFilter(ACTION1)\r\n filter.addCategory(CATEGORY)\r\n\r\n val receiverData = ReceiverData(receiver1, filter, directExecutor, USER)\r\n\r\n actionReceiver.addReceiverData(receiverData)\r\n\r\n 
verify(registerFunction).invoke(sameNotNull(actionReceiver), capture(intentFilterCaptor))\r\n assertTrue(intentFilterCaptor.value.hasCategory(CATEGORY))\r\n }\r\n\r\n @Test(expected = IllegalArgumentException::class)\r\n fun testNotRegisteredWithWrongAction_throwsException() {\r\n val receiverData = ReceiverData(receiver1, IntentFilter(ACTION2), directExecutor, USER)\r\n\r\n actionReceiver.addReceiverData(receiverData)\r\n }\r\n\r\n @Test\r\n fun testReceiverGetsBroadcast() {\r\n val receiverData = ReceiverData(receiver1, IntentFilter(ACTION1), directExecutor, USER)\r\n actionReceiver.addReceiverData(receiverData)\r\n\r\n val intent = Intent(ACTION1)\r\n\r\n actionReceiver.onReceive(mContext, intent)\r\n\r\n executor.runAllReady()\r\n\r\n verify(receiver1).onReceive(any(Context::class.java), sameNotNull(intent))\r\n }\r\n\r\n @Test\r\n fun testReceiverGetsPendingResult() {\r\n val receiverData = ReceiverData(receiver1, IntentFilter(ACTION1), directExecutor, USER)\r\n actionReceiver.addReceiverData(receiverData)\r\n\r\n val intent = Intent(ACTION1)\r\n val pendingResult = mock(BroadcastReceiver.PendingResult::class.java)\r\n\r\n actionReceiver.pendingResult = pendingResult\r\n actionReceiver.onReceive(mContext, intent)\r\n\r\n executor.runAllReady()\r\n verify(receiver1).pendingResult = pendingResult\r\n }\r\n\r\n @Test\r\n fun testBroadcastIsDispatchedInExecutor() {\r\n val executor = FakeExecutor(FakeSystemClock())\r\n val receiverData = ReceiverData(receiver1, IntentFilter(ACTION1), executor, USER)\r\n actionReceiver.addReceiverData(receiverData)\r\n\r\n val intent = Intent(ACTION1)\r\n actionReceiver.onReceive(mContext, intent)\r\n\r\n this.executor.runAllReady()\r\n\r\n verify(receiver1, never()).onReceive(mContext, intent)\r\n\r\n executor.runAllReady()\r\n // Dispatched after executor is processed\r\n verify(receiver1).onReceive(mContext, intent)\r\n }\r\n\r\n @Test\r\n fun testBroadcastReceivedDispatched_logger() {\r\n val receiverData = ReceiverData(receiver1, IntentFilter(ACTION1), directExecutor, USER)\r\n\r\n actionReceiver.addReceiverData(receiverData)\r\n\r\n val intent = Intent(ACTION1)\r\n actionReceiver.onReceive(mContext, intent)\r\n verify(logger).logBroadcastReceived(anyInt(), eq(USER.identifier), eq(intent))\r\n\r\n verify(logger, never()).logBroadcastDispatched(anyInt(), anyString(),\r\n any(BroadcastReceiver::class.java))\r\n\r\n executor.runAllReady()\r\n\r\n verify(logger).logBroadcastDispatched(anyInt(), eq(ACTION1), sameNotNull(receiver1))\r\n }\r\n\r\n @Test(expected = IllegalStateException::class)\r\n fun testBroadcastWithWrongAction_throwsException() {\r\n actionReceiver.onReceive(mContext, Intent(ACTION2))\r\n }\r\n}" }, { "alpha_fraction": 0.7120622396469116, "alphanum_fraction": 0.7276264429092407, "avg_line_length": 34.71428680419922, "blob_id": "cccc3406b0892e384e0109a23228ebff8e9bfe1e", "content_id": "112b9d146604de0992c7b22662a2e1cf6844f708", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 257, "license_type": "permissive", "max_line_length": 79, "num_lines": 7, "path": "/tools/signedconfig/gen_priv_key.sh", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "#!/bin/bash\r\n\r\n# This script acts as a record of how the debug key was generated. 
There should\r\n# be no need to run it again.\r\n\r\nopenssl ecparam -name prime256v1 -genkey -noout -out debug_key.pem\r\nopenssl ec -in debug_key.pem -pubout -out debug_public.pem\r\n" }, { "alpha_fraction": 0.7991596460342407, "alphanum_fraction": 0.7991596460342407, "avg_line_length": 60.6315803527832, "blob_id": "dd91379d268f121f3c62e78a417f09edfd62fcd5", "content_id": "863faedb72f4775ef2ab6c087683f2da653c1fea", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1190, "license_type": "permissive", "max_line_length": 100, "num_lines": 19, "path": "/services/core/java/com/android/server/soundtrigger_middleware/README.md", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "# Sound Trigger Middleware\r\nTODO: Add component description.\r\n\r\n## Notes about thread synchronization\r\nThis component has some tricky thread synchronization considerations due to its layered design and\r\ndue to the fact that it is involved in both in-bound and out-bound calls from / to\r\nexternal components. To avoid potential deadlocks, a strict locking order must be ensured whenever\r\nnesting locks. The order is:\r\n- `SoundTriggerMiddlewareValidation` lock.\r\n- Audio policy service lock. This one is external - it should be assumed to be held whenever we're\r\n inside the `ExternalCaptureStateTracker.setCaptureState()` call stack *AND* to be acquired from\r\n within our calls into `AudioSessionProvider.acquireSession()`.\r\n- `SoundTriggerModule` lock.\r\n\r\nThis dictates careful consideration of callbacks going from `SoundTriggerModule` to\r\n`SoundTriggerMiddlewareValidation` and especially those coming from the `setCaptureState()` path.\r\nWe always invoke those calls outside of the `SoundTriggerModule` lock, so we can lock\r\n`SoundTriggerMiddlewareValidation`. 
However, in the `setCaptureState()` case, we have to use atomics\r\nin `SoundTriggerMiddlewareValidation` and avoid the lock.\r\n" }, { "alpha_fraction": 0.6596812605857849, "alphanum_fraction": 0.668063759803772, "avg_line_length": 43.32776641845703, "blob_id": "74348737597695f59ec44707255a69ecbf0564ff", "content_id": "7a3ad8566f0a714cbcf9e51d0411dbba9287e481", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 21712, "license_type": "permissive", "max_line_length": 122, "num_lines": 479, "path": "/cmds/statsd/tests/statsd_test_util.h", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "// Copyright (C) 2017 The Android Open Source Project\r\n//\r\n// Licensed under the Apache License, Version 2.0 (the \"License\");\r\n// you may not use this file except in compliance with the License.\r\n// You may obtain a copy of the License at\r\n//\r\n// http://www.apache.org/licenses/LICENSE-2.0\r\n//\r\n// Unless required by applicable law or agreed to in writing, software\r\n// distributed under the License is distributed on an \"AS IS\" BASIS,\r\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n// See the License for the specific language governing permissions and\r\n// limitations under the License.\r\n\r\n#pragma once\r\n\r\n#include <aidl/android/os/BnPullAtomCallback.h>\r\n#include <aidl/android/os/IPullAtomCallback.h>\r\n#include <aidl/android/os/IPullAtomResultReceiver.h>\r\n#include <gmock/gmock.h>\r\n#include <gtest/gtest.h>\r\n\r\n#include \"frameworks/base/cmds/statsd/src/stats_log.pb.h\"\r\n#include \"frameworks/base/cmds/statsd/src/statsd_config.pb.h\"\r\n#include \"src/StatsLogProcessor.h\"\r\n#include \"src/hash.h\"\r\n#include \"src/logd/LogEvent.h\"\r\n#include \"src/packages/UidMap.h\"\r\n#include \"src/stats_log_util.h\"\r\n#include \"stats_event.h\"\r\n#include \"statslog_statsdtest.h\"\r\n\r\nnamespace android {\r\nnamespace os {\r\nnamespace statsd {\r\n\r\nusing namespace testing;\r\nusing ::aidl::android::os::BnPullAtomCallback;\r\nusing ::aidl::android::os::IPullAtomCallback;\r\nusing ::aidl::android::os::IPullAtomResultReceiver;\r\nusing android::util::ProtoReader;\r\nusing google::protobuf::RepeatedPtrField;\r\nusing Status = ::ndk::ScopedAStatus;\r\n\r\nconst int SCREEN_STATE_ATOM_ID = util::SCREEN_STATE_CHANGED;\r\nconst int UID_PROCESS_STATE_ATOM_ID = util::UID_PROCESS_STATE_CHANGED;\r\n\r\nenum BucketSplitEvent { APP_UPGRADE, BOOT_COMPLETE };\r\n\r\nclass MockUidMap : public UidMap {\r\npublic:\r\n MOCK_METHOD(int, getHostUidOrSelf, (int uid), (const));\r\n MOCK_METHOD(std::set<int32_t>, getAppUid, (const string& package), (const));\r\n};\r\n\r\n// Converts a ProtoOutputStream to a StatsLogReport proto.\r\nStatsLogReport outputStreamToProto(ProtoOutputStream* proto);\r\n\r\n// Create AtomMatcher proto to simply match a specific atom type.\r\nAtomMatcher CreateSimpleAtomMatcher(const string& name, int atomId);\r\n\r\n// Create AtomMatcher proto for temperature atom.\r\nAtomMatcher CreateTemperatureAtomMatcher();\r\n\r\n// Create AtomMatcher proto for scheduled job state changed.\r\nAtomMatcher CreateScheduledJobStateChangedAtomMatcher();\r\n\r\n// Create AtomMatcher proto for starting a scheduled job.\r\nAtomMatcher CreateStartScheduledJobAtomMatcher();\r\n\r\n// Create AtomMatcher proto for a scheduled job is done.\r\nAtomMatcher CreateFinishScheduledJobAtomMatcher();\r\n\r\n// Create AtomMatcher proto for screen 
brightness state changed.\r\nAtomMatcher CreateScreenBrightnessChangedAtomMatcher();\r\n\r\n// Create AtomMatcher proto for starting battery save mode.\r\nAtomMatcher CreateBatterySaverModeStartAtomMatcher();\r\n\r\n// Create AtomMatcher proto for stopping battery save mode.\r\nAtomMatcher CreateBatterySaverModeStopAtomMatcher();\r\n\r\n// Create AtomMatcher proto for battery state none mode.\r\nAtomMatcher CreateBatteryStateNoneMatcher();\r\n\r\n// Create AtomMatcher proto for battery state usb mode.\r\nAtomMatcher CreateBatteryStateUsbMatcher();\r\n\r\n// Create AtomMatcher proto for process state changed.\r\nAtomMatcher CreateUidProcessStateChangedAtomMatcher();\r\n\r\n// Create AtomMatcher proto for acquiring wakelock.\r\nAtomMatcher CreateAcquireWakelockAtomMatcher();\r\n\r\n// Create AtomMatcher proto for releasing wakelock.\r\nAtomMatcher CreateReleaseWakelockAtomMatcher();\r\n\r\n// Create AtomMatcher proto for screen turned on.\r\nAtomMatcher CreateScreenTurnedOnAtomMatcher();\r\n\r\n// Create AtomMatcher proto for screen turned off.\r\nAtomMatcher CreateScreenTurnedOffAtomMatcher();\r\n\r\n// Create AtomMatcher proto for app sync turned on.\r\nAtomMatcher CreateSyncStartAtomMatcher();\r\n\r\n// Create AtomMatcher proto for app sync turned off.\r\nAtomMatcher CreateSyncEndAtomMatcher();\r\n\r\n// Create AtomMatcher proto for app sync moves to background.\r\nAtomMatcher CreateMoveToBackgroundAtomMatcher();\r\n\r\n// Create AtomMatcher proto for app sync moves to foreground.\r\nAtomMatcher CreateMoveToForegroundAtomMatcher();\r\n\r\n// Create AtomMatcher proto for process crashes.\r\nAtomMatcher CreateProcessCrashAtomMatcher();\r\n\r\n// Create Predicate proto for screen is on.\r\nPredicate CreateScreenIsOnPredicate();\r\n\r\n// Create Predicate proto for screen is off.\r\nPredicate CreateScreenIsOffPredicate();\r\n\r\n// Create Predicate proto for a running scheduled job.\r\nPredicate CreateScheduledJobPredicate();\r\n\r\n// Create Predicate proto for battery saver mode.\r\nPredicate CreateBatterySaverModePredicate();\r\n\r\n// Create Predicate proto for device unplugged mode.\r\nPredicate CreateDeviceUnpluggedPredicate();\r\n\r\n// Create Predicate proto for holding wakelock.\r\nPredicate CreateHoldingWakelockPredicate();\r\n\r\n// Create a Predicate proto for app syncing.\r\nPredicate CreateIsSyncingPredicate();\r\n\r\n// Create a Predicate proto for app is in background.\r\nPredicate CreateIsInBackgroundPredicate();\r\n\r\n// Create State proto for screen state atom.\r\nState CreateScreenState();\r\n\r\n// Create State proto for uid process state atom.\r\nState CreateUidProcessState();\r\n\r\n// Create State proto for overlay state atom.\r\nState CreateOverlayState();\r\n\r\n// Create State proto for screen state atom with on/off map.\r\nState CreateScreenStateWithOnOffMap(int64_t screenOnId, int64_t screenOffId);\r\n\r\n// Create State proto for screen state atom with simple on/off map.\r\nState CreateScreenStateWithSimpleOnOffMap(int64_t screenOnId, int64_t screenOffId);\r\n\r\n// Create StateGroup proto for ScreenState ON group\r\nStateMap_StateGroup CreateScreenStateOnGroup(int64_t screenOnId);\r\n\r\n// Create StateGroup proto for ScreenState OFF group\r\nStateMap_StateGroup CreateScreenStateOffGroup(int64_t screenOffId);\r\n\r\n// Create StateGroup proto for simple ScreenState ON group\r\nStateMap_StateGroup CreateScreenStateSimpleOnGroup(int64_t screenOnId);\r\n\r\n// Create StateGroup proto for simple ScreenState OFF group\r\nStateMap_StateGroup 
CreateScreenStateSimpleOffGroup(int64_t screenOffId);\r\n\r\n// Create StateMap proto for ScreenState ON/OFF map\r\nStateMap CreateScreenStateOnOffMap(int64_t screenOnId, int64_t screenOffId);\r\n\r\n// Create StateMap proto for simple ScreenState ON/OFF map\r\nStateMap CreateScreenStateSimpleOnOffMap(int64_t screenOnId, int64_t screenOffId);\r\n\r\n// Add a predicate to the predicate combination.\r\nvoid addPredicateToPredicateCombination(const Predicate& predicate, Predicate* combination);\r\n\r\n// Create dimensions from primitive fields.\r\nFieldMatcher CreateDimensions(const int atomId, const std::vector<int>& fields);\r\n\r\n// Create dimensions by attribution uid and tag.\r\nFieldMatcher CreateAttributionUidAndTagDimensions(const int atomId,\r\n const std::vector<Position>& positions);\r\n\r\n// Create dimensions by attribution uid only.\r\nFieldMatcher CreateAttributionUidDimensions(const int atomId,\r\n const std::vector<Position>& positions);\r\n\r\nFieldMatcher CreateAttributionUidAndOtherDimensions(const int atomId,\r\n const std::vector<Position>& positions,\r\n const std::vector<int>& fields);\r\n\r\n// START: get primary key functions\r\n// These functions take in atom field information and create FieldValues which are stored in the\r\n// given HashableDimensionKey.\r\nvoid getUidProcessKey(int uid, HashableDimensionKey* key);\r\n\r\nvoid getOverlayKey(int uid, string packageName, HashableDimensionKey* key);\r\n\r\nvoid getPartialWakelockKey(int uid, const std::string& tag, HashableDimensionKey* key);\r\n\r\nvoid getPartialWakelockKey(int uid, HashableDimensionKey* key);\r\n// END: get primary key functions\r\n\r\nvoid writeAttribution(AStatsEvent* statsEvent, const vector<int>& attributionUids,\r\n const vector<string>& attributionTags);\r\n\r\n// Builds statsEvent to get buffer that is parsed into logEvent then releases statsEvent.\r\nvoid parseStatsEventToLogEvent(AStatsEvent* statsEvent, LogEvent* logEvent);\r\n\r\nshared_ptr<LogEvent> CreateTwoValueLogEvent(int atomId, int64_t eventTimeNs, int32_t value1,\r\n int32_t value2);\r\n\r\nvoid CreateTwoValueLogEvent(LogEvent* logEvent, int atomId, int64_t eventTimeNs, int32_t value1,\r\n int32_t value2);\r\n\r\nshared_ptr<LogEvent> CreateThreeValueLogEvent(int atomId, int64_t eventTimeNs, int32_t value1,\r\n int32_t value2, int32_t value3);\r\n\r\nvoid CreateThreeValueLogEvent(LogEvent* logEvent, int atomId, int64_t eventTimeNs, int32_t value1,\r\n int32_t value2, int32_t value3);\r\n\r\n// The repeated value log event helpers create a log event with two int fields, both\r\n// set to the same value. 
This is useful for testing metrics that are only interested\r\n// in the value of the second field but still need the first field to be populated.\r\nstd::shared_ptr<LogEvent> CreateRepeatedValueLogEvent(int atomId, int64_t eventTimeNs,\r\n                                                      int32_t value);\r\n\r\nvoid CreateRepeatedValueLogEvent(LogEvent* logEvent, int atomId, int64_t eventTimeNs,\r\n                                 int32_t value);\r\n\r\nstd::shared_ptr<LogEvent> CreateNoValuesLogEvent(int atomId, int64_t eventTimeNs);\r\n\r\nvoid CreateNoValuesLogEvent(LogEvent* logEvent, int atomId, int64_t eventTimeNs);\r\n\r\nstd::shared_ptr<LogEvent> makeUidLogEvent(int atomId, int64_t eventTimeNs, int uid, int data1,\r\n                                          int data2);\r\n\r\nstd::shared_ptr<LogEvent> makeAttributionLogEvent(int atomId, int64_t eventTimeNs,\r\n                                                  const vector<int>& uids,\r\n                                                  const vector<string>& tags, int data1, int data2);\r\n\r\nsp<MockUidMap> makeMockUidMapForOneHost(int hostUid, const vector<int>& isolatedUids);\r\n\r\nsp<MockUidMap> makeMockUidMapForPackage(const string& pkg, const set<int32_t>& uids);\r\n\r\n// Create log event for screen state changed.\r\nstd::unique_ptr<LogEvent> CreateScreenStateChangedEvent(uint64_t timestampNs,\r\n                                                        const android::view::DisplayStateEnum state,\r\n                                                        int loggerUid = 0);\r\n\r\n// Create log event for screen brightness state changed.\r\nstd::unique_ptr<LogEvent> CreateScreenBrightnessChangedEvent(uint64_t timestampNs, int level);\r\n\r\n// Create log event when scheduled job starts.\r\nstd::unique_ptr<LogEvent> CreateStartScheduledJobEvent(uint64_t timestampNs,\r\n                                                       const vector<int>& attributionUids,\r\n                                                       const vector<string>& attributionTags,\r\n                                                       const string& jobName);\r\n\r\n// Create log event when scheduled job finishes.\r\nstd::unique_ptr<LogEvent> CreateFinishScheduledJobEvent(uint64_t timestampNs,\r\n                                                        const vector<int>& attributionUids,\r\n                                                        const vector<string>& attributionTags,\r\n                                                        const string& jobName);\r\n\r\n// Create log event when battery saver starts.\r\nstd::unique_ptr<LogEvent> CreateBatterySaverOnEvent(uint64_t timestampNs);\r\n// Create log event when battery saver stops.\r\nstd::unique_ptr<LogEvent> CreateBatterySaverOffEvent(uint64_t timestampNs);\r\n\r\n// Create log event when battery state changes.\r\nstd::unique_ptr<LogEvent> CreateBatteryStateChangedEvent(const uint64_t timestampNs, const BatteryPluggedStateEnum state);\r\n\r\n// Create log event for app moving to background.\r\nstd::unique_ptr<LogEvent> CreateMoveToBackgroundEvent(uint64_t timestampNs, const int uid);\r\n\r\n// Create log event for app moving to foreground.\r\nstd::unique_ptr<LogEvent> CreateMoveToForegroundEvent(uint64_t timestampNs, const int uid);\r\n\r\n// Create log event when the app sync starts.\r\nstd::unique_ptr<LogEvent> CreateSyncStartEvent(uint64_t timestampNs, const vector<int>& uids,\r\n                                               const vector<string>& tags, const string& name);\r\n\r\n// Create log event when the app sync ends.\r\nstd::unique_ptr<LogEvent> CreateSyncEndEvent(uint64_t timestampNs, const vector<int>& uids,\r\n                                             const vector<string>& tags, const string& name);\r\n\r\n// Create log event for an app crash.\r\nstd::unique_ptr<LogEvent> CreateAppCrashEvent(uint64_t timestampNs, const int uid);\r\n\r\n// Create log event for an app crash.\r\nstd::unique_ptr<LogEvent> CreateAppCrashOccurredEvent(uint64_t timestampNs, const int uid);\r\n\r\n// Create log event for acquiring wakelock.\r\nstd::unique_ptr<LogEvent> CreateAcquireWakelockEvent(uint64_t timestampNs, const vector<int>& uids,\r\n                                                     const vector<string>& tags,\r\n                                                     const string& wakelockName);\r\n\r\n// Create log 
event for releasing wakelock.\r\nstd::unique_ptr<LogEvent> CreateReleaseWakelockEvent(uint64_t timestampNs, const vector<int>& uids,\r\n                                                     const vector<string>& tags,\r\n                                                     const string& wakelockName);\r\n\r\n// Create log event for an isolated uid change.\r\nstd::unique_ptr<LogEvent> CreateIsolatedUidChangedEvent(uint64_t timestampNs, int hostUid,\r\n                                                        int isolatedUid, bool is_create);\r\n\r\n// Create log event for uid process state change.\r\nstd::unique_ptr<LogEvent> CreateUidProcessStateChangedEvent(\r\n        uint64_t timestampNs, int uid, const android::app::ProcessStateEnum state);\r\n\r\nstd::unique_ptr<LogEvent> CreateBleScanStateChangedEvent(uint64_t timestampNs,\r\n                                                         const vector<int>& attributionUids,\r\n                                                         const vector<string>& attributionTags,\r\n                                                         const BleScanStateChanged::State state,\r\n                                                         const bool filtered, const bool firstMatch,\r\n                                                         const bool opportunistic);\r\n\r\nstd::unique_ptr<LogEvent> CreateOverlayStateChangedEvent(int64_t timestampNs, const int32_t uid,\r\n                                                         const string& packageName,\r\n                                                         const bool usingAlertWindow,\r\n                                                         const OverlayStateChanged::State state);\r\n\r\n// Create a statsd log event processor upon the start time in seconds, config and key.\r\nsp<StatsLogProcessor> CreateStatsLogProcessor(const int64_t timeBaseNs, const int64_t currentTimeNs,\r\n                                              const StatsdConfig& config, const ConfigKey& key,\r\n                                              const shared_ptr<IPullAtomCallback>& puller = nullptr,\r\n                                              const int32_t atomTag = 0 /*for puller only*/,\r\n                                              const sp<UidMap> = new UidMap());\r\n\r\n// Util function to sort the log events by timestamp.\r\nvoid sortLogEventsByTimestamp(std::vector<std::unique_ptr<LogEvent>> *events);\r\n\r\nint64_t StringToId(const string& str);\r\n\r\nvoid ValidateWakelockAttributionUidAndTagDimension(const DimensionsValue& value, const int atomId,\r\n                                                   const int uid, const string& tag);\r\nvoid ValidateUidDimension(const DimensionsValue& value, int node_idx, int atomId, int uid);\r\nvoid ValidateAttributionUidDimension(const DimensionsValue& value, int atomId, int uid);\r\nvoid ValidateAttributionUidAndTagDimension(\r\n    const DimensionsValue& value, int atomId, int uid, const std::string& tag);\r\nvoid ValidateAttributionUidAndTagDimension(\r\n    const DimensionsValue& value, int node_idx, int atomId, int uid, const std::string& tag);\r\n\r\nstruct DimensionsPair {\r\n    DimensionsPair(DimensionsValue m1, google::protobuf::RepeatedPtrField<StateValue> m2)\r\n        : dimInWhat(m1), stateValues(m2){};\r\n\r\n    DimensionsValue dimInWhat;\r\n    google::protobuf::RepeatedPtrField<StateValue> stateValues;\r\n};\r\n\r\nbool LessThan(const StateValue& s1, const StateValue& s2);\r\nbool LessThan(const DimensionsValue& s1, const DimensionsValue& s2);\r\nbool LessThan(const DimensionsPair& s1, const DimensionsPair& s2);\r\n\r\n\r\nvoid backfillStartEndTimestamp(ConfigMetricsReport *config_report);\r\nvoid backfillStartEndTimestamp(ConfigMetricsReportList *config_report_list);\r\n\r\nvoid backfillStringInReport(ConfigMetricsReportList *config_report_list);\r\nvoid backfillStringInDimension(const std::map<uint64_t, string>& str_map,\r\n                               DimensionsValue* dimension);\r\n\r\ntemplate <typename T>\r\nvoid backfillStringInDimension(const std::map<uint64_t, string>& str_map,\r\n                               T* metrics) {\r\n    for (int i = 0; i < metrics->data_size(); ++i) {\r\n        auto data = metrics->mutable_data(i);\r\n        if (data->has_dimensions_in_what()) {\r\n            backfillStringInDimension(str_map, data->mutable_dimensions_in_what());\r\n        }\r\n        if (data->has_dimensions_in_condition()) {\r\n            backfillStringInDimension(str_map, 
data->mutable_dimensions_in_condition());\r\n }\r\n }\r\n}\r\n\r\nvoid backfillDimensionPath(ConfigMetricsReportList* config_report_list);\r\n\r\nbool backfillDimensionPath(const DimensionsValue& path,\r\n const google::protobuf::RepeatedPtrField<DimensionsValue>& leafValues,\r\n DimensionsValue* dimension);\r\n\r\nclass FakeSubsystemSleepCallback : public BnPullAtomCallback {\r\npublic:\r\n Status onPullAtom(int atomTag,\r\n const shared_ptr<IPullAtomResultReceiver>& resultReceiver) override;\r\n};\r\n\r\ntemplate <typename T>\r\nvoid backfillDimensionPath(const DimensionsValue& whatPath,\r\n const DimensionsValue& conditionPath,\r\n T* metricData) {\r\n for (int i = 0; i < metricData->data_size(); ++i) {\r\n auto data = metricData->mutable_data(i);\r\n if (data->dimension_leaf_values_in_what_size() > 0) {\r\n backfillDimensionPath(whatPath, data->dimension_leaf_values_in_what(),\r\n data->mutable_dimensions_in_what());\r\n data->clear_dimension_leaf_values_in_what();\r\n }\r\n if (data->dimension_leaf_values_in_condition_size() > 0) {\r\n backfillDimensionPath(conditionPath, data->dimension_leaf_values_in_condition(),\r\n data->mutable_dimensions_in_condition());\r\n data->clear_dimension_leaf_values_in_condition();\r\n }\r\n }\r\n}\r\n\r\nstruct DimensionCompare {\r\n bool operator()(const DimensionsPair& s1, const DimensionsPair& s2) const {\r\n return LessThan(s1, s2);\r\n }\r\n};\r\n\r\ntemplate <typename T>\r\nvoid sortMetricDataByDimensionsValue(const T& metricData, T* sortedMetricData) {\r\n std::map<DimensionsPair, int, DimensionCompare> dimensionIndexMap;\r\n for (int i = 0; i < metricData.data_size(); ++i) {\r\n dimensionIndexMap.insert(\r\n std::make_pair(DimensionsPair(metricData.data(i).dimensions_in_what(),\r\n metricData.data(i).slice_by_state()),\r\n i));\r\n }\r\n for (const auto& itr : dimensionIndexMap) {\r\n *sortedMetricData->add_data() = metricData.data(itr.second);\r\n }\r\n}\r\n\r\ntemplate <typename T>\r\nvoid backfillStartEndTimestampForFullBucket(\r\n const int64_t timeBaseNs, const int64_t bucketSizeNs, T* bucket) {\r\n bucket->set_start_bucket_elapsed_nanos(timeBaseNs + bucketSizeNs * bucket->bucket_num());\r\n bucket->set_end_bucket_elapsed_nanos(\r\n timeBaseNs + bucketSizeNs * bucket->bucket_num() + bucketSizeNs);\r\n bucket->clear_bucket_num();\r\n}\r\n\r\ntemplate <typename T>\r\nvoid backfillStartEndTimestampForPartialBucket(const int64_t timeBaseNs, T* bucket) {\r\n if (bucket->has_start_bucket_elapsed_millis()) {\r\n bucket->set_start_bucket_elapsed_nanos(\r\n MillisToNano(bucket->start_bucket_elapsed_millis()));\r\n bucket->clear_start_bucket_elapsed_millis();\r\n }\r\n if (bucket->has_end_bucket_elapsed_millis()) {\r\n bucket->set_end_bucket_elapsed_nanos(\r\n MillisToNano(bucket->end_bucket_elapsed_millis()));\r\n bucket->clear_end_bucket_elapsed_millis();\r\n }\r\n}\r\n\r\ntemplate <typename T>\r\nvoid backfillStartEndTimestampForMetrics(const int64_t timeBaseNs, const int64_t bucketSizeNs,\r\n T* metrics) {\r\n for (int i = 0; i < metrics->data_size(); ++i) {\r\n auto data = metrics->mutable_data(i);\r\n for (int j = 0; j < data->bucket_info_size(); ++j) {\r\n auto bucket = data->mutable_bucket_info(j);\r\n if (bucket->has_bucket_num()) {\r\n backfillStartEndTimestampForFullBucket(timeBaseNs, bucketSizeNs, bucket);\r\n } else {\r\n backfillStartEndTimestampForPartialBucket(timeBaseNs, bucket);\r\n }\r\n }\r\n }\r\n}\r\n\r\ntemplate <typename T>\r\nvoid backfillStartEndTimestampForSkippedBuckets(const int64_t timeBaseNs, T* metrics) {\r\n for (int 
i = 0; i < metrics->skipped_size(); ++i) {\r\n backfillStartEndTimestampForPartialBucket(timeBaseNs, metrics->mutable_skipped(i));\r\n }\r\n}\r\n} // namespace statsd\r\n} // namespace os\r\n} // namespace android\r\n" }, { "alpha_fraction": 0.6878364086151123, "alphanum_fraction": 0.6921420693397522, "avg_line_length": 30.034482955932617, "blob_id": "91d000ded7de87814da66ce028c9d782dd58206f", "content_id": "de4f8cc0c72a0fe9f604f031c666d16852249ed1", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1858, "license_type": "permissive", "max_line_length": 90, "num_lines": 58, "path": "/packages/SettingsLib/src/com/android/settingslib/deviceinfo/StorageVolumeProvider.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2016 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.settingslib.deviceinfo;\r\n\r\nimport android.app.usage.StorageStatsManager;\r\nimport android.os.storage.VolumeInfo;\r\n\r\nimport java.io.IOException;\r\nimport java.util.List;\r\n\r\n/**\r\n * StorageVolumeProvider provides access to the storage volumes on a device for free space\r\n * calculations.\r\n */\r\npublic interface StorageVolumeProvider {\r\n /**\r\n * Returns the number of bytes of total storage on the primary storage.\r\n */\r\n long getPrimaryStorageSize();\r\n\r\n /**\r\n * Returns a list of VolumeInfos for the device.\r\n */\r\n List<VolumeInfo> getVolumes();\r\n\r\n /**\r\n * Returns the emulated volume for a given private volume.\r\n */\r\n VolumeInfo findEmulatedForPrivate(VolumeInfo privateVolume);\r\n\r\n /**\r\n * Returns the total bytes for a given storage volume.\r\n *\r\n * @pre The volume is a private volume and is readable.\r\n */\r\n long getTotalBytes(StorageStatsManager stats, VolumeInfo volume) throws IOException;\r\n\r\n /**\r\n * Returns the free bytes for a given storage volume.\r\n *\r\n * @pre The volume is a private volume and is readable.\r\n */\r\n long getFreeBytes(StorageStatsManager stats, VolumeInfo volume) throws IOException;\r\n}\r\n" }, { "alpha_fraction": 0.7102446556091309, "alphanum_fraction": 0.7240061163902283, "avg_line_length": 27.727272033691406, "blob_id": "65daa23db80266bdfb3510b3860e9e5e3c109325", "content_id": "e6822a0dea26d26288980dcba26a136dd0d45b63", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1308, "license_type": "permissive", "max_line_length": 75, "num_lines": 44, "path": "/services/core/jni/BroadcastRadio/regions.h", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/**\r\n * Copyright (C) 2017 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * 
You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#ifndef _ANDROID_SERVER_BROADCASTRADIO_REGIONS_H\r\n#define _ANDROID_SERVER_BROADCASTRADIO_REGIONS_H\r\n\r\n#include \"types.h\"\r\n\r\n#include <android/hardware/broadcastradio/1.1/types.h>\r\n\r\nnamespace android {\r\nnamespace server {\r\nnamespace BroadcastRadio {\r\nnamespace regions {\r\n\r\nnamespace V1_0 = hardware::broadcastradio::V1_0;\r\n\r\nstruct RegionalBandConfig {\r\n Region region;\r\n V1_0::BandConfig bandConfig;\r\n};\r\n\r\nstd::vector<RegionalBandConfig>\r\nmapRegions(const hardware::hidl_vec<V1_0::BandConfig>& bands);\r\n\r\n} // namespace regions\r\n} // namespace BroadcastRadio\r\n} // namespace server\r\n} // namespace android\r\n\r\n#endif // _ANDROID_SERVER_BROADCASTRADIO_REGIONS_H\r\n" }, { "alpha_fraction": 0.6016752123832703, "alphanum_fraction": 0.7026523947715759, "avg_line_length": 40.97999954223633, "blob_id": "d5f48d666350784ce24168b3b1d94a337b32a222", "content_id": "82cdff15821d47ab56ae455ce3ebc73cc50bcd57", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 2149, "license_type": "permissive", "max_line_length": 81, "num_lines": 50, "path": "/libs/hwui/tests/scripts/prep_taieye.sh", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "nr=$(adb shell cat /proc/cpuinfo | grep processor | wc -l)\r\ncpubase=/sys/devices/system/cpu\r\n\r\nadb root\r\nadb wait-for-device\r\nadb shell stop vendor.perfd\r\nadb shell stop thermal-engine\r\n\r\nS=1036800\r\ncpu=0\r\n# Changing governor and frequency in one core will be automatically applied\r\n# to other cores in the cluster\r\nwhile [ $((cpu < 4)) -eq 1 ]; do\r\n echo \"Setting cpu ${cpu} to $S hz\"\r\n adb shell \"echo userspace > $cpubase/cpu${cpu}/cpufreq/scaling_governor\"\r\n adb shell \"echo 1 > $cpubase/cpu${cpu}/online\"\r\n adb shell \"echo $S > $cpubase/cpu${cpu}/cpufreq/scaling_max_freq\"\r\n adb shell \"echo $S > $cpubase/cpu${cpu}/cpufreq/scaling_min_freq\"\r\n cpu=$(($cpu + 1))\r\ndone\r\n\r\nwhile [ $((cpu < $nr)) -eq 1 ]; do\r\n echo \"disable cpu $cpu\"\r\n adb shell \"echo 0 > $cpubase/cpu${cpu}/online\"\r\n cpu=$(($cpu + 1))\r\ndone\r\n\r\necho \"setting GPU bus and idle timer\"\r\nadb shell \"echo 0 > /sys/class/kgsl/kgsl-3d0/bus_split\"\r\nadb shell \"echo 1 > /sys/class/kgsl/kgsl-3d0/force_clk_on\"\r\nadb shell \"echo 10000 > /sys/class/kgsl/kgsl-3d0/idle_timer\"\r\n\r\n#0 762 1144 1525 2288 3143 4173 5195 5859 7759 9887 11863 13763\r\nadb shell \"echo 7759 > /sys/class/devfreq/soc\\:qcom,gpubw/min_freq\"\r\nadb shell \"echo 7759 > /sys/class/devfreq/soc\\:qcom,gpubw/max_freq\"\r\nadb shell \"echo 7759 > /sys/class/devfreq/soc\\:qcom,cpubw/min_freq\"\r\nadb shell \"echo 7759 > /sys/class/devfreq/soc\\:qcom,cpubw/max_freq\"\r\nadb shell \"echo 7759 > /sys/class/devfreq/soc\\:qcom,mincpubw/min_freq\"\r\nadb shell \"echo 7759 > /sys/class/devfreq/soc\\:qcom,mincpubw/max_freq\"\r\nadb shell \"echo 7759 > /sys/class/devfreq/soc\\:qcom,memlat-cpu0/min_freq\"\r\nadb shell \"echo 7759 > 
/sys/class/devfreq/soc\\:qcom,memlat-cpu0/max_freq\"\r\n\r\n# 180000000 257000000 342000000 414000000 515000000 596000000 670000000 710000000\r\necho \"performance mode, 342 MHz\"\r\nadb shell \"echo performance > /sys/class/kgsl/kgsl-3d0/devfreq/governor\"\r\nadb shell \"echo 342000000 > /sys/class/kgsl/kgsl-3d0/devfreq/min_freq\"\r\nadb shell \"echo 342000000 > /sys/class/kgsl/kgsl-3d0/devfreq/max_freq\"\r\n\r\nadb shell \"echo 4 > /sys/class/kgsl/kgsl-3d0/min_pwrlevel\"\r\nadb shell \"echo 4 > /sys/class/kgsl/kgsl-3d0/max_pwrlevel\"\r\n" }, { "alpha_fraction": 0.6126279830932617, "alphanum_fraction": 0.6283276677131653, "avg_line_length": 28.226804733276367, "blob_id": "d0e2ee3a9795239a74f1b734486df7f8aba15786", "content_id": "ea317f26a90c144da2e8ddcbe726e06e1d7f2c5d", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2930, "license_type": "permissive", "max_line_length": 87, "num_lines": 97, "path": "/tools/aapt/tests/CrunchCache_test.cpp", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "//\r\n// Copyright 2011 The Android Open Source Project\r\n//\r\n#include <utils/String8.h>\r\n#include <iostream>\r\n#include <errno.h>\r\n\r\n#include \"CrunchCache.h\"\r\n#include \"FileFinder.h\"\r\n#include \"MockFileFinder.h\"\r\n#include \"CacheUpdater.h\"\r\n#include \"MockCacheUpdater.h\"\r\n\r\nusing namespace android;\r\nusing std::cout;\r\nusing std::endl;\r\n\r\nvoid expectEqual(int got, int expected, const char* desc) {\r\n cout << \"Checking \" << desc << \": \";\r\n cout << \"Got \" << got << \", expected \" << expected << \"...\";\r\n cout << ( (got == expected) ? \"PASSED\" : \"FAILED\") << endl;\r\n errno += ((got == expected) ? 0 : 1);\r\n}\r\n\r\nint main() {\r\n\r\n errno = 0;\r\n\r\n String8 source(\"res\");\r\n String8 dest(\"res2\");\r\n\r\n // Create data for MockFileFinder to feed to the cache\r\n KeyedVector<String8, time_t> sourceData;\r\n // This shouldn't be updated\r\n sourceData.add(String8(\"res/drawable/hello.png\"),3);\r\n // This should be updated\r\n sourceData.add(String8(\"res/drawable/world.png\"),5);\r\n // This should cause make directory to be called\r\n sourceData.add(String8(\"res/drawable-cool/hello.png\"),3);\r\n\r\n KeyedVector<String8, time_t> destData;\r\n destData.add(String8(\"res2/drawable/hello.png\"),3);\r\n destData.add(String8(\"res2/drawable/world.png\"),3);\r\n // this should call delete\r\n destData.add(String8(\"res2/drawable/dead.png\"),3);\r\n\r\n // Package up data and create mock file finder\r\n KeyedVector<String8, KeyedVector<String8,time_t> > data;\r\n data.add(source,sourceData);\r\n data.add(dest,destData);\r\n FileFinder* ff = new MockFileFinder(data);\r\n CrunchCache cc(source,dest,ff);\r\n\r\n MockCacheUpdater* mcu = new MockCacheUpdater();\r\n CacheUpdater* cu(mcu);\r\n\r\n cout << \"Running Crunch...\";\r\n int result = cc.crunch(cu);\r\n cout << ((result > 0) ? \"PASSED\" : \"FAILED\") << endl;\r\n errno += ((result > 0) ? 
0 : 1);\r\n\r\n    const int EXPECTED_RESULT = 2;\r\n    expectEqual(result, EXPECTED_RESULT, \"number of files touched\");\r\n\r\n    cout << \"Checking calls to deleteFile and processImage:\" << endl;\r\n    const int EXPECTED_DELETES = 1;\r\n    const int EXPECTED_PROCESSED = 2;\r\n    // Deletes\r\n    expectEqual(mcu->deleteCount, EXPECTED_DELETES, \"deleteFile\");\r\n    // processImage\r\n    expectEqual(mcu->processCount, EXPECTED_PROCESSED, \"processImage\");\r\n\r\n    const int EXPECTED_OVERWRITES = 3;\r\n    result = cc.crunch(cu, true);\r\n    expectEqual(result, EXPECTED_OVERWRITES, \"number of files touched with overwrite\");\r\n\r\n    if (errno == 0)\r\n        cout << \"ALL TESTS PASSED!\" << endl;\r\n    else\r\n        cout << errno << \" TESTS FAILED\" << endl;\r\n\r\n    delete ff;\r\n    delete cu;\r\n\r\n    // TESTS BELOW WILL GO AWAY SOON\r\n\r\n    String8 source2(\"ApiDemos/res\");\r\n    String8 dest2(\"ApiDemos/res2\");\r\n\r\n    FileFinder* sff = new SystemFileFinder();\r\n    CacheUpdater* scu = new SystemCacheUpdater();\r\n\r\n    CrunchCache scc(source2,dest2,sff);\r\n\r\n    scc.crunch(scu);\r\n}" }, { "alpha_fraction": 0.6739543676376343, "alphanum_fraction": 0.6739543676376343, "avg_line_length": 29.878787994384766, "blob_id": "e600c2e182d706d95617c5e383d27eb4b1e40b8d", "content_id": "bcacd44af68a0b60e1ce7b4c526cf636da5d6424", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1052, "license_type": "permissive", "max_line_length": 96, "num_lines": 33, "path": "/core/java/android/widget/MenuItemHoverListener.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "package android.widget;\r\n\r\nimport android.annotation.NonNull;\r\nimport android.view.MenuItem;\r\n\r\nimport com.android.internal.view.menu.MenuBuilder;\r\n\r\n/**\r\n * An interface notified when a menu item is hovered. Useful for cases when hover should trigger\r\n * some behavior at a higher level, like managing the opening and closing of submenus.\r\n *\r\n * @hide\r\n */\r\npublic interface MenuItemHoverListener {\r\n    /**\r\n     * Called when hover exits a menu item.\r\n     * <p>\r\n     * If hover is moving to another item, this method will be called before\r\n     * {@link #onItemHoverEnter(MenuBuilder, MenuItem)} for the newly-hovered item.\r\n     *\r\n     * @param menu the item's parent menu\r\n     * @param item the hovered menu item\r\n     */\r\n    void onItemHoverExit(@NonNull MenuBuilder menu, @NonNull MenuItem item);\r\n\r\n    /**\r\n     * Called when hover enters a menu item.\r\n     *\r\n     * @param menu the item's parent menu\r\n     * @param item the hovered menu item\r\n     */\r\n    void onItemHoverEnter(@NonNull MenuBuilder menu, @NonNull MenuItem item);\r\n}\r\n" }, { "alpha_fraction": 0.6756594777107239, "alphanum_fraction": 0.6810551285743713, "avg_line_length": 31.360000610351562, "blob_id": "51487394d5e6007c17eaccd9019f88efff760d87", "content_id": "78bd5c200441bd5c9c8463affd3baba465cb3a37", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1668, "license_type": "permissive", "max_line_length": 80, "num_lines": 50, "path": "/core/java/android/speech/tts/SilencePlaybackQueueItem.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2011 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\r\n * use this file except in compliance with the License. 
You may obtain a copy of\r\n * the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\r\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\r\n * License for the specific language governing permissions and limitations under\r\n * the License.\r\n */\r\npackage android.speech.tts;\r\n\r\nimport android.os.ConditionVariable;\r\nimport android.speech.tts.TextToSpeechService.UtteranceProgressDispatcher;\r\n\r\nclass SilencePlaybackQueueItem extends PlaybackQueueItem {\r\n private final ConditionVariable mCondVar = new ConditionVariable();\r\n private final long mSilenceDurationMs;\r\n\r\n SilencePlaybackQueueItem(UtteranceProgressDispatcher dispatcher,\r\n Object callerIdentity, long silenceDurationMs) {\r\n super(dispatcher, callerIdentity);\r\n mSilenceDurationMs = silenceDurationMs;\r\n }\r\n\r\n @Override\r\n public void run() {\r\n getDispatcher().dispatchOnStart();\r\n boolean wasStopped = false;\r\n if (mSilenceDurationMs > 0) {\r\n wasStopped = mCondVar.block(mSilenceDurationMs);\r\n }\r\n if (wasStopped) {\r\n getDispatcher().dispatchOnStop();\r\n } else {\r\n getDispatcher().dispatchOnSuccess();\r\n }\r\n\r\n }\r\n\r\n @Override\r\n void stop(int errorCode) {\r\n mCondVar.open();\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.618927538394928, "alphanum_fraction": 0.6239713430404663, "avg_line_length": 38.28876876831055, "blob_id": "1bbc48376feb0981b41ada449100ee5e11a7c87a", "content_id": "3c3aa2fcc1da525d84f0bd1d65a37f4ef4cde49c", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 7534, "license_type": "permissive", "max_line_length": 96, "num_lines": 187, "path": "/tests/CanvasCompare/src/com/android/test/hwuicompare/ErrorCalculator.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2012 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.test.hwuicompare;\r\n\r\nimport android.content.Context;\r\nimport android.content.res.Resources;\r\nimport android.graphics.Bitmap;\r\nimport android.graphics.Color;\r\nimport android.renderscript.Allocation;\r\nimport android.renderscript.Element;\r\nimport android.renderscript.RenderScript;\r\nimport android.util.Log;\r\n\r\npublic class ErrorCalculator {\r\n private static final String LOG_TAG = \"ErrorCalculator\";\r\n private static final int REGION_SIZE = 8;\r\n\r\n private static final boolean LOG_TIMING = false;\r\n private static final boolean LOG_CALC = false;\r\n\r\n private RenderScript mRS;\r\n private Allocation mIdealPixelsAllocation;\r\n private Allocation mGivenPixelsAllocation;\r\n private Allocation mOutputPixelsAllocation;\r\n\r\n private Allocation mInputRowsAllocation;\r\n private 
Allocation mOutputRegionsAllocation;\r\n\r\n private ScriptC_errorCalculator mScript;\r\n\r\n private int[] mOutputRowRegions;\r\n\r\n public ErrorCalculator(Context c, Resources resources) {\r\n int width = resources.getDimensionPixelSize(R.dimen.layer_width);\r\n int height = resources.getDimensionPixelSize(R.dimen.layer_height);\r\n mOutputRowRegions = new int[height / REGION_SIZE];\r\n\r\n mRS = RenderScript.create(c);\r\n int[] rowIndices = new int[height / REGION_SIZE];\r\n for (int i = 0; i < rowIndices.length; i++)\r\n rowIndices[i] = i * REGION_SIZE;\r\n\r\n mScript = new ScriptC_errorCalculator(mRS);\r\n mScript.set_HEIGHT(height);\r\n mScript.set_WIDTH(width);\r\n mScript.set_REGION_SIZE(REGION_SIZE);\r\n\r\n mInputRowsAllocation = Allocation.createSized(mRS, Element.I32(mRS), rowIndices.length,\r\n Allocation.USAGE_SCRIPT);\r\n mInputRowsAllocation.copyFrom(rowIndices);\r\n mOutputRegionsAllocation = Allocation.createSized(mRS, Element.I32(mRS),\r\n mOutputRowRegions.length, Allocation.USAGE_SCRIPT);\r\n }\r\n\r\n\r\n private static long startMillis, middleMillis;\r\n\r\n public float calcErrorRS(Bitmap ideal, Bitmap given) {\r\n if (LOG_TIMING) {\r\n startMillis = System.currentTimeMillis();\r\n }\r\n\r\n mIdealPixelsAllocation = Allocation.createFromBitmap(mRS, ideal,\r\n Allocation.MipmapControl.MIPMAP_NONE, Allocation.USAGE_SCRIPT);\r\n mGivenPixelsAllocation = Allocation.createFromBitmap(mRS, given,\r\n Allocation.MipmapControl.MIPMAP_NONE, Allocation.USAGE_SCRIPT);\r\n\r\n mScript.set_ideal(mIdealPixelsAllocation);\r\n mScript.set_given(mGivenPixelsAllocation);\r\n\r\n mScript.forEach_countInterestingRegions(mInputRowsAllocation, mOutputRegionsAllocation);\r\n mOutputRegionsAllocation.copyTo(mOutputRowRegions);\r\n\r\n int regionCount = 0;\r\n for (int region : mOutputRowRegions) {\r\n regionCount += region;\r\n }\r\n int interestingPixels = Math.max(1, regionCount) * REGION_SIZE * REGION_SIZE;\r\n\r\n if (LOG_TIMING) {\r\n // Record the mid-point timestamp used by the timing logs below.\r\n middleMillis = System.currentTimeMillis();\r\n }\r\n\r\n mScript.forEach_accumulateError(mInputRowsAllocation, mOutputRegionsAllocation);\r\n mOutputRegionsAllocation.copyTo(mOutputRowRegions);\r\n float totalError = 0;\r\n for (int row : mOutputRowRegions) {\r\n totalError += row;\r\n }\r\n totalError /= 1024.0f;\r\n\r\n if (LOG_TIMING) {\r\n long finalMillis = System.currentTimeMillis();\r\n Log.d(LOG_TAG, \"rs: first part took \" + (middleMillis - startMillis) + \"ms\");\r\n Log.d(LOG_TAG, \"rs: last part took \" + (finalMillis - middleMillis) + \"ms\");\r\n }\r\n if (LOG_CALC) {\r\n Log.d(LOG_TAG, \"rs: error \" + totalError + \", pixels \" + interestingPixels);\r\n }\r\n return totalError / interestingPixels;\r\n }\r\n\r\n public void calcErrorHeatmapRS(Bitmap ideal, Bitmap given, Bitmap output) {\r\n mIdealPixelsAllocation = Allocation.createFromBitmap(mRS, ideal,\r\n Allocation.MipmapControl.MIPMAP_NONE, Allocation.USAGE_SCRIPT);\r\n mGivenPixelsAllocation = Allocation.createFromBitmap(mRS, given,\r\n Allocation.MipmapControl.MIPMAP_NONE, Allocation.USAGE_SCRIPT);\r\n\r\n mScript.set_ideal(mIdealPixelsAllocation);\r\n mScript.set_given(mGivenPixelsAllocation);\r\n\r\n mOutputPixelsAllocation = Allocation.createFromBitmap(mRS, output,\r\n Allocation.MipmapControl.MIPMAP_NONE, Allocation.USAGE_SCRIPT);\r\n mScript.forEach_displayDifference(mOutputPixelsAllocation, mOutputPixelsAllocation);\r\n mOutputPixelsAllocation.copyTo(output);\r\n }\r\n\r\n public static float calcError(Bitmap ideal, Bitmap given) {\r\n if (LOG_TIMING) {\r\n startMillis 
= System.currentTimeMillis();\r\n }\r\n\r\n int interestingRegions = 0;\r\n for (int x = 0; x < ideal.getWidth(); x += REGION_SIZE) {\r\n for (int y = 0; y < ideal.getHeight(); y += REGION_SIZE) {\r\n if (inspectRegion(ideal, x, y)) {\r\n interestingRegions++;\r\n }\r\n }\r\n }\r\n\r\n int interestingPixels = Math.max(1, interestingRegions) * REGION_SIZE * REGION_SIZE;\r\n\r\n if (LOG_TIMING) {\r\n // Record the mid-point timestamp used by the timing logs below.\r\n middleMillis = System.currentTimeMillis();\r\n }\r\n\r\n float totalError = 0;\r\n for (int x = 0; x < ideal.getWidth(); x++) {\r\n for (int y = 0; y < ideal.getHeight(); y++) {\r\n int idealColor = ideal.getPixel(x, y);\r\n int givenColor = given.getPixel(x, y);\r\n if (idealColor == givenColor)\r\n continue;\r\n totalError += Math.abs(Color.red(idealColor) - Color.red(givenColor));\r\n totalError += Math.abs(Color.green(idealColor) - Color.green(givenColor));\r\n totalError += Math.abs(Color.blue(idealColor) - Color.blue(givenColor));\r\n totalError += Math.abs(Color.alpha(idealColor) - Color.alpha(givenColor));\r\n }\r\n }\r\n totalError /= 1024.0f;\r\n if (LOG_TIMING) {\r\n long finalMillis = System.currentTimeMillis();\r\n Log.d(LOG_TAG, \"dvk: first part took \" + (middleMillis - startMillis) + \"ms\");\r\n Log.d(LOG_TAG, \"dvk: last part took \" + (finalMillis - middleMillis) + \"ms\");\r\n }\r\n if (LOG_CALC) {\r\n Log.d(LOG_TAG, \"dvk: error \" + totalError + \", pixels \" + interestingPixels);\r\n }\r\n return totalError / interestingPixels;\r\n }\r\n\r\n private static boolean inspectRegion(Bitmap ideal, int x, int y) {\r\n int regionColor = ideal.getPixel(x, y);\r\n for (int i = 0; i < REGION_SIZE; i++) {\r\n for (int j = 0; j < REGION_SIZE; j++) {\r\n if (ideal.getPixel(x + i, y + j) != regionColor)\r\n return true;\r\n }\r\n }\r\n return false;\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.7265568971633911, "alphanum_fraction": 0.7304939031600952, "avg_line_length": 35.253334045410156, "blob_id": "fe2d3ae957829deebabed2d835e736dbb14d0b93", "content_id": "162780419117ee8a241b74231c9944f27a20a218", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2794, "license_type": "permissive", "max_line_length": 95, "num_lines": 75, "path": "/packages/SystemUI/tests/src/com/android/keyguard/clock/BubbleClockControllerTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2019 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\npackage com.android.keyguard.clock;\r\n\r\nimport static com.google.common.truth.Truth.assertThat;\r\n\r\nimport android.content.res.Resources;\r\nimport android.graphics.Color;\r\nimport android.test.suitebuilder.annotation.SmallTest;\r\nimport android.testing.AndroidTestingRunner;\r\nimport android.testing.TestableLooper.RunWithLooper;\r\nimport android.view.LayoutInflater;\r\nimport android.view.View;\r\nimport android.view.ViewGroup;\r\nimport 
android.widget.TextView;\r\n\r\nimport com.android.systemui.SysuiTestCase;\r\nimport com.android.systemui.colorextraction.SysuiColorExtractor;\r\n\r\nimport org.junit.Before;\r\nimport org.junit.Test;\r\nimport org.junit.runner.RunWith;\r\nimport org.mockito.Mock;\r\nimport org.mockito.MockitoAnnotations;\r\n\r\n@SmallTest\r\n@RunWith(AndroidTestingRunner.class)\r\n@RunWithLooper\r\npublic final class BubbleClockControllerTest extends SysuiTestCase {\r\n\r\n    private BubbleClockController mClockController;\r\n @Mock SysuiColorExtractor mMockColorExtractor;\r\n\r\n @Before\r\n public void setUp() {\r\n MockitoAnnotations.initMocks(this);\r\n\r\n Resources res = getContext().getResources();\r\n LayoutInflater layoutInflater = LayoutInflater.from(getContext());\r\n mClockController = new BubbleClockController(res, layoutInflater, mMockColorExtractor);\r\n }\r\n\r\n @Test\r\n public void setDarkAmount_AOD() {\r\n ViewGroup smallClockFrame = (ViewGroup) mClockController.getView();\r\n View smallClock = smallClockFrame.getChildAt(0);\r\n // WHEN dark amount is set to AOD\r\n mClockController.setDarkAmount(1f);\r\n // THEN the small clock should remain visible.\r\n assertThat(smallClock.getVisibility()).isEqualTo(View.VISIBLE);\r\n }\r\n\r\n @Test\r\n public void setColorPalette_setDigitalClock() {\r\n ViewGroup smallClock = (ViewGroup) mClockController.getView();\r\n // WHEN text color is set\r\n mClockController.setColorPalette(true, new int[]{Color.RED});\r\n // THEN child of small clock should have text color set.\r\n TextView digitalClock = (TextView) smallClock.getChildAt(0);\r\n assertThat(digitalClock.getCurrentTextColor()).isEqualTo(Color.RED);\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.7020484209060669, "alphanum_fraction": 0.7132216095924377, "avg_line_length": 26.263158798217773, "blob_id": "da29a7896f7df7c82b2649a5fdb61baef7590e5f", "content_id": "7933e49a9fbad1debf470906a36b120b0e905854", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1074, "license_type": "permissive", "max_line_length": 75, "num_lines": 38, "path": "/media/jni/android_media_MediaDescrambler.h", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright 2017, The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#ifndef _ANDROID_MEDIA_DESCRAMBLER_H_\r\n#define _ANDROID_MEDIA_DESCRAMBLER_H_\r\n\r\n#include \"jni.h\"\r\n\r\n#include <utils/RefBase.h>\r\n\r\nnamespace android {\r\n\r\nnamespace hardware {\r\nnamespace cas {\r\nnamespace native {\r\nnamespace V1_0 {\r\nstruct IDescrambler;\r\n}}}}\r\nusing hardware::cas::native::V1_0::IDescrambler;\r\n\r\nsp<IDescrambler> GetDescrambler(JNIEnv *env, jobject obj);\r\n\r\n} // namespace android\r\n\r\n#endif // _ANDROID_MEDIA_DESCRAMBLER_H_\r\n" }, { "alpha_fraction": 0.7155663967132568, "alphanum_fraction": 0.7220863699913025, "avg_line_length": 31.16216278076172, 
"blob_id": "d5f0449c949bb9cf65c0839390658c4a7784a109", "content_id": "67d3d85a3a6e63b10ab92f7fb5b89a46fef93337", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1227, "license_type": "permissive", "max_line_length": 79, "num_lines": 37, "path": "/libs/hostgraphics/gui/BufferQueue.h", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2019 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#ifndef ANDROID_GUI_BUFFERQUEUE_H\r\n#define ANDROID_GUI_BUFFERQUEUE_H\r\n\r\n#include <gui/BufferItem.h>\r\n#include <gui/IGraphicBufferConsumer.h>\r\n#include <gui/IGraphicBufferProducer.h>\r\n\r\nnamespace android {\r\n\r\nclass BufferQueue {\r\npublic:\r\n enum { INVALID_BUFFER_SLOT = BufferItem::INVALID_BUFFER_SLOT };\r\n enum { NO_BUFFER_AVAILABLE = IGraphicBufferConsumer::NO_BUFFER_AVAILABLE };\r\n\r\n static void createBufferQueue(sp<IGraphicBufferProducer>* outProducer,\r\n sp<IGraphicBufferConsumer>* outConsumer);\r\n};\r\n\r\n} // namespace android\r\n\r\n#endif // ANDROID_GUI_BUFFERQUEUE_H\r\n" }, { "alpha_fraction": 0.6228528618812561, "alphanum_fraction": 0.636295735836029, "avg_line_length": 36.78260803222656, "blob_id": "3e7eb581e7b373bac43a11cfe9590f7bdec4f2b5", "content_id": "8d710a2c636c09988d91b96b1b152cce5925dbd7", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2678, "license_type": "permissive", "max_line_length": 100, "num_lines": 69, "path": "/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/stats_scorer.cpp", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2012 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n// Stats (mean and stdev) scoring in the native.\r\n\r\n#include \"stats_scorer.h\"\r\n\r\n#include <jni.h>\r\n#include <math.h>\r\n\r\nvoid Java_androidx_media_filterpacks_numeric_StatsFilter_score(\r\n JNIEnv* env, jobject thiz, jobject imageBuffer, jfloatArray statsArray)\r\n{\r\n unsigned char* pImg = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));\r\n int numPixels = env->GetDirectBufferCapacity(imageBuffer); // 1 byte per pixel\r\n float sum = 0.0;\r\n float sumSquares = 0.0;\r\n\r\n for (int i 
= 0; i < numPixels; ++i) {\r\n float val = static_cast<float>(pImg[i]);\r\n sum += val;\r\n sumSquares += val * val;\r\n }\r\n jfloat result[2];\r\n result[0] = sum / numPixels; // mean\r\n result[1] = sqrt((sumSquares - numPixels * result[0] * result[0]) / (numPixels - 1)); // stdev.\r\n env->SetFloatArrayRegion(statsArray, 0, 2, result);\r\n}\r\n\r\nvoid Java_androidx_media_filterpacks_numeric_StatsFilter_regionscore(\r\n JNIEnv* env, jobject thiz, jobject imageBuffer, jint width, jint height,\r\n jfloat left, jfloat top, jfloat right, jfloat bottom, jfloatArray statsArray)\r\n{\r\n unsigned char* pImg = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));\r\n int xStart = static_cast<int>(width * left);\r\n int xEnd = static_cast<int>(width * right);\r\n int yStart = static_cast<int>(height * top);\r\n int yEnd = static_cast<int>(height * bottom);\r\n int numPixels = (xEnd - xStart) * (yEnd - yStart);\r\n float sum = 0.0;\r\n float sumSquares = 0.0;\r\n\r\n for (int y = yStart; y < yEnd; y++) {\r\n int disp = width * y;\r\n for (int x = xStart; x < xEnd; ++x) {\r\n float val = static_cast<float>(*(pImg + disp + x));\r\n sum += val;\r\n sumSquares += val * val;\r\n }\r\n }\r\n jfloat result[2];\r\n result[0] = sum / numPixels; // mean\r\n result[1] = (numPixels == 1) ?\r\n 0 : sqrt((sumSquares - numPixels * result[0] * result[0]) / (numPixels - 1)); // stdev.\r\n env->SetFloatArrayRegion(statsArray, 0, 2, result);\r\n}\r\n\r\n" }, { "alpha_fraction": 0.5864854454994202, "alphanum_fraction": 0.5913786292076111, "avg_line_length": 35.31304168701172, "blob_id": "b99ab9e17eb4d6c1c41eedb6f519cd4f5f6422a7", "content_id": "7ccd51b4346b41a5602697d823230a03e0f3067e", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 12875, "license_type": "permissive", "max_line_length": 100, "num_lines": 345, "path": "/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/geometry/Quad.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2011 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage androidx.media.filterfw.geometry;\r\n\r\nimport android.annotation.SuppressLint;\r\nimport android.graphics.Matrix;\r\nimport android.graphics.PointF;\r\nimport android.graphics.RectF;\r\n\r\n/**\r\n * The Quad class specifies a (possibly affine transformed) rectangle.\r\n *\r\n * A Quad instance holds 4 points that define its shape. The points may represent any rectangle that\r\n * has been transformed by an affine transformation. This means that Quads can represent translated,\r\n * scaled, rotated and sheared/skewed rectangles. As such, Quads are restricted to the set of\r\n * parallelograms.\r\n *\r\n * Each point in the Quad represents a specific corner of the Quad. These are top-left, top-right,\r\n * bottom-left, and bottom-right. 
These labels allow mapping a transformed Quad back to an up-right\r\n * Quad, with the point-to-point mapping well-defined. They do not necessarily indicate that e.g.\r\n * the top-left corner is actually at the top-left of coordinate space.\r\n */\r\n@SuppressLint(\"FloatMath\")\r\npublic class Quad {\r\n\r\n private final PointF mTopLeft;\r\n private final PointF mTopRight;\r\n private final PointF mBottomLeft;\r\n private final PointF mBottomRight;\r\n\r\n /**\r\n * Returns the unit Quad.\r\n * The unit Quad has its top-left point at (0, 0) and bottom-right point at (1, 1).\r\n * @return the unit Quad.\r\n */\r\n public static Quad unitQuad() {\r\n return new Quad(0f, 0f, 1f, 0f, 0f, 1f, 1f, 1f);\r\n }\r\n\r\n /**\r\n * Return a Quad from the specified rectangle.\r\n *\r\n * @param rect a RectF instance.\r\n * @return Quad that represents the passed rectangle.\r\n */\r\n public static Quad fromRect(RectF rect) {\r\n return new Quad(new PointF(rect.left, rect.top),\r\n new PointF(rect.right, rect.top),\r\n new PointF(rect.left, rect.bottom),\r\n new PointF(rect.right, rect.bottom));\r\n }\r\n\r\n /**\r\n * Return a Quad from the specified rectangle coordinates.\r\n *\r\n * @param x the top left x coordinate\r\n * @param y the top left y coordinate\r\n * @param width the width of the rectangle\r\n * @param height the height of the rectangle\r\n * @return Quad that represents the passed rectangle.\r\n */\r\n public static Quad fromRect(float x, float y, float width, float height) {\r\n return new Quad(new PointF(x, y),\r\n new PointF(x + width, y),\r\n new PointF(x, y + height),\r\n new PointF(x + width, y + height));\r\n }\r\n\r\n /**\r\n * Return a Quad that spans the specified points and height.\r\n *\r\n * The returned Quad has the specified top-left and top-right points, and the specified height\r\n * while maintaining 90 degree angles on all 4 corners.\r\n *\r\n * @param topLeft the top-left of the quad\r\n * @param topRight the top-right of the quad\r\n * @param height the height of the quad\r\n * @return Quad that spans the specified points and height.\r\n */\r\n public static Quad fromLineAndHeight(PointF topLeft, PointF topRight, float height) {\r\n PointF dp = new PointF(topRight.x - topLeft.x, topRight.y - topLeft.y);\r\n float len = dp.length();\r\n PointF np = new PointF(height * (dp.y / len), height * (dp.x / len));\r\n PointF p2 = new PointF(topLeft.x - np.x, topLeft.y + np.y);\r\n PointF p3 = new PointF(topRight.x - np.x, topRight.y + np.y);\r\n return new Quad(topLeft, topRight, p2, p3);\r\n }\r\n\r\n /**\r\n * Return a Quad that represents the specified rotated rectangle.\r\n *\r\n * The Quad is rotated counter-clockwise around its centroid.\r\n *\r\n * @param rect the source rectangle\r\n * @param angle the angle to rotate the source rectangle in radians\r\n * @return the Quad representing the source rectangle rotated by the given angle.\r\n */\r\n public static Quad fromRotatedRect(RectF rect, float angle) {\r\n return Quad.fromRect(rect).rotated(angle);\r\n }\r\n\r\n /**\r\n * Return a Quad that represents the specified transformed rectangle.\r\n *\r\n * The transform is applied by multiplying each point (x, y, 1) by the matrix.\r\n *\r\n * @param rect the source rectangle\r\n * @param matrix the transformation matrix\r\n * @return the Quad representing the source rectangle transformed by the matrix\r\n */\r\n public static Quad fromTransformedRect(RectF rect, Matrix matrix) {\r\n return Quad.fromRect(rect).transformed(matrix);\r\n }\r\n\r\n /**\r\n * Returns the 
transformation matrix to transform the source Quad to the target Quad.\r\n *\r\n * @param source the source quad\r\n * @param target the target quad\r\n * @return the transformation matrix to map source to target.\r\n */\r\n public static Matrix getTransform(Quad source, Quad target) {\r\n // We only use the first 3 points as they sufficiently specify the transform\r\n Matrix transform = new Matrix();\r\n transform.setPolyToPoly(source.asCoords(), 0, target.asCoords(), 0, 3);\r\n return transform;\r\n }\r\n\r\n /**\r\n * The top-left point of the Quad.\r\n * @return top-left point of the Quad.\r\n */\r\n public PointF topLeft() {\r\n return mTopLeft;\r\n }\r\n\r\n /**\r\n * The top-right point of the Quad.\r\n * @return top-right point of the Quad.\r\n */\r\n public PointF topRight() {\r\n return mTopRight;\r\n }\r\n\r\n /**\r\n * The bottom-left point of the Quad.\r\n * @return bottom-left point of the Quad.\r\n */\r\n public PointF bottomLeft() {\r\n return mBottomLeft;\r\n }\r\n\r\n /**\r\n * The bottom-right point of the Quad.\r\n * @return bottom-right point of the Quad.\r\n */\r\n public PointF bottomRight() {\r\n return mBottomRight;\r\n }\r\n\r\n /**\r\n * Rotate the quad by the given angle.\r\n *\r\n * The Quad is rotated counter-clockwise around its centroid.\r\n *\r\n * @param angle the angle to rotate in radians\r\n * @return the rotated Quad\r\n */\r\n public Quad rotated(float angle) {\r\n PointF center = center();\r\n float cosa = (float) Math.cos(angle);\r\n float sina = (float) Math.sin(angle);\r\n\r\n PointF topLeft = rotatePoint(topLeft(), center, cosa, sina);\r\n PointF topRight = rotatePoint(topRight(), center, cosa, sina);\r\n PointF bottomLeft = rotatePoint(bottomLeft(), center, cosa, sina);\r\n PointF bottomRight = rotatePoint(bottomRight(), center, cosa, sina);\r\n\r\n return new Quad(topLeft, topRight, bottomLeft, bottomRight);\r\n }\r\n\r\n /**\r\n * Transform the quad with the given transformation matrix.\r\n *\r\n * The transform is applied by multiplying each point (x, y, 1) by the matrix.\r\n *\r\n * @param matrix the transformation matrix\r\n * @return the transformed Quad\r\n */\r\n public Quad transformed(Matrix matrix) {\r\n float[] points = asCoords();\r\n matrix.mapPoints(points);\r\n return new Quad(points);\r\n }\r\n\r\n /**\r\n * Returns the centroid of the Quad.\r\n *\r\n * The centroid of the Quad is where the two inner diagonals connecting the opposite corners\r\n * meet.\r\n *\r\n * @return the centroid of the Quad.\r\n */\r\n public PointF center() {\r\n // As the diagonals bisect each other, we can simply return the center of one of the\r\n // diagonals.\r\n return new PointF((mTopLeft.x + mBottomRight.x) / 2f,\r\n (mTopLeft.y + mBottomRight.y) / 2f);\r\n }\r\n\r\n /**\r\n * Returns the quad as a float-array of coordinates.\r\n * The order of coordinates is top-left, top-right, bottom-left, bottom-right. This is the\r\n * default order of coordinates used in ImageShaders, so this method can be used to bind\r\n * an attribute to the Quad.\r\n */\r\n public float[] asCoords() {\r\n return new float[] { mTopLeft.x, mTopLeft.y,\r\n mTopRight.x, mTopRight.y,\r\n mBottomLeft.x, mBottomLeft.y,\r\n mBottomRight.x, mBottomRight.y };\r\n }\r\n\r\n /**\r\n * Grow the Quad outwards by the specified factor.\r\n *\r\n * This method moves the corner points of the Quad outward along the diagonals that connect\r\n * them to the centroid. 
A factor of 1.0 moves the quad outwards by the distance of the corners\r\n * to the centroid.\r\n *\r\n * @param factor the growth factor\r\n * @return the Quad grown by the specified amount\r\n */\r\n public Quad grow(float factor) {\r\n PointF pc = center();\r\n return new Quad(factor * (mTopLeft.x - pc.x) + pc.x,\r\n factor * (mTopLeft.y - pc.y) + pc.y,\r\n factor * (mTopRight.x - pc.x) + pc.x,\r\n factor * (mTopRight.y - pc.y) + pc.y,\r\n factor * (mBottomLeft.x - pc.x) + pc.x,\r\n factor * (mBottomLeft.y - pc.y) + pc.y,\r\n factor * (mBottomRight.x - pc.x) + pc.x,\r\n factor * (mBottomRight.y - pc.y) + pc.y);\r\n }\r\n\r\n /**\r\n * Scale the Quad by the specified factor.\r\n *\r\n * @param factor the scaling factor\r\n * @return the Quad instance scaled by the specified factor.\r\n */\r\n public Quad scale(float factor) {\r\n return new Quad(mTopLeft.x * factor, mTopLeft.y * factor,\r\n mTopRight.x * factor, mTopRight.y * factor,\r\n mBottomLeft.x * factor, mBottomLeft.y * factor,\r\n mBottomRight.x * factor, mBottomRight.y * factor);\r\n }\r\n\r\n /**\r\n * Scale the Quad by the specified factors in the x and y factors.\r\n *\r\n * @param sx the x scaling factor\r\n * @param sy the y scaling factor\r\n * @return the Quad instance scaled by the specified factors.\r\n */\r\n public Quad scale2(float sx, float sy) {\r\n return new Quad(mTopLeft.x * sx, mTopLeft.y * sy,\r\n mTopRight.x * sx, mTopRight.y * sy,\r\n mBottomLeft.x * sx, mBottomLeft.y * sy,\r\n mBottomRight.x * sx, mBottomRight.y * sy);\r\n }\r\n\r\n /**\r\n * Returns the Quad's left-to-right edge.\r\n *\r\n * Returns a vector that goes from the Quad's top-left to top-right (or bottom-left to\r\n * bottom-right).\r\n *\r\n * @return the edge vector as a PointF.\r\n */\r\n public PointF xEdge() {\r\n return new PointF(mTopRight.x - mTopLeft.x, mTopRight.y - mTopLeft.y);\r\n }\r\n\r\n /**\r\n * Returns the Quad's top-to-bottom edge.\r\n *\r\n * Returns a vector that goes from the Quad's top-left to bottom-left (or top-right to\r\n * bottom-right).\r\n *\r\n * @return the edge vector as a PointF.\r\n */\r\n public PointF yEdge() {\r\n return new PointF(mBottomLeft.x - mTopLeft.x, mBottomLeft.y - mTopLeft.y);\r\n }\r\n\r\n @Override\r\n public String toString() {\r\n return \"Quad(\" + mTopLeft.x + \", \" + mTopLeft.y + \", \"\r\n + mTopRight.x + \", \" + mTopRight.y + \", \"\r\n + mBottomLeft.x + \", \" + mBottomLeft.y + \", \"\r\n + mBottomRight.x + \", \" + mBottomRight.y + \")\";\r\n }\r\n\r\n private Quad(PointF topLeft, PointF topRight, PointF bottomLeft, PointF bottomRight) {\r\n mTopLeft = topLeft;\r\n mTopRight = topRight;\r\n mBottomLeft = bottomLeft;\r\n mBottomRight = bottomRight;\r\n }\r\n\r\n private Quad(float x0, float y0, float x1, float y1, float x2, float y2, float x3, float y3) {\r\n mTopLeft = new PointF(x0, y0);\r\n mTopRight = new PointF(x1, y1);\r\n mBottomLeft = new PointF(x2, y2);\r\n mBottomRight = new PointF(x3, y3);\r\n }\r\n\r\n private Quad(float[] points) {\r\n mTopLeft = new PointF(points[0], points[1]);\r\n mTopRight = new PointF(points[2], points[3]);\r\n mBottomLeft = new PointF(points[4], points[5]);\r\n mBottomRight = new PointF(points[6], points[7]);\r\n }\r\n\r\n private static PointF rotatePoint(PointF p, PointF c, float cosa, float sina) {\r\n float x = (p.x - c.x) * cosa - (p.y - c.y) * sina + c.x;\r\n float y = (p.x - c.x) * sina + (p.y - c.y) * cosa + c.y;\r\n return new PointF(x,y);\r\n }\r\n}\r\n\r\n" }, { "alpha_fraction": 0.6160183548927307, "alphanum_fraction": 
0.6211507320404053, "avg_line_length": 30.331878662109375, "blob_id": "ee348f4365208a5a3a1a5fb7aaabe89fd4e92b7e", "content_id": "68c25aa41f8138b0813e570699280bcc9d6c5e50", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 7404, "license_type": "permissive", "max_line_length": 95, "num_lines": 229, "path": "/tests/OneMedia/src/com/android/onemedia/playback/Renderer.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2014 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\npackage com.android.onemedia.playback;\r\n\r\nimport android.content.Context;\r\nimport android.media.MediaPlayer;\r\nimport android.os.Bundle;\r\n\r\nimport java.util.ArrayList;\r\nimport java.util.List;\r\n\r\n/**\r\n * TODO: Insert description here. (generated by epastern)\r\n */\r\npublic abstract class Renderer {\r\n public static final String FEATURE_SET_CONTENT = \"com.android.media.SET_CONTENT\";\r\n public static final String FEATURE_SET_NEXT_CONTENT = \"com.android.media.SET_NEXT_CONTENT\";\r\n public static final String FEATURE_PLAY = \"com.android.media.PLAY\";\r\n public static final String FEATURE_PAUSE = \"com.android.media.PAUSE\";\r\n public static final String FEATURE_NEXT = \"com.android.media.NEXT\";\r\n public static final String FEATURE_PREVIOUS = \"com.android.media.PREVIOUS\";\r\n public static final String FEATURE_SEEK_TO = \"com.android.media.SEEK_TO\";\r\n public static final String FEATURE_STOP = \"com.android.media.STOP\";\r\n // TODO move states somewhere else\r\n public static final int STATE_ERROR = 0;\r\n /**\r\n * The state MediaPlayerManager starts in before any action has been\r\n * performed.\r\n */\r\n public static final int STATE_INIT = 1 << 0;\r\n /**\r\n * Indicates the source has been set and it is being prepared/buffered\r\n * before starting playback.\r\n */\r\n public static final int STATE_PREPARING = 1 << 1;\r\n /**\r\n * The media is ready and playback can be started.\r\n */\r\n public static final int STATE_READY = 1 << 2;\r\n /**\r\n * The media is currently playing.\r\n */\r\n public static final int STATE_PLAYING = 1 << 3;\r\n /**\r\n * The media is currently paused.\r\n */\r\n public static final int STATE_PAUSED = 1 << 4;\r\n /**\r\n * The service has been stopped and cannot be started again until a new\r\n * source has been set.\r\n */\r\n public static final int STATE_STOPPED = 1 << 5;\r\n /**\r\n * The playback has reached the end. 
It can be restarted by calling play().\r\n */\r\n public static final int STATE_ENDED = 1 << 6;\r\n\r\n // TODO decide on proper way of describing features\r\n protected List<String> mFeatures = new ArrayList<String>();\r\n protected List<Listener> mListeners = new ArrayList<Listener>();\r\n\r\n public Renderer(Context context, Bundle params) {\r\n onCreate(params);\r\n initFeatures(params);\r\n }\r\n\r\n abstract public void setContent(Bundle request);\r\n\r\n public void onCreate(Bundle params) {\r\n // Do nothing by default\r\n }\r\n\r\n public void setNextContent(Bundle request) {\r\n throw new UnsupportedOperationException(\"setNextContent() is not supported.\");\r\n }\r\n\r\n public List<String> getFeatures() {\r\n return mFeatures;\r\n }\r\n\r\n public boolean onPlay() {\r\n // TODO consider making these log warnings instead of crashes (or\r\n // Log.wtf)\r\n // throw new UnsupportedOperationException(\"play is not supported.\");\r\n return false;\r\n }\r\n\r\n public boolean onPause() {\r\n // throw new UnsupportedOperationException(\"pause is not supported.\");\r\n return false;\r\n }\r\n\r\n public boolean onNext() {\r\n // throw new UnsupportedOperationException(\"next is not supported.\");\r\n return false;\r\n }\r\n\r\n public boolean onPrevious() {\r\n // throw new\r\n // UnsupportedOperationException(\"previous is not supported.\");\r\n return false;\r\n }\r\n\r\n public boolean onStop() {\r\n // throw new UnsupportedOperationException(\"stop is not supported.\");\r\n return false;\r\n }\r\n\r\n public boolean onSeekTo(int time) {\r\n // throw new UnsupportedOperationException(\"seekTo is not supported.\");\r\n return false;\r\n }\r\n\r\n public long getSeekPosition() {\r\n // throw new\r\n // UnsupportedOperationException(\"getSeekPosition is not supported.\");\r\n return -1;\r\n }\r\n\r\n public long getDuration() {\r\n // throw new\r\n // UnsupportedOperationException(\"getDuration is not supported.\");\r\n return -1;\r\n }\r\n\r\n public int getPlayState() {\r\n // throw new\r\n // UnsupportedOperationException(\"getPlayState is not supported.\");\r\n return 0;\r\n }\r\n\r\n public void onDestroy() {\r\n // Do nothing by default\r\n }\r\n\r\n public void registerListener(Listener listener) {\r\n if (!mListeners.contains(listener)) {\r\n mListeners.add(listener);\r\n }\r\n }\r\n\r\n public void unregisterListener(Listener listener) {\r\n mListeners.remove(listener);\r\n }\r\n\r\n protected void initFeatures(Bundle params) {\r\n mFeatures.add(FEATURE_SET_CONTENT);\r\n }\r\n\r\n protected void pushOnError(int type, int extra, Bundle extras, Throwable error) {\r\n for (Listener listener : mListeners) {\r\n listener.onError(type, extra, extras, error);\r\n }\r\n }\r\n\r\n protected void pushOnStateChanged(int newState) {\r\n for (Listener listener : mListeners) {\r\n listener.onStateChanged(newState);\r\n }\r\n }\r\n\r\n protected void pushOnBufferingUpdate(int percent) {\r\n for (Listener listener : mListeners) {\r\n listener.onBufferingUpdate(percent);\r\n }\r\n }\r\n\r\n protected void pushOnFocusLost() {\r\n for (Listener listener : mListeners) {\r\n listener.onFocusLost();\r\n }\r\n }\r\n\r\n protected void pushOnNextStarted() {\r\n for (Listener listener : mListeners) {\r\n listener.onNextStarted();\r\n }\r\n }\r\n\r\n public interface Listener {\r\n public static final int ERROR_LOAD_FAILED = 1770;\r\n public static final int ERROR_PREPARE_ERROR = 1771;\r\n public static final int ERROR_PLAYBACK_FAILED = 1772;\r\n\r\n /**\r\n * When an error occurs onError will be 
called but not onStateChanged.\r\n * The Manager will remain in the error state until\r\n * {@link #setContent(Bundle)} is called again.\r\n */\r\n public void onError(int type, int extra, Bundle extras,\r\n Throwable error);\r\n\r\n /**\r\n * onStateChanged will be called whenever the state of the manager\r\n * transitions except to an error state.\r\n */\r\n public void onStateChanged(int newState);\r\n\r\n /**\r\n * This is a passthrough of\r\n * {@link MediaPlayer.OnBufferingUpdateListener}.\r\n */\r\n public void onBufferingUpdate(int percent);\r\n\r\n /**\r\n * Called when audio focus is lost and it is not transient or ducking.\r\n */\r\n public void onFocusLost();\r\n\r\n /**\r\n * Called when the next item was started playing. Only called if a next\r\n * item has been set and the current item has ended.\r\n */\r\n public void onNextStarted();\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6008408665657043, "alphanum_fraction": 0.6019537448883057, "avg_line_length": 36.87980651855469, "blob_id": "ac974a3a49e0d34f75f8d8ff46cdd23c2b17ff34", "content_id": "97ea7f760cdd75b2ce255b3efcb2bd21c7c827ba", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 8087, "license_type": "permissive", "max_line_length": 89, "num_lines": 208, "path": "/media/tests/EffectsTest/src/com/android/effectstest/EffectsTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2009 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.effectstest;\r\n\r\nimport android.app.Activity;\r\nimport android.content.Context;\r\nimport android.content.Intent;\r\nimport android.os.Bundle;\r\nimport android.util.Log;\r\nimport android.view.KeyEvent;\r\nimport android.view.Menu;\r\nimport android.view.View.OnClickListener;\r\nimport android.view.View;\r\nimport android.view.ViewGroup;\r\nimport android.widget.Button;\r\nimport android.widget.TextView;\r\nimport android.widget.ListView;\r\nimport android.widget.BaseAdapter;\r\nimport android.widget.LinearLayout;\r\nimport android.media.audiofx.AudioEffect;\r\n\r\nimport java.util.UUID;\r\n\r\npublic class EffectsTest extends Activity {\r\n\r\n    private final static String TAG = \"EffectsTest\";\r\n\r\n\r\n    public EffectsTest() {\r\n        Log.d(TAG, \"constructor\");\r\n    }\r\n\r\n    @Override\r\n    public void onCreate(Bundle icicle) {\r\n        super.onCreate(icicle);\r\n        setContentView(R.layout.effectstest);\r\n\r\n        Button button = (Button) findViewById(R.id.env_reverb_actvity);\r\n        button.setOnClickListener(new OnClickListener() {\r\n            public void onClick(View v) {\r\n                startActivity(new Intent(EffectsTest.this, EnvReverbTest.class));\r\n            }\r\n        });\r\n\r\n        button = (Button) findViewById(R.id.preset_reverb_actvity);\r\n        button.setOnClickListener(new OnClickListener() {\r\n            public void onClick(View v) {\r\n                startActivity(new Intent(EffectsTest.this, 
PresetReverbTest.class));\r\n            }\r\n        });\r\n\r\n        button = (Button) findViewById(R.id.equalizer_actvity);\r\n        button.setOnClickListener(new OnClickListener() {\r\n            public void onClick(View v) {\r\n                startActivity(new Intent(EffectsTest.this, EqualizerTest.class));\r\n            }\r\n        });\r\n\r\n        button = (Button) findViewById(R.id.virtualizer_actvity);\r\n        button.setOnClickListener(new OnClickListener() {\r\n            public void onClick(View v) {\r\n                startActivity(new Intent(EffectsTest.this, VirtualizerTest.class));\r\n            }\r\n        });\r\n\r\n        button = (Button) findViewById(R.id.bassboost_actvity);\r\n        button.setOnClickListener(new OnClickListener() {\r\n            public void onClick(View v) {\r\n                startActivity(new Intent(EffectsTest.this, BassBoostTest.class));\r\n            }\r\n        });\r\n\r\n        button = (Button) findViewById(R.id.visualizer_actvity);\r\n        button.setOnClickListener(new OnClickListener() {\r\n            public void onClick(View v) {\r\n                startActivity(new Intent(EffectsTest.this, VisualizerTest.class));\r\n            }\r\n        });\r\n\r\n        AudioEffect.Descriptor[] descriptors = AudioEffect.queryEffects();\r\n\r\n        ListView list = (ListView) findViewById(R.id.effect_list);\r\n        list.setAdapter(new EffectListAdapter(this, descriptors));\r\n\r\n    }\r\n\r\n    private class EffectListAdapter extends BaseAdapter {\r\n\r\n        private Context mContext;\r\n\r\n        AudioEffect.Descriptor[] mDescriptors;\r\n\r\n        public EffectListAdapter(Context context, AudioEffect.Descriptor[] descriptors) {\r\n            Log.d(TAG, \"EffectListAdapter constructor\");\r\n            mContext = context;\r\n            mDescriptors = descriptors;\r\n            for (int i = 0; i < mDescriptors.length; i++) {\r\n                Log.d(TAG, \"Effect: \"+i+\" name: \"+ mDescriptors[i].name);\r\n            }\r\n        }\r\n\r\n        public int getCount() {\r\n            Log.d(TAG, \"EffectListAdapter getCount(): \"+mDescriptors.length);\r\n            return mDescriptors.length;\r\n        }\r\n\r\n        public Object getItem(int position) {\r\n            Log.d(TAG, \"EffectListAdapter getItem() at: \"+position+\" name: \"\r\n                    +mDescriptors[position].name);\r\n            return mDescriptors[position];\r\n        }\r\n\r\n        public long getItemId(int position) {\r\n            return position;\r\n        }\r\n\r\n        public View getView(int position, View convertView, ViewGroup parent) {\r\n            EffectView ev;\r\n            if (convertView == null) {\r\n                Log.d(TAG, \"getView() new EffectView position: \" + position);\r\n                ev = new EffectView(mContext, mDescriptors);\r\n            } else {\r\n                Log.d(TAG, \"getView() convertView position: \" + position);\r\n                ev = new EffectView(mContext, mDescriptors);\r\n                //ev = (EffectView) convertView;\r\n            }\r\n            ev.set(position);\r\n            return ev;\r\n        }\r\n    }\r\n\r\n    private class EffectView extends LinearLayout {\r\n        private Context mContext;\r\n        AudioEffect.Descriptor[] mDescriptors;\r\n\r\n        public EffectView(Context context, AudioEffect.Descriptor[] descriptors) {\r\n            super(context);\r\n\r\n            mContext = context;\r\n            mDescriptors = descriptors;\r\n            this.setOrientation(VERTICAL);\r\n        }\r\n\r\n        public String effectUuidToString(UUID effectType) {\r\n            if (effectType.equals(AudioEffect.EFFECT_TYPE_VIRTUALIZER)) {\r\n                return \"Virtualizer\";\r\n            } else if (effectType.equals(AudioEffect.EFFECT_TYPE_ENV_REVERB)){\r\n                return \"Reverb\";\r\n            } else if (effectType.equals(AudioEffect.EFFECT_TYPE_PRESET_REVERB)){\r\n                return \"Preset Reverb\";\r\n            } else if (effectType.equals(AudioEffect.EFFECT_TYPE_EQUALIZER)){\r\n                return \"Equalizer\";\r\n            } else if (effectType.equals(AudioEffect.EFFECT_TYPE_BASS_BOOST)){\r\n                return \"Bass Boost\";\r\n            } else if (effectType.equals(AudioEffect.EFFECT_TYPE_AGC)){\r\n                return \"Automatic Gain Control\";\r\n            } else if 
(effectType.equals(AudioEffect.EFFECT_TYPE_AEC)){\r\n return \"Acoustic Echo Canceler\";\r\n } else if (effectType.equals(AudioEffect.EFFECT_TYPE_NS)){\r\n return \"Noise Suppressor\";\r\n }\r\n\r\n return effectType.toString();\r\n }\r\n\r\n public void set(int position) {\r\n TextView tv = new TextView(mContext);\r\n tv.setText(\"Effect \"+ position);\r\n addView(tv, new LinearLayout.LayoutParams(\r\n LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT));\r\n tv = new TextView(mContext);\r\n tv.setText(\" type: \"+ effectUuidToString(mDescriptors[position].type));\r\n addView(tv, new LinearLayout.LayoutParams(\r\n LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT));\r\n tv = new TextView(mContext);\r\n tv.setText(\" uuid: \"+ mDescriptors[position].uuid.toString());\r\n addView(tv, new LinearLayout.LayoutParams(\r\n LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT));\r\n tv = new TextView(mContext);\r\n tv.setText(\" name: \"+ mDescriptors[position].name);\r\n addView(tv, new LinearLayout.LayoutParams(\r\n LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT));\r\n tv = new TextView(mContext);\r\n tv.setText(\" vendor: \"+ mDescriptors[position].implementor);\r\n addView(tv, new LinearLayout.LayoutParams(\r\n LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT));\r\n tv = new TextView(mContext);\r\n tv.setText(\" mode: \"+ mDescriptors[position].connectMode);\r\n addView(tv, new LinearLayout.LayoutParams(\r\n LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT));\r\n }\r\n }\r\n\r\n}\r\n" }, { "alpha_fraction": 0.6451612710952759, "alphanum_fraction": 0.6451612710952759, "avg_line_length": 13.5, "blob_id": "d9908f35aeb4c4c5348847a1760c6512189d25d5", "content_id": "c685971ad7ac9c8c97efaff55508a9dcf9d8221c", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 62, "license_type": "permissive", "max_line_length": 40, "num_lines": 4, "path": "/media/mca/filterpacks/java/android/filterpacks/performance/package-info.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/**\r\n * @hide\r\n */\r\npackage android.filterpacks.performance;\r\n" }, { "alpha_fraction": 0.619026243686676, "alphanum_fraction": 0.6202247142791748, "avg_line_length": 36.80813980102539, "blob_id": "5eea09dae97b116e67dd32f55a678eca4f095957", "content_id": "54cbbc00ad0c4d76f868c9ce219a6da455c90c9e", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 6675, "license_type": "permissive", "max_line_length": 96, "num_lines": 172, "path": "/tests/testables/src/android/testing/LayoutInflaterBuilder.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2017 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file\r\n * except in compliance with the License. You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software distributed under the\r\n * License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\r\n * KIND, either express or implied. 
See the License for the specific language governing\r\n * permissions and limitations under the License.\r\n */\r\n\r\npackage android.testing;\r\n\r\nimport android.annotation.NonNull;\r\nimport android.content.Context;\r\nimport android.util.ArrayMap;\r\nimport android.util.ArraySet;\r\nimport android.util.AttributeSet;\r\nimport android.util.Log;\r\nimport android.view.LayoutInflater;\r\nimport android.view.View;\r\nimport java.util.Map;\r\nimport java.util.Set;\r\n\r\n/**\r\n * Builder class to create a {@link LayoutInflater} with various properties.\r\n *\r\n * Call any desired configuration methods on the Builder and then use\r\n * {@link Builder#build} to create the LayoutInflater. This is an alternative to directly using\r\n * {@link LayoutInflater#setFilter} and {@link LayoutInflater#setFactory}.\r\n * @hide for use by framework\r\n */\r\npublic class LayoutInflaterBuilder {\r\n private static final String TAG = \"LayoutInflaterBuilder\";\r\n\r\n private Context mFromContext;\r\n private Context mTargetContext;\r\n private Map<String, String> mReplaceMap;\r\n private Set<Class> mDisallowedClasses;\r\n private LayoutInflater mBuiltInflater;\r\n\r\n /**\r\n * Creates a new Builder which will construct a LayoutInflater.\r\n *\r\n * @param fromContext This context's LayoutInflater will be cloned by the Builder using\r\n * {@link LayoutInflater#cloneInContext}. By default, the new LayoutInflater will point at\r\n * this same Context.\r\n */\r\n public LayoutInflaterBuilder(@NonNull Context fromContext) {\r\n mFromContext = fromContext;\r\n mTargetContext = fromContext;\r\n mReplaceMap = null;\r\n mDisallowedClasses = null;\r\n mBuiltInflater = null;\r\n }\r\n\r\n /**\r\n * Instructs the Builder to point the LayoutInflater at a different Context.\r\n *\r\n * @param targetContext Context to be provided to\r\n * {@link LayoutInflater#cloneInContext(Context)}.\r\n * @return Builder object post-modification.\r\n */\r\n public LayoutInflaterBuilder target(@NonNull Context targetContext) {\r\n assertIfAlreadyBuilt();\r\n mTargetContext = targetContext;\r\n return this;\r\n }\r\n\r\n /**\r\n * Instructs the Builder to configure the LayoutInflater such that all instances\r\n * of one {@link View} will be replaced with instances of another during inflation.\r\n *\r\n * @param from Instances of this class will be replaced during inflation.\r\n * @param to Instances of this class will be inflated as replacements.\r\n * @return Builder object post-modification.\r\n */\r\n public LayoutInflaterBuilder replace(@NonNull Class from, @NonNull Class to) {\r\n return replace(from.getName(), to);\r\n }\r\n\r\n /**\r\n * Instructs the Builder to configure the LayoutInflater such that all instances\r\n * of one {@link View} will be replaced with instances of another during inflation.\r\n *\r\n * @param tag Instances of this tag will be replaced during inflation.\r\n * @param to Instances of this class will be inflated as replacements.\r\n * @return Builder object post-modification.\r\n */\r\n public LayoutInflaterBuilder replace(@NonNull String tag, @NonNull Class to) {\r\n assertIfAlreadyBuilt();\r\n if (mReplaceMap == null) {\r\n mReplaceMap = new ArrayMap<String, String>();\r\n }\r\n mReplaceMap.put(tag, to.getName());\r\n return this;\r\n }\r\n\r\n /**\r\n * Instructs the Builder to configure the LayoutInflater such that any attempt to inflate\r\n * a {@link View} of a given type will throw a {@link InflateException}.\r\n *\r\n * @param disallowedClass The Class type that will be disallowed.\r\n * 
@return Builder object post-modification.\r\n */\r\n public LayoutInflaterBuilder disallow(@NonNull Class disallowedClass) {\r\n assertIfAlreadyBuilt();\r\n if (mDisallowedClasses == null) {\r\n mDisallowedClasses = new ArraySet<Class>();\r\n }\r\n mDisallowedClasses.add(disallowedClass);\r\n return this;\r\n }\r\n\r\n /**\r\n * Builds and returns the LayoutInflater. Afterwards, this Builder can no longer can be\r\n * used, all future calls on the Builder will throw {@link AssertionError}.\r\n */\r\n public LayoutInflater build() {\r\n assertIfAlreadyBuilt();\r\n mBuiltInflater =\r\n LayoutInflater.from(mFromContext).cloneInContext(mTargetContext);\r\n setFactoryIfNeeded(mBuiltInflater);\r\n setFilterIfNeeded(mBuiltInflater);\r\n return mBuiltInflater;\r\n }\r\n\r\n private void assertIfAlreadyBuilt() {\r\n if (mBuiltInflater != null) {\r\n throw new AssertionError(\"Cannot use this Builder after build() has been called.\");\r\n }\r\n }\r\n\r\n private void setFactoryIfNeeded(LayoutInflater inflater) {\r\n if (mReplaceMap == null) {\r\n return;\r\n }\r\n inflater.setFactory(\r\n new LayoutInflater.Factory() {\r\n @Override\r\n public View onCreateView(String name, Context context, AttributeSet attrs) {\r\n String replacingClassName = mReplaceMap.get(name);\r\n if (replacingClassName != null) {\r\n try {\r\n return inflater.createView(replacingClassName, null, attrs);\r\n } catch (ClassNotFoundException e) {\r\n Log.e(TAG, \"Could not replace \" + name\r\n + \" with \" + replacingClassName\r\n + \", Exception: \", e);\r\n }\r\n }\r\n return null;\r\n }\r\n });\r\n }\r\n\r\n private void setFilterIfNeeded(LayoutInflater inflater) {\r\n if (mDisallowedClasses == null) {\r\n return;\r\n }\r\n inflater.setFilter(\r\n new LayoutInflater.Filter() {\r\n @Override\r\n public boolean onLoadClass(Class clazz) {\r\n return !mDisallowedClasses.contains(clazz);\r\n }\r\n });\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6269421577453613, "alphanum_fraction": 0.6328688263893127, "avg_line_length": 36.53703689575195, "blob_id": "43f5f821baaf6c785a5e6c1a04e1d91dae48ce22", "content_id": "6e6e7a4f6b4193c26c76ccc1c9d146e539af1dde", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 6243, "license_type": "permissive", "max_line_length": 94, "num_lines": 162, "path": "/tests/HwAccelerationTest/src/com/android/test/hwui/ViewLayerInvalidationActivity.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2012 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.test.hwui;\r\n\r\nimport android.app.Activity;\r\nimport android.graphics.Color;\r\nimport android.os.Bundle;\r\nimport android.view.View;\r\nimport android.view.ViewGroup;\r\nimport android.widget.LinearLayout;\r\nimport android.widget.TextView;\r\n\r\nimport java.util.ArrayList;\r\n\r\npublic class 
ViewLayerInvalidationActivity extends Activity {\r\n\r\n int currentColor = Color.WHITE;\r\n boolean nestedLayersOn = false;\r\n ArrayList<LinearLayout> linearLayouts = new ArrayList<LinearLayout>();\r\n ArrayList<LinearLayout> topLayouts = new ArrayList<LinearLayout>();\r\n ArrayList<TextView> textViews = new ArrayList<TextView>();\r\n LinearLayout container = null;\r\n boolean randomInvalidates = false;\r\n TextView nestedStatusTV, invalidateStatusTV;\r\n static final String NO_NESTING = \"Nested Layer: NO \";\r\n static final String NESTING = \"Nested Layers: YES \";\r\n static final String NO_INVALIDATING = \"Random Invalidating: NO \";\r\n static final String INVALIDATING = \"Random Invalidating: YES \";\r\n static final int TEXT_COLOR_INTERVAL = 400;\r\n static final int INVALIDATING_INTERVAL = 1000;\r\n static final int NESTING_INTERVAL = 2000;\r\n\r\n @Override\r\n public void onCreate(Bundle savedInstanceState) {\r\n super.onCreate(savedInstanceState);\r\n setContentView(R.layout.view_layer_invalidation);\r\n\r\n container = findViewById(R.id.container);\r\n final LinearLayout container1 = findViewById(R.id.container1);\r\n final LinearLayout container2 = findViewById(R.id.container2);\r\n final LinearLayout container3 = findViewById(R.id.container3);\r\n nestedStatusTV = findViewById(R.id.nestedStatus);\r\n invalidateStatusTV = findViewById(R.id.invalidateStatus);\r\n final TextView tva = findViewById(R.id.textviewa);\r\n\r\n topLayouts.add(container1);\r\n topLayouts.add(container2);\r\n topLayouts.add(container3);\r\n\r\n collectLinearLayouts(container);\r\n collectTextViews(container);\r\n\r\n nestedStatusTV.setText(NO_NESTING);\r\n invalidateStatusTV.setText(NO_INVALIDATING);\r\n\r\n tva.setLayerType(View.LAYER_TYPE_HARDWARE, null);\r\n container1.setLayerType(View.LAYER_TYPE_HARDWARE, null);\r\n container2.setLayerType(View.LAYER_TYPE_HARDWARE, null);\r\n container3.setLayerType(View.LAYER_TYPE_HARDWARE, null);\r\n\r\n container.postDelayed(textColorSetter, TEXT_COLOR_INTERVAL);\r\n container.postDelayed(nestedLayerSetter, NESTING_INTERVAL);\r\n container.postDelayed(randomInvalidatesSetter, INVALIDATING_INTERVAL);\r\n }\r\n\r\n private Runnable textColorSetter = new Runnable() {\r\n @Override\r\n public void run() {\r\n currentColor = (currentColor == Color.WHITE) ? Color.RED : Color.WHITE;\r\n for (TextView tv : textViews) {\r\n tv.setTextColor(currentColor);\r\n }\r\n if (randomInvalidates) {\r\n randomInvalidator(container);\r\n }\r\n container.postDelayed(textColorSetter, TEXT_COLOR_INTERVAL);\r\n }\r\n };\r\n\r\n private Runnable randomInvalidatesSetter = new Runnable() {\r\n @Override\r\n public void run() {\r\n randomInvalidates = !randomInvalidates;\r\n invalidateStatusTV.setText(randomInvalidates ? INVALIDATING : NO_INVALIDATING);\r\n container.postDelayed(randomInvalidatesSetter, INVALIDATING_INTERVAL);\r\n }\r\n };\r\n\r\n private Runnable nestedLayerSetter = new Runnable() {\r\n @Override\r\n public void run() {\r\n nestedLayersOn = !nestedLayersOn;\r\n nestedStatusTV.setText(nestedLayersOn ? NESTING : NO_NESTING);\r\n for (LinearLayout layout : linearLayouts) {\r\n layout.setLayerType(nestedLayersOn ?\r\n View.LAYER_TYPE_HARDWARE : View.LAYER_TYPE_NONE, null);\r\n }\r\n if (!nestedLayersOn) {\r\n for (LinearLayout layout : topLayouts) {\r\n layout.setLayerType(View.LAYER_TYPE_HARDWARE, null);\r\n }\r\n }\r\n container.postDelayed(nestedLayerSetter, NESTING_INTERVAL);\r\n }\r\n };\r\n\r\n /**\r\n * Invalidates views based on random chance (50%). 
This is meant to test\r\n * invalidating several items in the hierarchy at the same time, which can cause artifacts\r\n * if our invalidation-propagation logic is not sound.\r\n */\r\n private void randomInvalidator(ViewGroup parent) {\r\n for (int i = 0; i < parent.getChildCount(); ++i) {\r\n View child = parent.getChildAt(i);\r\n if (Math.random() < .5) {\r\n child.invalidate();\r\n }\r\n if (child instanceof ViewGroup) {\r\n randomInvalidator((ViewGroup) child);\r\n }\r\n }\r\n }\r\n\r\n private void collectLinearLayouts(View view) {\r\n if (!(view instanceof LinearLayout)) {\r\n return;\r\n }\r\n LinearLayout parent = (LinearLayout) view;\r\n linearLayouts.add(parent);\r\n for (int i = 0; i < parent.getChildCount(); ++i) {\r\n collectLinearLayouts(parent.getChildAt(i));\r\n }\r\n }\r\n\r\n private void collectTextViews(View view) {\r\n if (view instanceof TextView) {\r\n textViews.add((TextView) view);\r\n return;\r\n }\r\n if (!(view instanceof ViewGroup)) {\r\n return;\r\n }\r\n ViewGroup parent = (ViewGroup) view;\r\n for (int i = 0; i < parent.getChildCount(); ++i) {\r\n collectTextViews(parent.getChildAt(i));\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.5721865892410278, "alphanum_fraction": 0.7001990079879761, "avg_line_length": 34.183998107910156, "blob_id": "4837243af572730f6b23124584ce72c1547c25d9", "content_id": "b810f9928c7b4bfd6e0ebd129f538658e6ec8124", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4523, "license_type": "permissive", "max_line_length": 80, "num_lines": 125, "path": "/libs/androidfw/tests/AttributeFinder_test.cpp", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2016 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#include \"androidfw/AttributeFinder.h\"\r\n\r\n#include \"android-base/macros.h\"\r\n#include \"gtest/gtest.h\"\r\n\r\nnamespace android {\r\n\r\nclass MockAttributeFinder\r\n : public BackTrackingAttributeFinder<MockAttributeFinder, int> {\r\n public:\r\n MockAttributeFinder(const uint32_t* attrs, int len)\r\n : BackTrackingAttributeFinder(0, len) {\r\n attrs_ = new uint32_t[len];\r\n memcpy(attrs_, attrs, sizeof(*attrs) * len);\r\n }\r\n\r\n ~MockAttributeFinder() { delete attrs_; }\r\n\r\n inline uint32_t GetAttribute(const int index) const { return attrs_[index]; }\r\n\r\n private:\r\n uint32_t* attrs_;\r\n};\r\n\r\nstatic const uint32_t kSortedAttributes[] = {0x01010000, 0x01010001, 0x01010002,\r\n 0x01010004, 0x02010001, 0x02010010,\r\n 0x7f010001};\r\n\r\nstatic const uint32_t kPackageUnsortedAttributes[] = {\r\n 0x02010001, 0x02010010, 0x01010000, 0x01010001,\r\n 0x01010002, 0x01010004, 0x7f010001};\r\n\r\nstatic const uint32_t kSinglePackageAttributes[] = {0x7f010007, 0x7f01000a,\r\n 0x7f01000d, 0x00000000};\r\n\r\nTEST(AttributeFinderTest, IteratesSequentially) {\r\n const int end = 
arraysize(kSortedAttributes);\r\n MockAttributeFinder finder(kSortedAttributes, end);\r\n\r\n EXPECT_EQ(0, finder.Find(0x01010000));\r\n EXPECT_EQ(1, finder.Find(0x01010001));\r\n EXPECT_EQ(2, finder.Find(0x01010002));\r\n EXPECT_EQ(3, finder.Find(0x01010004));\r\n EXPECT_EQ(4, finder.Find(0x02010001));\r\n EXPECT_EQ(5, finder.Find(0x02010010));\r\n EXPECT_EQ(6, finder.Find(0x7f010001));\r\n EXPECT_EQ(end, finder.Find(0x7f010002));\r\n}\r\n\r\nTEST(AttributeFinderTest, PackagesAreOutOfOrder) {\r\n const int end = arraysize(kSortedAttributes);\r\n MockAttributeFinder finder(kSortedAttributes, end);\r\n\r\n EXPECT_EQ(6, finder.Find(0x7f010001));\r\n EXPECT_EQ(end, finder.Find(0x7f010002));\r\n EXPECT_EQ(4, finder.Find(0x02010001));\r\n EXPECT_EQ(5, finder.Find(0x02010010));\r\n EXPECT_EQ(0, finder.Find(0x01010000));\r\n EXPECT_EQ(1, finder.Find(0x01010001));\r\n EXPECT_EQ(2, finder.Find(0x01010002));\r\n EXPECT_EQ(3, finder.Find(0x01010004));\r\n}\r\n\r\nTEST(AttributeFinderTest, SomeAttributesAreNotFound) {\r\n const int end = arraysize(kSortedAttributes);\r\n MockAttributeFinder finder(kSortedAttributes, end);\r\n\r\n EXPECT_EQ(0, finder.Find(0x01010000));\r\n EXPECT_EQ(1, finder.Find(0x01010001));\r\n EXPECT_EQ(2, finder.Find(0x01010002));\r\n EXPECT_EQ(end, finder.Find(0x01010003));\r\n EXPECT_EQ(3, finder.Find(0x01010004));\r\n EXPECT_EQ(end, finder.Find(0x01010005));\r\n EXPECT_EQ(end, finder.Find(0x01010006));\r\n EXPECT_EQ(4, finder.Find(0x02010001));\r\n EXPECT_EQ(end, finder.Find(0x02010002));\r\n}\r\n\r\nTEST(AttributeFinderTest, FindAttributesInPackageUnsortedAttributeList) {\r\n const int end = arraysize(kPackageUnsortedAttributes);\r\n MockAttributeFinder finder(kPackageUnsortedAttributes, end);\r\n\r\n EXPECT_EQ(2, finder.Find(0x01010000));\r\n EXPECT_EQ(3, finder.Find(0x01010001));\r\n EXPECT_EQ(4, finder.Find(0x01010002));\r\n EXPECT_EQ(end, finder.Find(0x01010003));\r\n EXPECT_EQ(5, finder.Find(0x01010004));\r\n EXPECT_EQ(end, finder.Find(0x01010005));\r\n EXPECT_EQ(end, finder.Find(0x01010006));\r\n EXPECT_EQ(0, finder.Find(0x02010001));\r\n EXPECT_EQ(end, finder.Find(0x02010002));\r\n EXPECT_EQ(1, finder.Find(0x02010010));\r\n EXPECT_EQ(6, finder.Find(0x7f010001));\r\n}\r\n\r\nTEST(AttributeFinderTest, FindAttributesInSinglePackageAttributeList) {\r\n const int end = arraysize(kSinglePackageAttributes);\r\n MockAttributeFinder finder(kSinglePackageAttributes, end);\r\n\r\n EXPECT_EQ(end, finder.Find(0x010100f4));\r\n EXPECT_EQ(end, finder.Find(0x010100f5));\r\n EXPECT_EQ(end, finder.Find(0x010100f6));\r\n EXPECT_EQ(end, finder.Find(0x010100f7));\r\n EXPECT_EQ(end, finder.Find(0x010100f8));\r\n EXPECT_EQ(end, finder.Find(0x010100fa));\r\n EXPECT_EQ(0, finder.Find(0x7f010007));\r\n}\r\n\r\n} // namespace android\r\n" }, { "alpha_fraction": 0.6463714838027954, "alphanum_fraction": 0.6549815535545349, "avg_line_length": 24.655736923217773, "blob_id": "f7785e18b59301a9bfd3c86b1782be527acd7381", "content_id": "29bbae0f66f00f039c18d0047fcdd3d3ca856511", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1626, "license_type": "permissive", "max_line_length": 80, "num_lines": 61, "path": "/media/mca/filterfw/native/core/native_frame.cpp", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2011 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file 
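The AttributeFinder tests above encode the back-tracking contract: sequential lookups resume from the previous position, a lookup for an earlier attribute rewinds, and misses return `end`. A deliberately simplified Java sketch of that cursor behavior, assuming a fully sorted attribute array; the real BackTrackingAttributeFinder additionally tracks per-package offsets, which is what makes the package-unsorted cases in the tests work:

```java
// Simplified reconstruction, not the framework implementation: a forward
// cursor that only rewinds when the requested attribute precedes it.
final class LinearAttributeFinder {
    private final int[] attrs; // assumed fully sorted
    private int cursor = 0;

    LinearAttributeFinder(int[] sortedAttrs) {
        attrs = sortedAttrs;
    }

    /** Returns the index of attr, or attrs.length ("end") when absent. */
    int find(int attr) {
        if (cursor < attrs.length && attrs[cursor] > attr) {
            cursor = 0; // back-track: target lies before the cursor
        }
        while (cursor < attrs.length && attrs[cursor] < attr) {
            cursor++;
        }
        return (cursor < attrs.length && attrs[cursor] == attr) ? cursor : attrs.length;
    }
}
```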
except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#include \"core/native_frame.h\"\r\n\r\nnamespace android {\r\nnamespace filterfw {\r\n\r\nNativeFrame::NativeFrame(int size) : data_(NULL), size_(size), capacity_(size) {\r\n data_ = capacity_ == 0 ? NULL : new uint8_t[capacity_];\r\n}\r\n\r\nNativeFrame::~NativeFrame() {\r\n delete[] data_;\r\n}\r\n\r\nbool NativeFrame::WriteData(const uint8_t* data, int offset, int size) {\r\n if (size_ >= (offset + size)) {\r\n memcpy(data_ + offset, data, size);\r\n return true;\r\n }\r\n return false;\r\n}\r\n\r\nbool NativeFrame::SetData(uint8_t* data, int size) {\r\n delete[] data_;\r\n size_ = capacity_ = size;\r\n data_ = data;\r\n return true;\r\n}\r\n\r\nNativeFrame* NativeFrame::Clone() const {\r\n NativeFrame* result = new NativeFrame(size_);\r\n if (data_)\r\n result->WriteData(data_, 0, size_);\r\n return result;\r\n}\r\n\r\nbool NativeFrame::Resize(int newSize) {\r\n if (newSize <= capacity_ && newSize >= 0) {\r\n size_ = newSize;\r\n return true;\r\n }\r\n return false;\r\n}\r\n\r\n} // namespace filterfw\r\n} // namespace android\r\n" }, { "alpha_fraction": 0.5390153527259827, "alphanum_fraction": 0.5408732295036316, "avg_line_length": 41.060001373291016, "blob_id": "6e82d2809e14f05e3ea79269b85fac97c590428c", "content_id": "b07b51e39f6a894bc3c644837a5f7993ff507527", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 4306, "license_type": "permissive", "max_line_length": 87, "num_lines": 100, "path": "/media/mca/filterfw/jni/jni_native_program.h", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2011 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#ifndef ANDROID_FILTERFW_JNI_NATIVE_PROGRAM_H\r\n#define ANDROID_FILTERFW_JNI_NATIVE_PROGRAM_H\r\n\r\n#include <jni.h>\r\n\r\n#ifdef __cplusplus\r\nextern \"C\" {\r\n#endif\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_NativeProgram_allocate(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_NativeProgram_deallocate(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_NativeProgram_nativeInit(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_NativeProgram_openNativeLibrary(JNIEnv* env,\r\n jobject thiz,\r\n jstring lib_name);\r\n\r\nJNIEXPORT jboolean 
JNICALL\r\nJava_android_filterfw_core_NativeProgram_bindInitFunction(JNIEnv* env,\r\n jobject thiz,\r\n jstring func_name);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_NativeProgram_bindSetValueFunction(JNIEnv* env,\r\n jobject thiz,\r\n jstring func_name);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_NativeProgram_bindGetValueFunction(JNIEnv* env,\r\n jobject thiz,\r\n jstring func_name);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_NativeProgram_bindProcessFunction(JNIEnv* env,\r\n jobject thiz,\r\n jstring func_name);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_NativeProgram_bindResetFunction(JNIEnv* env,\r\n jobject thiz,\r\n jstring func_name);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_NativeProgram_bindTeardownFunction(JNIEnv* env,\r\n jobject thiz,\r\n jstring func_name);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_NativeProgram_callNativeInit(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_NativeProgram_callNativeSetValue(JNIEnv* env,\r\n jobject thiz,\r\n jstring key,\r\n jstring value);\r\n\r\nJNIEXPORT jstring JNICALL\r\nJava_android_filterfw_core_NativeProgram_callNativeGetValue(JNIEnv* env,\r\n jobject thiz,\r\n jstring key);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_NativeProgram_callNativeProcess(JNIEnv* env,\r\n jobject thiz,\r\n jobjectArray inputs,\r\n jobject output);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_NativeProgram_callNativeReset(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_NativeProgram_callNativeTeardown(JNIEnv* env, jobject thiz);\r\n\r\n#ifdef __cplusplus\r\n}\r\n#endif\r\n\r\n#endif // ANDROID_FILTERFW_JNI_NATIVE_PROGRAM_H\r\n" }, { "alpha_fraction": 0.725978672504425, "alphanum_fraction": 0.725978672504425, "avg_line_length": 26.100000381469727, "blob_id": "a4dfe5cbfdd6952e8e6d2834d4634d29bfc66d8a", "content_id": "816928884d665a4c5ce7c633d526868a2d7cc863", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "HTML", "length_bytes": 281, "license_type": "permissive", "max_line_length": 102, "num_lines": 10, "path": "/rs/java/android/renderscript/package.html", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "<HTML>\r\n<BODY>\r\n<p>RenderScript provides support for high-performance computation across heterogeneous processors.</p>\r\n\r\n<p>For more information, see the\r\n<a href=\"{@docRoot}guide/topics/renderscript/index.html\">RenderScript</a> developer guide.</p>\r\n{@more}\r\n\r\n</BODY>\r\n</HTML>\r\n" }, { "alpha_fraction": 0.6397163271903992, "alphanum_fraction": 0.6397163271903992, "avg_line_length": 18.14285659790039, "blob_id": "3d204e5fd99748fb499b42abe80e368d61e850d4", "content_id": "56f768097a24b5a39d537335ab281148b93042d3", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 705, "license_type": "permissive", "max_line_length": 57, "num_lines": 35, "path": "/rs/jni/Android.mk", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "LOCAL_PATH:= $(call my-dir)\r\ninclude $(CLEAR_VARS)\r\n\r\nLOCAL_SRC_FILES:= \\\r\n android_renderscript_RenderScript.cpp\r\n\r\nLOCAL_SHARED_LIBRARIES := \\\r\n libandroid \\\r\n libandroid_runtime \\\r\n libandroidfw \\\r\n libnativehelper 
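The exported symbols in jni_native_program.h above follow the standard JNI convention, `Java_<mangled package>_<Class>_<method>`. A sketch of the Java-side declarations a subset of these symbols would bind to; the class shape and library name are assumptions for illustration, not the actual framework source:

```java
// Assumed Java counterpart: per the JNI naming rule,
// Java_android_filterfw_core_NativeProgram_allocate binds to
// android.filterfw.core.NativeProgram#allocate().
package android.filterfw.core;

class NativeProgram {
    static {
        System.loadLibrary("filterfw"); // library name is an assumption
    }

    native boolean allocate();
    native boolean deallocate();
    native boolean openNativeLibrary(String libName);
    native boolean bindProcessFunction(String funcName);
    native boolean callNativeInit();
}
```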
\\\r\n libRS \\\r\n libcutils \\\r\n libhwui \\\r\n liblog \\\r\n libutils \\\r\n libui \\\r\n libgui \\\r\n libjnigraphics\r\n\r\nLOCAL_HEADER_LIBRARIES := \\\r\n libbase_headers\r\n\r\nLOCAL_C_INCLUDES += \\\r\n $(JNI_H_INCLUDE) \\\r\n frameworks/rs\r\n\r\nLOCAL_CFLAGS += -Wno-unused-parameter\r\nLOCAL_CFLAGS += -Wall -Werror -Wunused -Wunreachable-code\r\n\r\nLOCAL_MODULE:= librs_jni\r\nLOCAL_MODULE_TAGS := optional\r\nLOCAL_REQUIRED_MODULES := libRS\r\n\r\ninclude $(BUILD_SHARED_LIBRARY)\r\n" }, { "alpha_fraction": 0.7614457607269287, "alphanum_fraction": 0.7638553977012634, "avg_line_length": 23.9375, "blob_id": "d432eab4834ffd8c72d4fd6cbe2623a3d8d33fec", "content_id": "08ffc6dd12e73227ef09ef9f6864e9cebb4e30db", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 415, "license_type": "permissive", "max_line_length": 72, "num_lines": 16, "path": "/core/tests/hosttests/test-apps/MultiDexLegacyTestAppTests2/src/com/android/multidexlegacytestapp/test2/MultiDexAndroidJUnitRunner.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "package com.android.multidexlegacytestapp.test2;\r\n\r\nimport android.os.Bundle;\r\n\r\nimport androidx.multidex.MultiDex;\r\nimport androidx.test.runner.AndroidJUnitRunner;\r\n\r\npublic class MultiDexAndroidJUnitRunner extends AndroidJUnitRunner {\r\n\r\n @Override\r\n public void onCreate(Bundle arguments) {\r\n MultiDex.installInstrumentation(getContext(), getTargetContext());\r\n super.onCreate(arguments);\r\n }\r\n\r\n}\r\n" }, { "alpha_fraction": 0.8516483306884766, "alphanum_fraction": 0.8516483306884766, "avg_line_length": 76, "blob_id": "a5ad19223013c67f444025738c5613f6d373bade", "content_id": "ca4ac82cfa75d20cd0b939e59a6c4689e18fe164", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 546, "license_type": "permissive", "max_line_length": 88, "num_lines": 7, "path": "/wifi/tests/assets/pps/README.txt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "PerProviderSubscription.xml - valid PPS XML file\r\nPerProviderSubscription_DuplicateHomeSP.xml - containing multiple HomeSP node\r\nPerProviderSubscription_DuplicateValue.xml - FriendlyName node contains multiple Value\r\nPerProviderSubscription_MissingValue.xml - FriendlyName node is missing Value\r\nPerProviderSubscription_MissingName.xml - HomeSP node is missing NodeName\r\nPerProviderSubscription_InvalidNode.xml - FQDN node contains both Value and a child node\r\nPerProviderSubscription_InvalidName.xml - FriendlyName node have a typo in its name\r\n" }, { "alpha_fraction": 0.6638611555099487, "alphanum_fraction": 0.6685954928398132, "avg_line_length": 32.490909576416016, "blob_id": "b56b233adb19e6fc5b80e3ba19009a0f9bc14dd4", "content_id": "72cdcf9c7ae58506e3ef0e213689c0a77d2ced40", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1901, "license_type": "permissive", "max_line_length": 102, "num_lines": 55, "path": "/tests/appwidgets/AppWidgetHostTest/src/com/android/tests/appwidgethost/TestAppWidgetConfigure.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2008 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this 
file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.tests.appwidgethost;\r\n\r\nimport android.app.Activity;\r\nimport android.content.Intent;\r\nimport android.content.SharedPreferences;\r\nimport android.os.Bundle;\r\nimport android.util.Log;\r\nimport android.view.View;\r\nimport android.widget.EditText;\r\n\r\npublic class TestAppWidgetConfigure extends Activity {\r\n static final String TAG = \"TestAppWidgetConfigure\";\r\n\r\n public TestAppWidgetConfigure() {\r\n super();\r\n }\r\n\r\n @Override\r\n public void onCreate(Bundle icicle) {\r\n super.onCreate(icicle);\r\n setContentView(R.layout.test_appwidget_configure);\r\n\r\n findViewById(R.id.save_button).setOnClickListener(mOnClickListener);\r\n }\r\n\r\n View.OnClickListener mOnClickListener = new View.OnClickListener() {\r\n public void onClick(View v) {\r\n String text = ((EditText)findViewById(R.id.edit_text)).getText().toString();\r\n Log.d(TAG, \"text is '\" + text + '\\'');\r\n SharedPreferences.Editor prefs = getSharedPreferences(TestAppWidgetProvider.PREFS_NAME, 0)\r\n .edit();\r\n prefs.putString(TestAppWidgetProvider.PREF_PREFIX_KEY, text);\r\n prefs.commit();\r\n setResult(RESULT_OK);\r\n finish();\r\n }\r\n };\r\n\r\n}\r\n\r\n\r\n" }, { "alpha_fraction": 0.759235680103302, "alphanum_fraction": 0.759235680103302, "avg_line_length": 50.33333206176758, "blob_id": "96d8a69dd131be6d89b688fef43a1d8a85f1069c", "content_id": "8df72f9f4833fe69b48c861587593b0247d19be5", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 785, "license_type": "permissive", "max_line_length": 251, "num_lines": 15, "path": "/packages/SystemUI/plugin/update_plugin_lib.sh", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "#!/bin/bash\r\ncd $ANDROID_BUILD_TOP/frameworks/base/packages/SystemUI/plugin\r\n# Clear out anything old.\r\nrm -rf /tmp/plugin_classes/\r\nmkdir /tmp/plugin_classes\r\n\r\n# Compile the jar\r\njavac -cp $ANDROID_BUILD_TOP/out/target/common/obj/JAVA_LIBRARIES/framework_intermediates/classes.jar:$ANDROID_BUILD_TOP/out/target/common/obj/JAVA_LIBRARIES/core-all_intermediates/classes.jar `find ../plugin*/src -name *.java` -d /tmp/plugin_classes/\r\necho \"\" >> /tmp/plugin_classes/manifest.txt\r\njar cvfm SystemUIPluginLib.jar /tmp/plugin_classes/manifest.txt -C /tmp/plugin_classes .\r\n\r\n# Place the jar and update the latest\r\nmv SystemUIPluginLib.jar ./SystemUIPluginLib-`date +%m-%d-%Y`.jar\r\nrm SystemUIPluginLib-latest.jar\r\nln -s SystemUIPluginLib-`date +%m-%d-%Y`.jar SystemUIPluginLib-latest.jar\r\n" }, { "alpha_fraction": 0.6270592212677002, "alphanum_fraction": 0.6298633217811584, "avg_line_length": 32.349395751953125, "blob_id": "a4284c7f9b6154ab38ab6cbbd6b1bec53a37f681", "content_id": "f027b90ee78f85127092610552728eb685d4e9ec", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2853, "license_type": "permissive", "max_line_length": 97, 
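TestAppWidgetConfigure above, being a test fixture, finishes with a bare `setResult(RESULT_OK)`. A production configure activity must also echo the widget id back in the result Intent, since the host treats a result without AppWidgetManager.EXTRA_APPWIDGET_ID as a failed configuration. A sketch of that canonical finish sequence (the helper class name is hypothetical):

```java
import android.app.Activity;
import android.appwidget.AppWidgetManager;
import android.content.Intent;

final class ConfigureResult {
    // Canonical end of an appwidget configure activity: return the widget id
    // that was passed in, via EXTRA_APPWIDGET_ID, along with RESULT_OK.
    static void finishWithWidgetId(Activity activity, int appWidgetId) {
        Intent result = new Intent();
        result.putExtra(AppWidgetManager.EXTRA_APPWIDGET_ID, appWidgetId);
        activity.setResult(Activity.RESULT_OK, result);
        activity.finish();
    }
}
```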
"num_lines": 83, "path": "/media/mca/filterfw/jni/jni_native_buffer.cpp", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2011 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#include \"jni/jni_native_buffer.h\"\r\n#include \"jni/jni_util.h\"\r\n\r\nchar* GetJBufferData(JNIEnv* env, jobject buffer, int* size) {\r\n jclass base_class = env->FindClass(\"android/filterfw/core/NativeBuffer\");\r\n\r\n // Get fields\r\n jfieldID ptr_field = env->GetFieldID(base_class, \"mDataPointer\", \"J\");\r\n jfieldID size_field = env->GetFieldID(base_class, \"mSize\", \"I\");\r\n\r\n // Get their values\r\n char* data = reinterpret_cast<char*>(env->GetLongField(buffer, ptr_field));\r\n if (size) {\r\n *size = env->GetIntField(buffer, size_field);\r\n }\r\n\r\n // Clean-up\r\n env->DeleteLocalRef(base_class);\r\n\r\n return data;\r\n}\r\n\r\nbool AttachDataToJBuffer(JNIEnv* env, jobject buffer, char* data, int size) {\r\n jclass base_class = env->FindClass(\"android/filterfw/core/NativeBuffer\");\r\n\r\n // Get fields\r\n jfieldID ptr_field = env->GetFieldID(base_class, \"mDataPointer\", \"J\");\r\n jfieldID size_field = env->GetFieldID(base_class, \"mSize\", \"I\");\r\n\r\n // Set their values\r\n env->SetLongField(buffer, ptr_field, reinterpret_cast<jlong>(data));\r\n env->SetIntField(buffer, size_field, size);\r\n\r\n return true;\r\n}\r\n\r\njboolean Java_android_filterfw_core_NativeBuffer_allocate(JNIEnv* env, jobject thiz, jint size) {\r\n char* data = new char[size];\r\n return ToJBool(AttachDataToJBuffer(env, thiz, data, size));\r\n}\r\n\r\njboolean Java_android_filterfw_core_NativeBuffer_deallocate(JNIEnv* env,\r\n jobject thiz,\r\n jboolean owns_data) {\r\n if (ToCppBool(owns_data)) {\r\n char* data = GetJBufferData(env, thiz, NULL);\r\n delete[] data;\r\n }\r\n return JNI_TRUE;\r\n}\r\n\r\njboolean Java_android_filterfw_core_NativeBuffer_nativeCopyTo(JNIEnv* env,\r\n jobject thiz,\r\n jobject new_buffer) {\r\n // Get source buffer\r\n int size;\r\n char* source_data = GetJBufferData(env, thiz, &size);\r\n\r\n // Make copy\r\n char* target_data = new char[size];\r\n memcpy(target_data, source_data, size);\r\n\r\n // Attach it to new buffer\r\n AttachDataToJBuffer(env, new_buffer, target_data, size);\r\n\r\n return JNI_TRUE;\r\n}\r\n\r\n" }, { "alpha_fraction": 0.7341772317886353, "alphanum_fraction": 0.7408786416053772, "avg_line_length": 35.30555725097656, "blob_id": "c63370562f27db82df6f122a6466f466db176f71", "content_id": "c959289ef02732094792d3055006f2ee121fa471", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1343, "license_type": "permissive", "max_line_length": 92, "num_lines": 36, "path": "/packages/SystemUI/plugin/src/com/android/systemui/plugins/qs/QSFactory.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * 
Copyright (C) 2017 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file\r\n * except in compliance with the License. You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software distributed under the\r\n * License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\r\n * KIND, either express or implied. See the License for the specific language governing\r\n * permissions and limitations under the License.\r\n */\r\n\r\npackage com.android.systemui.plugins.qs;\r\n\r\nimport com.android.systemui.plugins.Plugin;\r\nimport com.android.systemui.plugins.annotations.DependsOn;\r\nimport com.android.systemui.plugins.annotations.ProvidesInterface;\r\n\r\n/**\r\n * Plugin that has the ability to create or override any part of\r\n * QS tiles.\r\n */\r\n@ProvidesInterface(action = QSFactory.ACTION, version = QSFactory.VERSION)\r\n@DependsOn(target = QSTile.class)\r\n@DependsOn(target = QSTileView.class)\r\npublic interface QSFactory extends Plugin {\r\n\r\n String ACTION = \"com.android.systemui.action.PLUGIN_QS_FACTORY\";\r\n int VERSION = 1;\r\n\r\n QSTile createTile(String tileSpec);\r\n QSTileView createTileView(QSTile tile, boolean collapsedView);\r\n\r\n}\r\n" }, { "alpha_fraction": 0.7071583271026611, "alphanum_fraction": 0.7071583271026611, "avg_line_length": 26.8125, "blob_id": "d0d37e0cfd9aaea21639bb83304f1ebbd7694573", "content_id": "e0128caff8ca9fb03206722cd0fc09cc2407a96b", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 461, "license_type": "permissive", "max_line_length": 87, "num_lines": 16, "path": "/tests/LockTaskTests/Android.mk", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "LOCAL_PATH:= $(call my-dir)\r\ninclude $(CLEAR_VARS)\r\n\r\nLOCAL_MODULE_TAGS := optional\r\nLOCAL_MODULE_PATH := $(PRODUCT_OUT)/system/priv-app\r\n\r\nLOCAL_PACKAGE_NAME := LockTaskTests\r\nLOCAL_SDK_VERSION := current\r\nLOCAL_CERTIFICATE := platform\r\n\r\nLOCAL_SRC_FILES := $(call all-Iaidl-files-under, src) $(call all-java-files-under, src)\r\n\r\ninclude $(BUILD_PACKAGE)\r\n\r\n# Use the following include to make our test apk.\r\ninclude $(call all-makefiles-under,$(LOCAL_PATH))\r\n" }, { "alpha_fraction": 0.7790055274963379, "alphanum_fraction": 0.7920643091201782, "avg_line_length": 54.88571548461914, "blob_id": "df77378d9bf99e3ebe070d4ccaaca571469a0305", "content_id": "e9323d84299fc99ae23c7641d575a7530617e386", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 1991, "license_type": "permissive", "max_line_length": 111, "num_lines": 35, "path": "/wifi/java/android/net/wifi/migration_samples/README.txt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "This folder contains sample files for each of the 4 XML Wi-Fi config store files in Android 11 AOSP.\r\nOEMs can use these files as reference for converting their previous customized\r\nformats into the AOSP format. 
The conversion logic needs to be written in\r\nWifiMigration.java class, i.e each OEM needs to modify\r\nWifiMigration.convertAndRetrieveSharedConfigStoreFile() and the\r\nWifiMigration.convertAndRetrieveUserConfigStoreFile() methods.\r\n\r\nThe 4 files are:\r\n\r\nShared files\r\n============\r\n1) WifiConfigStore.xml - General storage for shared configurations. Includes\r\nuser's saved Wi-Fi networks.\r\nAOSP Path in Android 10: /data/misc/wifi/WifiConfigStore.xml\r\nAOSP Path in Android 11: /data/misc/apexdata/com.android/wifi/WifiConfigStore.xml\r\nSample File (in this folder): Shared_WifiConfigStore.xml\r\n\r\n2) WifiConfigStoreSoftAp.xml - Storage for user's softap/tethering configuration.\r\nAOSP Path in Android 10: /data/misc/wifi/softap.conf.\r\nNote: Was key/value format in Android 10. Conversion to XML done in SoftApConfToXmlMigrationUtil.java.\r\nAOSP Path in Android 11: /data/misc/apexdata/com.android/wifi/WifiConfigStore.xml\r\nSample File (in this folder): Shared_WifiConfigStoreSoftAp.xml\r\n\r\nUser specific files\r\n==================\r\n3) WifiConfigStore.xml - General storage for user specific configurations. Includes\r\nuser's saved passpoint networks, Wi-Fi network request approvals, etc.\r\nAOSP Path in Android 10: /data/misc_ce/<userId>/wifi/WifiConfigStore.xml\r\nAOSP Path in Android 11: /data/misc_ce/<userId>/apexdata/com.android/wifi/WifiConfigStore.xml\r\nSample File (in this folder): User_WifiConfigStore.xml\r\n\r\n4) WifiConfigStoreNetworkSuggestions.xml - Storage for app installed network suggestions.\r\nAOSP Path in Android 10: /data/misc_ce/<userId>/wifi/WifiConfigStoreNetworkSuggestions.xml\r\nAOSP Path in Android 11: /data/misc_ce/<userId>/apexdata/com.android/wifi/WifiConfigStoreNetworkSuggestions.xml\r\nSample File (in this folder): User_WifiConfigStoreNetworkSuggestions.xml\r\n" }, { "alpha_fraction": 0.717587947845459, "alphanum_fraction": 0.7221105694770813, "avg_line_length": 35.54716873168945, "blob_id": "6c8256165330fbfe09528561413ac8ffee48865e", "content_id": "2e1d9d47e0ee6b06e331ba995b1e7bc6f94dbc4d", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1990, "license_type": "permissive", "max_line_length": 94, "num_lines": 53, "path": "/packages/SystemUI/plugin/src/com/android/systemui/plugins/qs/QSTileView.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2017 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file\r\n * except in compliance with the License. You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software distributed under the\r\n * License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\r\n * KIND, either express or implied. 
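The OEM hook named in the migration README above lives in WifiMigration.convertAndRetrieveSharedConfigStoreFile() and convertAndRetrieveUserConfigStoreFile(). A minimal sketch of a conversion body, assuming the AOSP store-file id constants and assuming the legacy file is already AOSP-format XML that only needs to be streamed back; a real OEM conversion would parse its proprietary format and emit AOSP XML instead:

```java
import android.net.wifi.WifiMigration;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStream;

final class OemWifiMigrationSketch {
    // Hypothetical conversion body: stream the legacy Android 10 file back
    // unchanged (path taken from the README above). That returning null
    // means "no migration" for untouched store files is an assumption here.
    static InputStream convertSharedStore(int storeFileId) throws FileNotFoundException {
        if (storeFileId == WifiMigration.STORE_FILE_SHARED_GENERAL) {
            return new FileInputStream("/data/misc/wifi/WifiConfigStore.xml");
        }
        return null;
    }
}
```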
See the License for the specific language governing\r\n * permissions and limitations under the License.\r\n */\r\n\r\npackage com.android.systemui.plugins.qs;\r\n\r\nimport android.content.Context;\r\nimport android.view.View;\r\nimport android.widget.LinearLayout;\r\n\r\nimport com.android.systemui.plugins.annotations.DependsOn;\r\nimport com.android.systemui.plugins.annotations.ProvidesInterface;\r\nimport com.android.systemui.plugins.qs.QSTile.State;\r\n\r\n@ProvidesInterface(version = QSTileView.VERSION)\r\n@DependsOn(target = QSIconView.class)\r\n@DependsOn(target = QSTile.class)\r\npublic abstract class QSTileView extends LinearLayout {\r\n public static final int VERSION = 2;\r\n\r\n public QSTileView(Context context) {\r\n super(context);\r\n }\r\n\r\n public abstract View updateAccessibilityOrder(View previousView);\r\n\r\n /**\r\n * Returns a {@link QSIconView} containing only the icon for this tile. Use\r\n * {@link #getIconWithBackground()} to retrieve the entire tile (background & peripherals\r\n * included).\r\n */\r\n public abstract QSIconView getIcon();\r\n\r\n /**\r\n * Returns a {@link View} containing the icon for this tile along with the accompanying\r\n * background circle/peripherals. To retrieve only the inner icon, use {@link #getIcon()}.\r\n */\r\n public abstract View getIconWithBackground();\r\n public abstract void init(QSTile tile);\r\n public abstract void onStateChanged(State state);\r\n\r\n public abstract int getDetailY();\r\n}\r\n" }, { "alpha_fraction": 0.5068368315696716, "alphanum_fraction": 0.6896080374717712, "avg_line_length": 32.79365158081055, "blob_id": "a4f588b1f91a3399116c4249bc2144207e037b20", "content_id": "7f1269e29cb0199a004e8a8cd1a00797d63279f2", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 2204, "license_type": "permissive", "max_line_length": 79, "num_lines": 63, "path": "/libs/hwui/tests/scripts/prep_buller.sh", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "#buller is bullhead & angler (☞゚ヮ゚)☞\r\n\r\nnr=$(adb shell cat /proc/cpuinfo | grep processor | wc -l)\r\ncpubase=/sys/devices/system/cpu\r\ngov=cpufreq/scaling_governor\r\n\r\nadb root\r\nadb wait-for-device\r\nadb shell stop thermal-engine\r\nadb shell stop perfd\r\n\r\n# LITTLE cores\r\n# 384000 460800 600000 672000 787200 864000 960000 1248000 1440000\r\n# BIG cores\r\n# 384000 480000 633600 768000 864000 960000 1248000 1344000 1440000\r\n# 1536000 1632000 1689600 1824000\r\n\r\ncpu=0\r\nS=960000\r\nwhile [ $((cpu < 4)) -eq 1 ]; do\r\n echo \"Setting cpu $cpu to $S hz\"\r\n adb shell \"echo 1 > $cpubase/cpu${cpu}/online\"\r\n adb shell \"echo userspace > $cpubase/cpu${cpu}/$gov\"\r\n adb shell \"echo $S > $cpubase/cpu${cpu}/cpufreq/scaling_max_freq\"\r\n adb shell \"echo $S > $cpubase/cpu${cpu}/cpufreq/scaling_min_freq\"\r\n adb shell \"echo $S > $cpubase/cpu${cpu}/cpufreq/scaling_setspeed\"\r\n cpu=$(($cpu + 1))\r\ndone\r\n\r\nwhile [ $((cpu < $nr)) -eq 1 ]; do\r\n echo \"disable cpu $cpu\"\r\n adb shell \"echo 0 > $cpubase/cpu${cpu}/online\"\r\n cpu=$(($cpu + 1))\r\ndone\r\n\r\necho \"setting GPU bus and idle timer\"\r\nadb shell \"echo 0 > /sys/class/kgsl/kgsl-3d0/bus_split\"\r\nadb shell \"echo 1 > /sys/class/kgsl/kgsl-3d0/force_clk_on\"\r\nadb shell \"echo 10000 > /sys/class/kgsl/kgsl-3d0/idle_timer\"\r\n\r\n# angler: 0 762 1144 1525 2288 3509 4173 5271 5928 7904 9887 11863\r\nadb shell \"echo 11863 > 
/sys/class/devfreq/qcom,gpubw.70/min_freq\" &> /dev/null\r\n# bullhead: 0 762 1144 1525 2288 3509 4173 5271 5928 7102\r\nadb shell \"echo 7102 > /sys/class/devfreq/qcom,gpubw.19/min_freq\" &> /dev/null\r\n\r\n\r\nboard=$(adb shell \"getprop ro.product.board\")\r\nfreq=0\r\nif [ \"$board\" = \"bullhead\" ]\r\nthen\r\n #600000000 490000000 450000000 367000000 300000000 180000000\r\n freq=300000000\r\nelse\r\n #600000000 510000000 450000000 390000000 305000000 180000000\r\n freq=305000000\r\nfi\r\necho \"performance mode, $freq Hz\"\r\nadb shell \"echo performance > /sys/class/kgsl/kgsl-3d0/devfreq/governor\"\r\nadb shell \"echo $freq > /sys/class/kgsl/kgsl-3d0/devfreq/min_freq\"\r\nadb shell \"echo $freq > /sys/class/kgsl/kgsl-3d0/devfreq/max_freq\"\r\n\r\nadb shell \"echo 4 > /sys/class/kgsl/kgsl-3d0/min_pwrlevel\"\r\nadb shell \"echo 4 > /sys/class/kgsl/kgsl-3d0/max_pwrlevel\"\r\n\r\n" }, { "alpha_fraction": 0.774954617023468, "alphanum_fraction": 0.774954617023468, "avg_line_length": 43.91666793823242, "blob_id": "35f718f82be4a652433932f5a01f4865757de460", "content_id": "54f18a48549a63977f42b295e0c6ee771823941e", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "HTML", "length_bytes": 551, "license_type": "permissive", "max_line_length": 80, "num_lines": 12, "path": "/core/java/android/view/inputmethod/package.html", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "<html>\r\n<body>\r\nFramework classes for interaction between views and input methods (such\r\nas soft keyboards). See {@link android.view.inputmethod.InputMethodManager} for\r\nan overview. In most cases the main classes here are not needed for\r\nmost applications, since they are dealt with for you by\r\n{@link android.widget.TextView}. 
When implementing a custom text editor,\r\nhowever, you will need to implement the\r\n{@link android.view.inputmethod.InputConnection} class to allow the current\r\ninput method to interact with your view.\r\n</body>\r\n</html>\r\n" }, { "alpha_fraction": 0.6154218316078186, "alphanum_fraction": 0.6417694091796875, "avg_line_length": 37.400001525878906, "blob_id": "02c3fd04d3edb1c5cc4e7594244f1e85904251a2", "content_id": "4982223d59e1fca26924a3b4570412e3c1a7761a", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 4137, "license_type": "permissive", "max_line_length": 101, "num_lines": 105, "path": "/tests/HwAccelerationTest/src/com/android/test/hwui/BitmapsActivity.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2010 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.test.hwui;\r\n\r\nimport android.app.Activity;\r\nimport android.content.Context;\r\nimport android.graphics.Bitmap;\r\nimport android.graphics.BitmapFactory;\r\nimport android.graphics.Canvas;\r\nimport android.graphics.Paint;\r\nimport android.graphics.PorterDuff;\r\nimport android.graphics.PorterDuffXfermode;\r\nimport android.os.Bundle;\r\nimport android.util.Log;\r\nimport android.view.Gravity;\r\nimport android.view.View;\r\nimport android.view.animation.Animation;\r\nimport android.view.animation.ScaleAnimation;\r\nimport android.widget.FrameLayout;\r\n\r\n@SuppressWarnings({\"UnusedDeclaration\"})\r\npublic class BitmapsActivity extends Activity {\r\n @Override\r\n protected void onCreate(Bundle savedInstanceState) {\r\n super.onCreate(savedInstanceState);\r\n final BitmapsView view = new BitmapsView(this);\r\n final FrameLayout layout = new FrameLayout(this);\r\n layout.addView(view, new FrameLayout.LayoutParams(480, 800, Gravity.CENTER));\r\n setContentView(layout);\r\n \r\n ScaleAnimation a = new ScaleAnimation(1.0f, 2.0f, 1.0f, 2.0f,\r\n ScaleAnimation.RELATIVE_TO_SELF, 0.5f,\r\n ScaleAnimation.RELATIVE_TO_SELF,0.5f);\r\n a.setDuration(2000);\r\n a.setRepeatCount(Animation.INFINITE);\r\n a.setRepeatMode(Animation.REVERSE);\r\n view.startAnimation(a);\r\n }\r\n\r\n static class BitmapsView extends View {\r\n private Paint mBitmapPaint;\r\n private final Bitmap mBitmap1;\r\n private final Bitmap mBitmap2;\r\n private final PorterDuffXfermode mDstIn;\r\n\r\n BitmapsView(Context c) {\r\n super(c);\r\n\r\n mBitmap1 = BitmapFactory.decodeResource(c.getResources(), R.drawable.sunset1);\r\n mBitmap2 = BitmapFactory.decodeResource(c.getResources(), R.drawable.sunset2);\r\n \r\n Log.d(\"Bitmap\", \"mBitmap1.isMutable() = \" + mBitmap1.isMutable());\r\n Log.d(\"Bitmap\", \"mBitmap2.isMutable() = \" + mBitmap2.isMutable());\r\n\r\n BitmapFactory.Options opts = new BitmapFactory.Options();\r\n opts.inMutable = true;\r\n Bitmap bitmap = 
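For the custom-editor case the inputmethod package doc above describes, the InputConnection is normally handed to the IME from View#onCreateInputConnection. A minimal sketch using the stock BaseInputConnection (the view class name is hypothetical):

```java
import android.content.Context;
import android.text.InputType;
import android.view.View;
import android.view.inputmethod.BaseInputConnection;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputConnection;

class CustomEditorView extends View {
    CustomEditorView(Context context) {
        super(context);
        setFocusableInTouchMode(true); // the view must be focusable to receive IME input
    }

    @Override
    public InputConnection onCreateInputConnection(EditorInfo outAttrs) {
        outAttrs.inputType = InputType.TYPE_CLASS_TEXT;
        // true = this view behaves as a full editor, not a dummy target
        return new BaseInputConnection(this, true);
    }
}
```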
BitmapFactory.decodeResource(c.getResources(), R.drawable.sunset1, opts);\r\n Log.d(\"Bitmap\", \"bitmap.isMutable() = \" + bitmap.isMutable());\r\n \r\n mBitmapPaint = new Paint();\r\n mDstIn = new PorterDuffXfermode(PorterDuff.Mode.DST_IN);\r\n }\r\n\r\n @Override\r\n protected void onDraw(Canvas canvas) {\r\n super.onDraw(canvas);\r\n\r\n canvas.translate(120.0f, 50.0f);\r\n canvas.drawBitmap(mBitmap1, 0.0f, 0.0f, mBitmapPaint);\r\n\r\n canvas.translate(0.0f, mBitmap1.getHeight());\r\n canvas.translate(0.0f, 25.0f);\r\n canvas.drawBitmap(mBitmap2, 0.0f, 0.0f, null);\r\n \r\n mBitmapPaint.setAlpha(127);\r\n canvas.translate(0.0f, mBitmap2.getHeight());\r\n canvas.translate(0.0f, 25.0f);\r\n canvas.drawBitmap(mBitmap1, 0.0f, 0.0f, mBitmapPaint);\r\n \r\n mBitmapPaint.setAlpha(255);\r\n canvas.translate(0.0f, mBitmap1.getHeight());\r\n canvas.translate(0.0f, 25.0f);\r\n mBitmapPaint.setColor(0xffff0000);\r\n canvas.drawRect(0.0f, 0.0f, mBitmap2.getWidth(), mBitmap2.getHeight(), mBitmapPaint);\r\n mBitmapPaint.setXfermode(mDstIn);\r\n canvas.drawBitmap(mBitmap2, 0.0f, 0.0f, mBitmapPaint);\r\n\r\n mBitmapPaint.reset();\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.5769230723381042, "alphanum_fraction": 0.5769230723381042, "avg_line_length": 11, "blob_id": "3b87cb30ed84a23ec2d84f181a288e2316de6078", "content_id": "1ad3b8c860c9ecd5b8eef976ec2c1306b23c4ccc", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 52, "license_type": "permissive", "max_line_length": 30, "num_lines": 4, "path": "/media/mca/filterfw/java/android/filterfw/core/package-info.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/**\r\n * @hide\r\n */\r\npackage android.filterfw.core;\r\n" }, { "alpha_fraction": 0.6926002502441406, "alphanum_fraction": 0.7029737234115601, "avg_line_length": 39.31428527832031, "blob_id": "57e33dfb6e1337bdabf7b75e7547c1c7913927d4", "content_id": "f657a4973d93fafdc61e61d92b90434d6b9ba606", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2892, "license_type": "permissive", "max_line_length": 100, "num_lines": 70, "path": "/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/samples/simplecamera/IfElseFilter.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright 2013 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage androidx.media.filterfw.samples.simplecamera;\r\n\r\nimport android.util.Log;\r\nimport androidx.media.filterfw.Filter;\r\nimport androidx.media.filterfw.FrameBuffer2D;\r\nimport androidx.media.filterfw.FrameImage2D;\r\nimport androidx.media.filterfw.FrameType;\r\nimport androidx.media.filterfw.FrameValue;\r\nimport androidx.media.filterfw.MffContext;\r\nimport 
androidx.media.filterfw.OutputPort;\r\nimport androidx.media.filterfw.Signature;\r\n\r\n\r\npublic class IfElseFilter extends Filter {\r\n\r\n private static final String TAG = \"IfElseFilter\";\r\n private static boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);\r\n\r\n public IfElseFilter(MffContext context, String name) {\r\n super(context, name);\r\n }\r\n\r\n @Override\r\n public Signature getSignature() {\r\n FrameType imageIn = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_GPU);\r\n FrameType videoIn = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.READ_GPU);\r\n FrameType imageOut = FrameType.image2D(FrameType.ELEMENT_RGBA8888, FrameType.WRITE_GPU);\r\n\r\n return new Signature().addInputPort(\"falseResult\", Signature.PORT_REQUIRED, imageIn)\r\n .addInputPort(\"trueResult\", Signature.PORT_REQUIRED, videoIn)\r\n .addInputPort(\"condition\", Signature.PORT_REQUIRED, FrameType.single(boolean.class))\r\n .addOutputPort(\"output\", Signature.PORT_REQUIRED, imageOut)\r\n .disallowOtherPorts();\r\n }\r\n\r\n @Override\r\n protected void onProcess() {\r\n OutputPort outPort = getConnectedOutputPort(\"output\");\r\n FrameImage2D trueFrame = getConnectedInputPort(\"trueResult\").pullFrame().asFrameImage2D();\r\n FrameImage2D falseFrame = getConnectedInputPort(\"falseResult\").pullFrame().asFrameImage2D();\r\n FrameValue boolFrameValue = getConnectedInputPort(\"condition\").pullFrame().asFrameValue();\r\n boolean condition = (Boolean) boolFrameValue.getValue();\r\n FrameBuffer2D outputFrame;\r\n // If the condition is true, then we want to use the camera, else use the gallery\r\n if (condition) {\r\n outputFrame = trueFrame;\r\n } else {\r\n outputFrame = falseFrame;\r\n }\r\n outPort.pushFrame(outputFrame);\r\n\r\n }\r\n\r\n}\r\n" }, { "alpha_fraction": 0.758266806602478, "alphanum_fraction": 0.758266806602478, "avg_line_length": 41.849998474121094, "blob_id": "114b5510e1b1a54f26f8c89d68b6a677f6400163", "content_id": "59e9c338c780e949607c3408e889ad1810cdf0b3", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "HTML", "length_bytes": 877, "license_type": "permissive", "max_line_length": 139, "num_lines": 20, "path": "/location/java/android/location/package.html", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "<html>\r\n<body>\r\n\r\n<p>Contains the framework API classes that define Android location-based and\r\n\trelated services.</p>\r\n<p class=\"warning\">\r\n<strong>This API is not the recommended method for accessing Android location.</strong><br>\r\nThe\r\n<a href=\"https://developers.google.com/android/reference/com/google/android/gms/location/package-summary\">Google Location Services API</a>,\r\npart of Google Play services, is the preferred way to add location-awareness to\r\nyour app. It offers a simpler API, higher accuracy, low-power geofencing, and\r\nmore. 
If you are currently using the android.location API, you are strongly\r\nencouraged to switch to the Google Location Services API as soon as\r\npossible.\r\n<br><br>\r\nTo learn more about the Google Location Services API, see the\r\n<a href=\"{@docRoot}google/play-services/location.html\">Location API overview</a>.\r\n</p>\r\n</body>\r\n</html>\r\n" }, { "alpha_fraction": 0.7699642181396484, "alphanum_fraction": 0.7747318148612976, "avg_line_length": 38.975608825683594, "blob_id": "8856d36e48c95ecc8379c844f0607296c7640de5", "content_id": "cf5d0252345c01d1b39c65c816cb038bef850d16", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 1678, "license_type": "permissive", "max_line_length": 75, "num_lines": 41, "path": "/tests/net/integration/util/com/android/server/ConnectivityServiceTestUtils.kt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2019 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License\r\n */\r\n\r\npackage com.android.server\r\n\r\nimport android.net.ConnectivityManager.TYPE_BLUETOOTH\r\nimport android.net.ConnectivityManager.TYPE_ETHERNET\r\nimport android.net.ConnectivityManager.TYPE_MOBILE\r\nimport android.net.ConnectivityManager.TYPE_NONE\r\nimport android.net.ConnectivityManager.TYPE_TEST\r\nimport android.net.ConnectivityManager.TYPE_VPN\r\nimport android.net.ConnectivityManager.TYPE_WIFI\r\nimport android.net.NetworkCapabilities.TRANSPORT_BLUETOOTH\r\nimport android.net.NetworkCapabilities.TRANSPORT_CELLULAR\r\nimport android.net.NetworkCapabilities.TRANSPORT_ETHERNET\r\nimport android.net.NetworkCapabilities.TRANSPORT_TEST\r\nimport android.net.NetworkCapabilities.TRANSPORT_VPN\r\nimport android.net.NetworkCapabilities.TRANSPORT_WIFI\r\n\r\nfun transportToLegacyType(transport: Int) = when (transport) {\r\n TRANSPORT_BLUETOOTH -> TYPE_BLUETOOTH\r\n TRANSPORT_CELLULAR -> TYPE_MOBILE\r\n TRANSPORT_ETHERNET -> TYPE_ETHERNET\r\n TRANSPORT_TEST -> TYPE_TEST\r\n TRANSPORT_VPN -> TYPE_VPN\r\n TRANSPORT_WIFI -> TYPE_WIFI\r\n else -> TYPE_NONE\r\n}" }, { "alpha_fraction": 0.6735308766365051, "alphanum_fraction": 0.6870918869972229, "avg_line_length": 26.855072021484375, "blob_id": "f596ee1b938eb9891b20760b1022557f620c81e5", "content_id": "c9ca27ca8cde5156327e34c0acc462cfe2e50a83", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1991, "license_type": "permissive", "max_line_length": 75, "num_lines": 69, "path": "/media/mca/filterpacks/native/base/time_util.h", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2011 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy 
of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#ifndef ANDROID_FILTERFW_FILTERPACKS_BASE_TIME_UTIL_H\r\n#define ANDROID_FILTERFW_FILTERPACKS_BASE_TIME_UTIL_H\r\n\r\n#include <string>\r\n#include <utils/RefBase.h>\r\n\r\n#define LOG_MFF_RUNNING_TIMES 0\r\n\r\nnamespace android {\r\nnamespace filterfw {\r\n\r\nuint64_t getTimeUs();\r\n\r\nclass NamedStopWatch : public RefBase {\r\n public:\r\n static const uint64_t kDefaultLoggingPeriodInFrames;\r\n\r\n explicit NamedStopWatch(const std::string& name);\r\n void Start();\r\n void Stop();\r\n\r\n void SetName(const std::string& name) { mName = name; }\r\n void SetLoggingPeriodInFrames(uint64_t numFrames) {\r\n mLoggingPeriodInFrames = numFrames;\r\n }\r\n\r\n const std::string& Name() const { return mName; }\r\n uint64_t NumCalls() const { return mNumCalls; }\r\n uint64_t TotalUSec() const { return mTotalUSec; }\r\n\r\n private:\r\n std::string mName;\r\n uint64_t mLoggingPeriodInFrames;\r\n uint64_t mStartUSec;\r\n uint64_t mNumCalls;\r\n uint64_t mTotalUSec;\r\n};\r\n\r\nclass ScopedTimer {\r\n public:\r\n explicit ScopedTimer(const std::string& stop_watch_name);\r\n explicit ScopedTimer(NamedStopWatch* watch)\r\n : mWatch(watch) { mWatch->Start(); }\r\n ~ScopedTimer() { mWatch->Stop(); }\r\n\r\n private:\r\n NamedStopWatch* mWatch;\r\n};\r\n\r\n} // namespace filterfw\r\n} // namespace android\r\n\r\n#endif // ANDROID_FILTERFW_FILTERPACKS_BASE_TIME_UTIL_H\r\n" }, { "alpha_fraction": 0.6530451774597168, "alphanum_fraction": 0.6561886072158813, "avg_line_length": 32.863014221191406, "blob_id": "57d4c8ffe614b340dbd0c6fa7d1e493ae1bd34ba", "content_id": "e33e73deba5c37988b45ab6153022bde5a8c7462", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2545, "license_type": "permissive", "max_line_length": 76, "num_lines": 73, "path": "/tests/OneMedia/src/com/android/onemedia/playback/RequestUtils.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2014 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\npackage com.android.onemedia.playback;\r\n\r\nimport android.os.Bundle;\r\n\r\nimport java.util.HashMap;\r\nimport java.util.Map;\r\n\r\n/**\r\n * TODO: Insert description here. 
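ScopedTimer in time_util.h above is C++ RAII: the destructor stops the watch at scope exit. A hypothetical Java analog, not part of the original sources, using AutoCloseable so that try-with-resources plays the destructor's role:

```java
// Hypothetical analog of the C++ ScopedTimer above: close() stops the timer
// at scope exit when used with try-with-resources.
final class JScopedTimer implements AutoCloseable {
    private final String name;
    private final long startNanos = System.nanoTime();

    JScopedTimer(String name) {
        this.name = name;
    }

    @Override
    public void close() {
        long elapsedUs = (System.nanoTime() - startNanos) / 1000;
        System.out.println(name + " took " + elapsedUs + " us");
    }
}

// Usage:
// try (JScopedTimer t = new JScopedTimer("renderPass")) {
//     // timed work
// }
```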
(generated by epastern)\r\n */\r\npublic class RequestUtils {\r\n public static final String ACTION_SET_CONTENT = \"set_content\";\r\n public static final String ACTION_SET_NEXT_CONTENT = \"set_next_content\";\r\n public static final String ACTION_PAUSE = \"com.android.onemedia.pause\";\r\n public static final String ACTION_PLAY = \"com.android.onemedia.play\";\r\n public static final String ACTION_REW = \"com.android.onemedia.rew\";\r\n public static final String ACTION_FFWD = \"com.android.onemedia.ffwd\";\r\n public static final String ACTION_PREV = \"com.android.onemedia.prev\";\r\n public static final String ACTION_NEXT = \"com.android.onemedia.next\";\r\n\r\n public static final String EXTRA_KEY_SOURCE = \"source\";\r\n public static final String EXTRA_KEY_METADATA = \"metadata\";\r\n public static final String EXTRA_KEY_HEADERS = \"headers\";\r\n\r\n private RequestUtils() {\r\n }\r\n\r\n public static class ContentBuilder {\r\n private Bundle mBundle;\r\n\r\n public ContentBuilder() {\r\n mBundle = new Bundle();\r\n }\r\n\r\n public ContentBuilder setSource(String source) {\r\n mBundle.putString(EXTRA_KEY_SOURCE, source);\r\n return this;\r\n }\r\n\r\n /**\r\n * @see MediaItemMetadata\r\n * @param metadata The metadata for this item\r\n */\r\n public ContentBuilder setMetadata(Bundle metadata) {\r\n mBundle.putBundle(EXTRA_KEY_METADATA, metadata);\r\n return this;\r\n }\r\n\r\n public ContentBuilder setHeaders(HashMap<String, String> headers) {\r\n mBundle.putSerializable(EXTRA_KEY_HEADERS, headers);\r\n return this;\r\n }\r\n\r\n public Bundle build() {\r\n return mBundle;\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.751937985420227, "alphanum_fraction": 0.751937985420227, "avg_line_length": 41, "blob_id": "329bd6de5c602e6e30f3b5af6c28c55feb287e5e", "content_id": "9bbfda736d23cbee40baa4f376f8d01b9c208df0", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 129, "license_type": "permissive", "max_line_length": 64, "num_lines": 3, "path": "/cmds/bmgr/bmgr", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "#!/system/bin/sh\r\nexport CLASSPATH=/system/framework/bmgr.jar\r\nexec app_process /system/bin com.android.commands.bmgr.Bmgr \"$@\"\r\n" }, { "alpha_fraction": 0.6378714442253113, "alphanum_fraction": 0.6410677433013916, "avg_line_length": 35.70684051513672, "blob_id": "b56c84001b46e254ed142e13e67c32309c94c26f", "content_id": "f333e9af4d297ab1c43b984aa2303d9342f7888c", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 11576, "license_type": "permissive", "max_line_length": 100, "num_lines": 307, "path": "/services/backup/java/com/android/server/backup/BackupPasswordManager.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2017 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions 
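ContentBuilder in RequestUtils above is a plain Bundle builder keyed by EXTRA_KEY_SOURCE and EXTRA_KEY_METADATA. A usage sketch, assuming a caller in the same package as RequestUtils; the URL and metadata values are illustrative only:

```java
import android.os.Bundle;

final class RequestUtilsDemo { // assumes package com.android.onemedia.playback
    static Bundle buildContent() {
        Bundle metadata = new Bundle();
        metadata.putString("title", "Example track"); // illustrative key/value
        return new RequestUtils.ContentBuilder()
                .setSource("http://example.com/stream.mp3") // illustrative URL
                .setMetadata(metadata)
                .build();
    }
}
```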
and\r\n * limitations under the License\r\n */\r\n\r\npackage com.android.server.backup;\r\n\r\nimport android.content.Context;\r\nimport android.util.Slog;\r\n\r\nimport com.android.server.backup.utils.DataStreamFileCodec;\r\nimport com.android.server.backup.utils.DataStreamCodec;\r\nimport com.android.server.backup.utils.PasswordUtils;\r\n\r\nimport java.io.DataInputStream;\r\nimport java.io.DataOutputStream;\r\nimport java.io.File;\r\nimport java.io.IOException;\r\nimport java.security.SecureRandom;\r\n\r\n/**\r\n * Manages persisting and verifying backup passwords.\r\n *\r\n * <p>Does not persist the password itself, but persists a PBKDF2 hash with a randomly chosen (also\r\n * persisted) salt. Validation is performed by running the challenge text through the same\r\n * PBKDF2 cycle with the persisted salt, and checking the hashes match.\r\n *\r\n * @see PasswordUtils for the hashing algorithm.\r\n */\r\npublic final class BackupPasswordManager {\r\n private static final String TAG = \"BackupPasswordManager\";\r\n private static final boolean DEBUG = false;\r\n\r\n private static final int BACKUP_PW_FILE_VERSION = 2;\r\n private static final int DEFAULT_PW_FILE_VERSION = 1;\r\n\r\n private static final String PASSWORD_VERSION_FILE_NAME = \"pwversion\";\r\n private static final String PASSWORD_HASH_FILE_NAME = \"pwhash\";\r\n\r\n // See https://android-developers.googleblog.com/2013/12/changes-to-secretkeyfactory-api-in.html\r\n public static final String PBKDF_CURRENT = \"PBKDF2WithHmacSHA1\";\r\n public static final String PBKDF_FALLBACK = \"PBKDF2WithHmacSHA1And8bit\";\r\n\r\n private final SecureRandom mRng;\r\n private final Context mContext;\r\n private final File mBaseStateDir;\r\n\r\n private String mPasswordHash;\r\n private int mPasswordVersion;\r\n private byte[] mPasswordSalt;\r\n\r\n /**\r\n * Creates an instance enforcing permissions using the {@code context} and persisting password\r\n * data within the {@code baseStateDir}.\r\n *\r\n * @param context The context, for enforcing permissions around setting the password.\r\n * @param baseStateDir A directory within which to persist password data.\r\n * @param secureRandom Random number generator with which to generate password salts.\r\n */\r\n BackupPasswordManager(Context context, File baseStateDir, SecureRandom secureRandom) {\r\n mContext = context;\r\n mRng = secureRandom;\r\n mBaseStateDir = baseStateDir;\r\n loadStateFromFilesystem();\r\n }\r\n\r\n /**\r\n * Returns {@code true} if a password for backup is set.\r\n *\r\n * @throws SecurityException If caller does not have {@link android.Manifest.permission#BACKUP}\r\n * permission.\r\n */\r\n boolean hasBackupPassword() {\r\n mContext.enforceCallingOrSelfPermission(android.Manifest.permission.BACKUP,\r\n \"hasBackupPassword\");\r\n return mPasswordHash != null && mPasswordHash.length() > 0;\r\n }\r\n\r\n /**\r\n * Returns {@code true} if {@code password} matches the persisted password.\r\n *\r\n * @throws SecurityException If caller does not have {@link android.Manifest.permission#BACKUP}\r\n * permission.\r\n */\r\n boolean backupPasswordMatches(String password) {\r\n if (hasBackupPassword() && !passwordMatchesSaved(password)) {\r\n if (DEBUG) Slog.w(TAG, \"Backup password mismatch; aborting\");\r\n return false;\r\n }\r\n return true;\r\n }\r\n\r\n /**\r\n * Sets the new password, given a correct current password.\r\n *\r\n * @throws SecurityException If caller does not have {@link android.Manifest.permission#BACKUP}\r\n * permission.\r\n * @return {@code true} if 
has permission to set the password, {@code currentPassword}\r\n * matches the currently persisted password, and is able to persist {@code newPassword}.\r\n */\r\n boolean setBackupPassword(String currentPassword, String newPassword) {\r\n mContext.enforceCallingOrSelfPermission(android.Manifest.permission.BACKUP,\r\n \"setBackupPassword\");\r\n\r\n if (!passwordMatchesSaved(currentPassword)) {\r\n return false;\r\n }\r\n\r\n // Snap up to latest password file version.\r\n try {\r\n getPasswordVersionFileCodec().serialize(BACKUP_PW_FILE_VERSION);\r\n mPasswordVersion = BACKUP_PW_FILE_VERSION;\r\n } catch (IOException e) {\r\n Slog.e(TAG, \"Unable to write backup pw version; password not changed\");\r\n return false;\r\n }\r\n\r\n if (newPassword == null || newPassword.isEmpty()) {\r\n return clearPassword();\r\n }\r\n\r\n try {\r\n byte[] salt = randomSalt();\r\n String newPwHash = PasswordUtils.buildPasswordHash(\r\n PBKDF_CURRENT, newPassword, salt, PasswordUtils.PBKDF2_HASH_ROUNDS);\r\n\r\n getPasswordHashFileCodec().serialize(new BackupPasswordHash(newPwHash, salt));\r\n mPasswordHash = newPwHash;\r\n mPasswordSalt = salt;\r\n return true;\r\n } catch (IOException e) {\r\n Slog.e(TAG, \"Unable to set backup password\");\r\n }\r\n return false;\r\n }\r\n\r\n /**\r\n * Returns {@code true} if should try salting using the older PBKDF algorithm.\r\n *\r\n * <p>This is {@code true} for v1 files.\r\n */\r\n private boolean usePbkdf2Fallback() {\r\n return mPasswordVersion < BACKUP_PW_FILE_VERSION;\r\n }\r\n\r\n /**\r\n * Deletes the current backup password.\r\n *\r\n * @return {@code true} if successful.\r\n */\r\n private boolean clearPassword() {\r\n File passwordHashFile = getPasswordHashFile();\r\n if (passwordHashFile.exists() && !passwordHashFile.delete()) {\r\n Slog.e(TAG, \"Unable to clear backup password\");\r\n return false;\r\n }\r\n\r\n mPasswordHash = null;\r\n mPasswordSalt = null;\r\n return true;\r\n }\r\n\r\n /**\r\n * Sets the password hash, salt, and version in the object from what has been persisted to the\r\n * filesystem.\r\n */\r\n private void loadStateFromFilesystem() {\r\n try {\r\n mPasswordVersion = getPasswordVersionFileCodec().deserialize();\r\n } catch (IOException e) {\r\n Slog.e(TAG, \"Unable to read backup pw version\");\r\n mPasswordVersion = DEFAULT_PW_FILE_VERSION;\r\n }\r\n\r\n try {\r\n BackupPasswordHash hash = getPasswordHashFileCodec().deserialize();\r\n mPasswordHash = hash.hash;\r\n mPasswordSalt = hash.salt;\r\n } catch (IOException e) {\r\n Slog.e(TAG, \"Unable to read saved backup pw hash\");\r\n }\r\n }\r\n\r\n /**\r\n * Whether the candidate password matches the current password. 
If the persisted password is an\r\n * older version, hashing with the older algorithm is also attempted.\r\n *\r\n * @param candidatePassword The password to try.\r\n * @return {@code true} if the passwords match.\r\n */\r\n private boolean passwordMatchesSaved(String candidatePassword) {\r\n return passwordMatchesSaved(PBKDF_CURRENT, candidatePassword)\r\n || (usePbkdf2Fallback() && passwordMatchesSaved(PBKDF_FALLBACK, candidatePassword));\r\n }\r\n\r\n /**\r\n * Returns {@code true} if the candidate password is correct.\r\n *\r\n * @param algorithm The algorithm used to hash passwords.\r\n * @param candidatePassword The candidate password to compare to the current password.\r\n * @return {@code true} if the candidate password matched the saved password.\r\n */\r\n private boolean passwordMatchesSaved(String algorithm, String candidatePassword) {\r\n if (mPasswordHash == null) {\r\n return candidatePassword == null || candidatePassword.equals(\"\");\r\n } else if (candidatePassword == null || candidatePassword.length() == 0) {\r\n // The current password is not zero-length, but the candidate password is.\r\n return false;\r\n } else {\r\n String candidatePasswordHash = PasswordUtils.buildPasswordHash(\r\n algorithm, candidatePassword, mPasswordSalt, PasswordUtils.PBKDF2_HASH_ROUNDS);\r\n return mPasswordHash.equalsIgnoreCase(candidatePasswordHash);\r\n }\r\n }\r\n\r\n private byte[] randomSalt() {\r\n int bitsPerByte = 8;\r\n byte[] array = new byte[PasswordUtils.PBKDF2_SALT_SIZE / bitsPerByte];\r\n mRng.nextBytes(array);\r\n return array;\r\n }\r\n\r\n private DataStreamFileCodec<Integer> getPasswordVersionFileCodec() {\r\n return new DataStreamFileCodec<>(\r\n new File(mBaseStateDir, PASSWORD_VERSION_FILE_NAME),\r\n new PasswordVersionFileCodec());\r\n }\r\n\r\n private DataStreamFileCodec<BackupPasswordHash> getPasswordHashFileCodec() {\r\n return new DataStreamFileCodec<>(getPasswordHashFile(), new PasswordHashFileCodec());\r\n }\r\n\r\n private File getPasswordHashFile() {\r\n return new File(mBaseStateDir, PASSWORD_HASH_FILE_NAME);\r\n }\r\n\r\n /**\r\n * Container class for a PBKDF hash and the salt used to create the hash.\r\n */\r\n private static final class BackupPasswordHash {\r\n public String hash;\r\n public byte[] salt;\r\n\r\n BackupPasswordHash(String hash, byte[] salt) {\r\n this.hash = hash;\r\n this.salt = salt;\r\n }\r\n }\r\n\r\n /**\r\n * The password version file contains a single 32-bit integer.\r\n */\r\n private static final class PasswordVersionFileCodec implements\r\n DataStreamCodec<Integer> {\r\n @Override\r\n public void serialize(Integer integer, DataOutputStream dataOutputStream)\r\n throws IOException {\r\n dataOutputStream.writeInt(integer);\r\n }\r\n\r\n @Override\r\n public Integer deserialize(DataInputStream dataInputStream) throws IOException {\r\n return dataInputStream.readInt();\r\n }\r\n }\r\n\r\n /**\r\n * The password hash file contains\r\n *\r\n * <ul>\r\n * <li>A 32-bit integer representing the number of bytes in the salt;\r\n * <li>The salt bytes;\r\n * <li>A UTF-8 string of the hash.\r\n * </ul>\r\n */\r\n private static final class PasswordHashFileCodec implements\r\n DataStreamCodec<BackupPasswordHash> {\r\n @Override\r\n public void serialize(BackupPasswordHash backupPasswordHash,\r\n DataOutputStream dataOutputStream) throws IOException {\r\n dataOutputStream.writeInt(backupPasswordHash.salt.length);\r\n dataOutputStream.write(backupPasswordHash.salt);\r\n dataOutputStream.writeUTF(backupPasswordHash.hash);\r\n }\r\n\r\n @Override\r\n public 
BackupPasswordHash deserialize(\r\n DataInputStream dataInputStream) throws IOException {\r\n int saltLen = dataInputStream.readInt();\r\n byte[] salt = new byte[saltLen];\r\n dataInputStream.readFully(salt);\r\n String hash = dataInputStream.readUTF();\r\n return new BackupPasswordHash(hash, salt);\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6384839415550232, "alphanum_fraction": 0.6583090424537659, "avg_line_length": 24.384614944458008, "blob_id": "d99c2407b7d786617cd2a9978609fe8b65f12f69", "content_id": "143f0d0be28d6f3bbbb235f73aadb1b5855654a0", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1715, "license_type": "permissive", "max_line_length": 90, "num_lines": 65, "path": "/tools/aapt2/text/Utf8Iterator.cpp", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2017 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#include \"text/Utf8Iterator.h\"\r\n\r\n#include \"android-base/logging.h\"\r\n#include \"utils/Unicode.h\"\r\n\r\nusing ::android::StringPiece;\r\n\r\nnamespace aapt {\r\nnamespace text {\r\n\r\nUtf8Iterator::Utf8Iterator(const StringPiece& str)\r\n : str_(str), current_pos_(0), next_pos_(0), current_codepoint_(0) {\r\n DoNext();\r\n}\r\n\r\nvoid Utf8Iterator::DoNext() {\r\n current_pos_ = next_pos_;\r\n int32_t result = utf32_from_utf8_at(str_.data(), str_.size(), current_pos_, &next_pos_);\r\n if (result == -1) {\r\n current_codepoint_ = 0u;\r\n } else {\r\n current_codepoint_ = static_cast<char32_t>(result);\r\n }\r\n}\r\n\r\nbool Utf8Iterator::HasNext() const {\r\n return current_codepoint_ != 0;\r\n}\r\n\r\nsize_t Utf8Iterator::Position() const {\r\n return current_pos_;\r\n}\r\n\r\nvoid Utf8Iterator::Skip(int amount) {\r\n while (amount > 0 && HasNext()) {\r\n Next();\r\n --amount;\r\n }\r\n}\r\n\r\nchar32_t Utf8Iterator::Next() {\r\n CHECK(HasNext()) << \"Next() called after iterator exhausted\";\r\n char32_t result = current_codepoint_;\r\n DoNext();\r\n return result;\r\n}\r\n\r\n} // namespace text\r\n} // namespace aapt\r\n" }, { "alpha_fraction": 0.6400386691093445, "alphanum_fraction": 0.6471953392028809, "avg_line_length": 40.37704849243164, "blob_id": "ac79f984c7825ed5bf198da1685f7dcffecba37e", "content_id": "674a5808b6fac6202d3c5404c527eca5e6620967", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 5170, "license_type": "permissive", "max_line_length": 100, "num_lines": 122, "path": "/tests/VectorDrawableTest/src/com/android/test/dynamic/AnimatedVectorDrawableDupPerf.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2014 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file except\r\n * in 
compliance with the License. You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software distributed under the License\r\n * is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express\r\n * or implied. See the License for the specific language governing permissions and limitations under\r\n * the License.\r\n */\r\npackage com.android.test.dynamic;\r\n\r\nimport android.app.Activity;\r\nimport android.content.res.Resources;\r\nimport android.graphics.drawable.AnimatedVectorDrawable;\r\nimport android.graphics.drawable.VectorDrawable;\r\nimport android.os.Bundle;\r\nimport android.util.AttributeSet;\r\nimport android.util.Log;\r\nimport android.util.Xml;\r\nimport android.widget.Button;\r\nimport android.widget.GridLayout;\r\nimport android.widget.ScrollView;\r\nimport android.widget.TextView;\r\n\r\nimport org.xmlpull.v1.XmlPullParser;\r\nimport org.xmlpull.v1.XmlPullParserException;\r\n\r\nimport java.io.IOException;\r\nimport java.text.DecimalFormat;\r\n\r\n\r\n@SuppressWarnings({\"UnusedDeclaration\"})\r\npublic class AnimatedVectorDrawableDupPerf extends Activity {\r\n\r\n private static final String LOGTAG = \"AnimatedVectorDrawableDupPerf\";\r\n protected int[] icon = {\r\n R.drawable.animation_vector_linear_progress_bar,\r\n R.drawable.animation_vector_linear_progress_bar,\r\n R.drawable.animation_vector_linear_progress_bar,\r\n R.drawable.animation_vector_linear_progress_bar,\r\n R.drawable.animation_vector_linear_progress_bar,\r\n R.drawable.animation_vector_linear_progress_bar,\r\n R.drawable.animation_vector_linear_progress_bar,\r\n R.drawable.animation_vector_linear_progress_bar,\r\n R.drawable.animation_vector_linear_progress_bar,\r\n R.drawable.animation_vector_linear_progress_bar,\r\n R.drawable.animation_vector_linear_progress_bar,\r\n R.drawable.animation_vector_linear_progress_bar,\r\n R.drawable.animation_vector_linear_progress_bar,\r\n R.drawable.animation_vector_linear_progress_bar,\r\n R.drawable.animation_vector_linear_progress_bar,\r\n R.drawable.animation_vector_linear_progress_bar,\r\n R.drawable.animation_vector_linear_progress_bar,\r\n R.drawable.animation_vector_linear_progress_bar,\r\n R.drawable.animation_vector_linear_progress_bar,\r\n R.drawable.animation_vector_linear_progress_bar,\r\n };\r\n\r\n /** @hide */\r\n public static AnimatedVectorDrawable create(Resources resources, int rid) {\r\n try {\r\n final XmlPullParser parser = resources.getXml(rid);\r\n final AttributeSet attrs = Xml.asAttributeSet(parser);\r\n int type;\r\n while ((type=parser.next()) != XmlPullParser.START_TAG &&\r\n type != XmlPullParser.END_DOCUMENT) {\r\n // Empty loop\r\n }\r\n if (type != XmlPullParser.START_TAG) {\r\n throw new XmlPullParserException(\"No start tag found\");\r\n }\r\n\r\n final AnimatedVectorDrawable drawable = new AnimatedVectorDrawable();\r\n drawable.inflate(resources, parser, attrs);\r\n\r\n return drawable;\r\n } catch (XmlPullParserException e) {\r\n Log.e(LOGTAG, \"parser error\", e);\r\n } catch (IOException e) {\r\n Log.e(LOGTAG, \"parser error\", e);\r\n }\r\n return null;\r\n }\r\n\r\n @Override\r\n protected void onCreate(Bundle savedInstanceState) {\r\n super.onCreate(savedInstanceState);\r\n ScrollView scrollView = new ScrollView(this);\r\n GridLayout container = new GridLayout(this);\r\n scrollView.addView(container);\r\n container.setColumnCount(5);\r\n Resources res = this.getResources();\r\n 
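// 0xFF888888 is an opaque mid-gray background for the grid of timed drawables.\r\n 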
container.setBackgroundColor(0xFF888888);\r\n AnimatedVectorDrawable []d = new AnimatedVectorDrawable[icon.length];\r\n long time = android.os.SystemClock.elapsedRealtimeNanos();\r\n for (int i = 0; i < icon.length; i++) {\r\n d[i] = create(res,icon[i]);\r\n }\r\n time = android.os.SystemClock.elapsedRealtimeNanos()-time;\r\n TextView t = new TextView(this);\r\n DecimalFormat df = new DecimalFormat(\"#.##\");\r\n t.setText(\"avgL=\" + df.format(time / (icon.length * 1000000.)) + \" ms\");\r\n container.addView(t);\r\n time = android.os.SystemClock.elapsedRealtimeNanos();\r\n for (int i = 0; i < icon.length; i++) {\r\n Button button = new Button(this);\r\n button.setWidth(200);\r\n button.setBackgroundResource(icon[i]);\r\n container.addView(button);\r\n }\r\n setContentView(scrollView);\r\n time = android.os.SystemClock.elapsedRealtimeNanos()-time;\r\n t = new TextView(this);\r\n t.setText(\"avgS=\" + df.format(time / (icon.length * 1000000.)) + \" ms\");\r\n container.addView(t);\r\n }\r\n\r\n}\r\n" }, { "alpha_fraction": 0.7187257409095764, "alphanum_fraction": 0.7257187366485596, "avg_line_length": 33.75, "blob_id": "a5b9c4f52d674d06e7928ef3720ac39a0c86498b", "content_id": "32b52c670e51e22372850ef3a1a76f313ec2f74b", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1287, "license_type": "permissive", "max_line_length": 90, "num_lines": 36, "path": "/core/java/android/hardware/camera2/impl/SyntheticKey.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2014 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\npackage android.hardware.camera2.impl;\r\n\r\nimport java.lang.annotation.ElementType;\r\nimport java.lang.annotation.Retention;\r\nimport java.lang.annotation.RetentionPolicy;\r\nimport java.lang.annotation.Target;\r\n\r\n/**\r\n * Denote a static field {@code Key} as being synthetic (i.e. 
not having a native\r\n * tag one-to-one counterpart).\r\n *\r\n * <p>Keys without this annotation are assumed to always have a native counter-part.</p>\r\n *\r\n * <p>In particular a key with a native counter-part will always have its {@code #getTag}\r\n * method succeed.</p>\r\n */\r\n@Retention(RetentionPolicy.RUNTIME)\r\n@Target(ElementType.FIELD)\r\npublic @interface SyntheticKey {\r\n\r\n}\r\n" }, { "alpha_fraction": 0.7016706466674805, "alphanum_fraction": 0.7035065293312073, "avg_line_length": 40.22480773925781, "blob_id": "2960424e549cb0a2777383db45b99c339289b6d0", "content_id": "f2da0473e60a460e1d61ff4422d9829dd14581dc", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 10894, "license_type": "permissive", "max_line_length": 100, "num_lines": 258, "path": "/packages/SystemUI/tests/src/com/android/systemui/statusbar/notification/collection/coordinator/KeyguardCoordinatorTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2019 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.systemui.statusbar.notification.collection.coordinator;\r\n\r\nimport static android.app.Notification.VISIBILITY_PUBLIC;\r\nimport static android.app.Notification.VISIBILITY_SECRET;\r\nimport static android.app.NotificationManager.IMPORTANCE_HIGH;\r\nimport static android.app.NotificationManager.IMPORTANCE_MIN;\r\n\r\nimport static junit.framework.Assert.assertFalse;\r\nimport static junit.framework.Assert.assertTrue;\r\n\r\nimport static org.mockito.Mockito.times;\r\nimport static org.mockito.Mockito.verify;\r\nimport static org.mockito.Mockito.when;\r\n\r\nimport android.os.Handler;\r\nimport android.os.UserHandle;\r\nimport android.testing.AndroidTestingRunner;\r\n\r\nimport androidx.test.filters.SmallTest;\r\n\r\nimport com.android.keyguard.KeyguardUpdateMonitor;\r\nimport com.android.systemui.SysuiTestCase;\r\nimport com.android.systemui.broadcast.BroadcastDispatcher;\r\nimport com.android.systemui.plugins.statusbar.StatusBarStateController;\r\nimport com.android.systemui.statusbar.NotificationLockscreenUserManager;\r\nimport com.android.systemui.statusbar.RankingBuilder;\r\nimport com.android.systemui.statusbar.notification.collection.GroupEntry;\r\nimport com.android.systemui.statusbar.notification.collection.NotifPipeline;\r\nimport com.android.systemui.statusbar.notification.collection.NotificationEntry;\r\nimport com.android.systemui.statusbar.notification.collection.NotificationEntryBuilder;\r\nimport com.android.systemui.statusbar.notification.collection.listbuilder.pluggable.NotifFilter;\r\nimport com.android.systemui.statusbar.notification.collection.provider.HighPriorityProvider;\r\nimport com.android.systemui.statusbar.policy.KeyguardStateController;\r\n\r\nimport org.junit.Before;\r\nimport org.junit.Test;\r\nimport 
org.junit.runner.RunWith;\r\nimport org.mockito.ArgumentCaptor;\r\nimport org.mockito.Mock;\r\nimport org.mockito.MockitoAnnotations;\r\n\r\n@SmallTest\r\n@RunWith(AndroidTestingRunner.class)\r\npublic class KeyguardCoordinatorTest extends SysuiTestCase {\r\n private static final int NOTIF_USER_ID = 0;\r\n private static final int CURR_USER_ID = 1;\r\n\r\n @Mock private Handler mMainHandler;\r\n @Mock private KeyguardStateController mKeyguardStateController;\r\n @Mock private NotificationLockscreenUserManager mLockscreenUserManager;\r\n @Mock private BroadcastDispatcher mBroadcastDispatcher;\r\n @Mock private StatusBarStateController mStatusBarStateController;\r\n @Mock private KeyguardUpdateMonitor mKeyguardUpdateMonitor;\r\n @Mock private HighPriorityProvider mHighPriorityProvider;\r\n @Mock private NotifPipeline mNotifPipeline;\r\n\r\n private NotificationEntry mEntry;\r\n private KeyguardCoordinator mKeyguardCoordinator;\r\n private NotifFilter mKeyguardFilter;\r\n\r\n @Before\r\n public void setup() {\r\n MockitoAnnotations.initMocks(this);\r\n mKeyguardCoordinator = new KeyguardCoordinator(\r\n mContext, mMainHandler, mKeyguardStateController, mLockscreenUserManager,\r\n mBroadcastDispatcher, mStatusBarStateController,\r\n mKeyguardUpdateMonitor, mHighPriorityProvider);\r\n\r\n mEntry = new NotificationEntryBuilder()\r\n .setUser(new UserHandle(NOTIF_USER_ID))\r\n .build();\r\n\r\n ArgumentCaptor<NotifFilter> filterCaptor = ArgumentCaptor.forClass(NotifFilter.class);\r\n mKeyguardCoordinator.attach(mNotifPipeline);\r\n verify(mNotifPipeline, times(1)).addFinalizeFilter(filterCaptor.capture());\r\n mKeyguardFilter = filterCaptor.getValue();\r\n }\r\n\r\n @Test\r\n public void unfilteredState() {\r\n // GIVEN an 'unfiltered-keyguard-showing' state\r\n setupUnfilteredState(mEntry);\r\n\r\n // THEN don't filter out the entry\r\n assertFalse(mKeyguardFilter.shouldFilterOut(mEntry, 0));\r\n }\r\n\r\n @Test\r\n public void keyguardNotShowing() {\r\n // GIVEN the lockscreen isn't showing\r\n setupUnfilteredState(mEntry);\r\n when(mKeyguardStateController.isShowing()).thenReturn(false);\r\n\r\n // THEN don't filter out the entry\r\n assertFalse(mKeyguardFilter.shouldFilterOut(mEntry, 0));\r\n }\r\n\r\n @Test\r\n public void doNotShowLockscreenNotifications() {\r\n // GIVEN an 'unfiltered-keyguard-showing' state\r\n setupUnfilteredState(mEntry);\r\n\r\n // WHEN we shouldn't show any lockscreen notifications\r\n when(mLockscreenUserManager.shouldShowLockscreenNotifications()).thenReturn(false);\r\n\r\n // THEN filter out the entry\r\n assertTrue(mKeyguardFilter.shouldFilterOut(mEntry, 0));\r\n }\r\n\r\n @Test\r\n public void lockdown() {\r\n // GIVEN an 'unfiltered-keyguard-showing' state\r\n setupUnfilteredState(mEntry);\r\n\r\n // WHEN the notification's user is in lockdown:\r\n when(mKeyguardUpdateMonitor.isUserInLockdown(NOTIF_USER_ID)).thenReturn(true);\r\n\r\n // THEN filter out the entry\r\n assertTrue(mKeyguardFilter.shouldFilterOut(mEntry, 0));\r\n }\r\n\r\n @Test\r\n public void publicMode_settingsDisallow() {\r\n // GIVEN an 'unfiltered-keyguard-showing' state\r\n setupUnfilteredState(mEntry);\r\n\r\n // WHEN the notification's user is in public mode and settings are configured to disallow\r\n // notifications in public mode\r\n when(mLockscreenUserManager.isLockscreenPublicMode(NOTIF_USER_ID)).thenReturn(true);\r\n when(mLockscreenUserManager.userAllowsNotificationsInPublic(NOTIF_USER_ID))\r\n .thenReturn(false);\r\n\r\n // THEN filter out the entry\r\n 
assertTrue(mKeyguardFilter.shouldFilterOut(mEntry, 0));\r\n }\r\n\r\n @Test\r\n public void publicMode_notifDisallowed() {\r\n // GIVEN an 'unfiltered-keyguard-showing' state\r\n setupUnfilteredState(mEntry);\r\n\r\n // WHEN the current user is in public mode and the notification's visibility override\r\n // disallows showing it in public mode\r\n when(mLockscreenUserManager.isLockscreenPublicMode(CURR_USER_ID)).thenReturn(true);\r\n mEntry.setRanking(new RankingBuilder()\r\n .setKey(mEntry.getKey())\r\n .setVisibilityOverride(VISIBILITY_SECRET).build());\r\n\r\n // THEN filter out the entry\r\n assertTrue(mKeyguardFilter.shouldFilterOut(mEntry, 0));\r\n }\r\n\r\n @Test\r\n public void doesNotExceedThresholdToShow() {\r\n // GIVEN an 'unfiltered-keyguard-showing' state\r\n setupUnfilteredState(mEntry);\r\n\r\n // WHEN the notification doesn't exceed the threshold to show on the lockscreen\r\n mEntry.setRanking(new RankingBuilder()\r\n .setKey(mEntry.getKey())\r\n .setImportance(IMPORTANCE_MIN)\r\n .build());\r\n when(mHighPriorityProvider.isHighPriority(mEntry)).thenReturn(false);\r\n\r\n // THEN filter out the entry\r\n assertTrue(mKeyguardFilter.shouldFilterOut(mEntry, 0));\r\n }\r\n\r\n @Test\r\n public void summaryExceedsThresholdToShow() {\r\n // GIVEN the notification doesn't exceed the threshold to show on the lockscreen\r\n // but it's part of a group (has a parent)\r\n final GroupEntry parent = new GroupEntry(\"test_group_key\");\r\n final NotificationEntry entryWithParent = new NotificationEntryBuilder()\r\n .setParent(parent)\r\n .setUser(new UserHandle(NOTIF_USER_ID))\r\n .build();\r\n\r\n setupUnfilteredState(entryWithParent);\r\n entryWithParent.setRanking(new RankingBuilder()\r\n .setKey(entryWithParent.getKey())\r\n .setImportance(IMPORTANCE_MIN)\r\n .build());\r\n\r\n // WHEN its parent does exceed threshold to show on the lockscreen\r\n when(mHighPriorityProvider.isHighPriority(parent)).thenReturn(true);\r\n parent.setSummary(new NotificationEntryBuilder()\r\n .setImportance(IMPORTANCE_HIGH)\r\n .build());\r\n\r\n // THEN don't filter out the entry\r\n assertFalse(mKeyguardFilter.shouldFilterOut(entryWithParent, 0));\r\n\r\n // WHEN its parent doesn't exceed threshold to show on lockscreen\r\n when(mHighPriorityProvider.isHighPriority(parent)).thenReturn(false);\r\n parent.setSummary(new NotificationEntryBuilder()\r\n .setImportance(IMPORTANCE_MIN)\r\n .build());\r\n\r\n // THEN filter out the entry\r\n assertTrue(mKeyguardFilter.shouldFilterOut(entryWithParent, 0));\r\n }\r\n\r\n /**\r\n * Sets up a state where the notification will not be filtered by the\r\n * KeyguardNotificationCoordinator when the keyguard is showing.\r\n */\r\n private void setupUnfilteredState(NotificationEntry entry) {\r\n // keyguard is showing\r\n when(mKeyguardStateController.isShowing()).thenReturn(true);\r\n\r\n // show notifications on the lockscreen\r\n when(mLockscreenUserManager.shouldShowLockscreenNotifications()).thenReturn(true);\r\n\r\n // neither the current user nor the notification's user is in lockdown\r\n when(mLockscreenUserManager.getCurrentUserId()).thenReturn(CURR_USER_ID);\r\n when(mKeyguardUpdateMonitor.isUserInLockdown(NOTIF_USER_ID)).thenReturn(false);\r\n when(mKeyguardUpdateMonitor.isUserInLockdown(CURR_USER_ID)).thenReturn(false);\r\n\r\n // not in public mode\r\n when(mLockscreenUserManager.isLockscreenPublicMode(CURR_USER_ID)).thenReturn(false);\r\n when(mLockscreenUserManager.isLockscreenPublicMode(NOTIF_USER_ID)).thenReturn(false);\r\n\r\n // entry's ranking - should 
show on all lockscreens\r\n // + priority of the notification exceeds the threshold to be shown on the lockscreen\r\n entry.setRanking(new RankingBuilder()\r\n .setKey(mEntry.getKey())\r\n .setVisibilityOverride(VISIBILITY_PUBLIC)\r\n .setImportance(IMPORTANCE_HIGH)\r\n .build());\r\n\r\n // settings allows notifications in public mode\r\n when(mLockscreenUserManager.userAllowsNotificationsInPublic(CURR_USER_ID)).thenReturn(true);\r\n when(mLockscreenUserManager.userAllowsNotificationsInPublic(NOTIF_USER_ID))\r\n .thenReturn(true);\r\n\r\n // notification doesn't have a summary\r\n\r\n // notification is high priority, so it shouldn't be filtered\r\n when(mHighPriorityProvider.isHighPriority(mEntry)).thenReturn(true);\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.746668815612793, "alphanum_fraction": 0.7479531168937683, "avg_line_length": 38.980262756347656, "blob_id": "389047743e5c200ee2e56808e17e26eb9824f0a5", "content_id": "7377bfaa2508cb620091431bef893d1e4d7d7d9c", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 6229, "license_type": "permissive", "max_line_length": 99, "num_lines": 152, "path": "/packages/SystemUI/tests/src/com/android/systemui/statusbar/phone/NotificationGroupManagerTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2018 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.systemui.statusbar.phone;\r\n\r\nimport static junit.framework.Assert.assertEquals;\r\nimport static junit.framework.Assert.assertFalse;\r\nimport static junit.framework.Assert.assertTrue;\r\n\r\nimport static org.junit.Assert.assertNull;\r\nimport static org.mockito.Mockito.mock;\r\nimport static org.mockito.Mockito.when;\r\n\r\nimport android.testing.AndroidTestingRunner;\r\nimport android.testing.TestableLooper;\r\n\r\nimport androidx.test.filters.SmallTest;\r\n\r\nimport com.android.systemui.SysuiTestCase;\r\nimport com.android.systemui.bubbles.BubbleController;\r\nimport com.android.systemui.plugins.statusbar.StatusBarStateController;\r\nimport com.android.systemui.statusbar.notification.collection.NotificationEntry;\r\nimport com.android.systemui.statusbar.notification.people.PeopleNotificationIdentifier;\r\nimport com.android.systemui.statusbar.policy.HeadsUpManager;\r\n\r\nimport org.junit.Before;\r\nimport org.junit.Rule;\r\nimport org.junit.Test;\r\nimport org.junit.runner.RunWith;\r\nimport org.mockito.Mock;\r\nimport org.mockito.junit.MockitoJUnit;\r\nimport org.mockito.junit.MockitoRule;\r\n\r\n@SmallTest\r\n@RunWith(AndroidTestingRunner.class)\r\[email protected]\r\npublic class NotificationGroupManagerTest extends SysuiTestCase {\r\n @Rule\r\n public MockitoRule rule = MockitoJUnit.rule();\r\n\r\n private NotificationGroupManager mGroupManager;\r\n private final NotificationGroupTestHelper mGroupTestHelper =\r\n new 
NotificationGroupTestHelper(mContext);\r\n\r\n @Mock HeadsUpManager mHeadsUpManager;\r\n\r\n @Before\r\n public void setup() {\r\n mDependency.injectMockDependency(BubbleController.class);\r\n initializeGroupManager();\r\n }\r\n\r\n private void initializeGroupManager() {\r\n mGroupManager = new NotificationGroupManager(\r\n mock(StatusBarStateController.class),\r\n () -> mock(PeopleNotificationIdentifier.class));\r\n mGroupManager.setHeadsUpManager(mHeadsUpManager);\r\n }\r\n\r\n @Test\r\n public void testIsOnlyChildInGroup() {\r\n NotificationEntry childEntry = mGroupTestHelper.createChildNotification();\r\n NotificationEntry summaryEntry = mGroupTestHelper.createSummaryNotification();\r\n\r\n mGroupManager.onEntryAdded(summaryEntry);\r\n mGroupManager.onEntryAdded(childEntry);\r\n\r\n assertTrue(mGroupManager.isOnlyChildInGroup(childEntry.getSbn()));\r\n }\r\n\r\n @Test\r\n public void testIsChildInGroupWithSummary() {\r\n NotificationEntry childEntry = mGroupTestHelper.createChildNotification();\r\n NotificationEntry summaryEntry = mGroupTestHelper.createSummaryNotification();\r\n\r\n mGroupManager.onEntryAdded(summaryEntry);\r\n mGroupManager.onEntryAdded(childEntry);\r\n mGroupManager.onEntryAdded(mGroupTestHelper.createChildNotification());\r\n\r\n assertTrue(mGroupManager.isChildInGroupWithSummary(childEntry.getSbn()));\r\n }\r\n\r\n @Test\r\n public void testIsSummaryOfGroupWithChildren() {\r\n NotificationEntry childEntry = mGroupTestHelper.createChildNotification();\r\n NotificationEntry summaryEntry = mGroupTestHelper.createSummaryNotification();\r\n\r\n mGroupManager.onEntryAdded(summaryEntry);\r\n mGroupManager.onEntryAdded(childEntry);\r\n mGroupManager.onEntryAdded(mGroupTestHelper.createChildNotification());\r\n\r\n assertTrue(mGroupManager.isSummaryOfGroup(summaryEntry.getSbn()));\r\n assertEquals(summaryEntry, mGroupManager.getGroupSummary(childEntry.getSbn()));\r\n }\r\n\r\n @Test\r\n public void testRemoveChildFromGroupWithSummary() {\r\n NotificationEntry childEntry = mGroupTestHelper.createChildNotification();\r\n NotificationEntry summaryEntry = mGroupTestHelper.createSummaryNotification();\r\n mGroupManager.onEntryAdded(summaryEntry);\r\n mGroupManager.onEntryAdded(childEntry);\r\n mGroupManager.onEntryAdded(mGroupTestHelper.createChildNotification());\r\n\r\n mGroupManager.onEntryRemoved(childEntry);\r\n\r\n assertFalse(mGroupManager.isChildInGroupWithSummary(childEntry.getSbn()));\r\n }\r\n\r\n @Test\r\n public void testRemoveSummaryFromGroupWithSummary() {\r\n NotificationEntry childEntry = mGroupTestHelper.createChildNotification();\r\n NotificationEntry summaryEntry = mGroupTestHelper.createSummaryNotification();\r\n mGroupManager.onEntryAdded(summaryEntry);\r\n mGroupManager.onEntryAdded(childEntry);\r\n mGroupManager.onEntryAdded(mGroupTestHelper.createChildNotification());\r\n\r\n mGroupManager.onEntryRemoved(summaryEntry);\r\n\r\n assertNull(mGroupManager.getGroupSummary(childEntry.getSbn()));\r\n assertFalse(mGroupManager.isSummaryOfGroup(summaryEntry.getSbn()));\r\n }\r\n\r\n @Test\r\n public void testHeadsUpEntryIsIsolated() {\r\n NotificationEntry childEntry = mGroupTestHelper.createChildNotification();\r\n NotificationEntry summaryEntry = mGroupTestHelper.createSummaryNotification();\r\n mGroupManager.onEntryAdded(summaryEntry);\r\n mGroupManager.onEntryAdded(childEntry);\r\n mGroupManager.onEntryAdded(mGroupTestHelper.createChildNotification());\r\n when(mHeadsUpManager.isAlerting(childEntry.getKey())).thenReturn(true);\r\n\r\n 
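// Heads-up the child so the group manager isolates it from its logical group.\r\n 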
mGroupManager.onHeadsUpStateChanged(childEntry, true);\r\n\r\n // Child entries that are heads upped should be considered separate groups visually even if\r\n // they are the same group logically\r\n assertEquals(childEntry, mGroupManager.getGroupSummary(childEntry.getSbn()));\r\n assertEquals(summaryEntry, mGroupManager.getLogicalGroupSummary(childEntry.getSbn()));\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6179168820381165, "alphanum_fraction": 0.6233135461807251, "avg_line_length": 29.94827651977539, "blob_id": "dca07001b14a828179150dd0bb108ebb155cc35d", "content_id": "ccf0a20da223aba6c845b46e6e32799188d4685a", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1853, "license_type": "permissive", "max_line_length": 87, "num_lines": 58, "path": "/apct-tests/perftests/utils/src/android/perftests/utils/LayoutUtils.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2016 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage android.perftests.utils;\r\n\r\nimport android.view.View;\r\nimport android.view.ViewGroup;\r\n\r\nimport java.util.ArrayList;\r\nimport java.util.List;\r\n\r\npublic class LayoutUtils {\r\n\r\n private static void recursivelyGather(ViewGroup currentNode, List<View> nodeList) {\r\n nodeList.add(currentNode);\r\n int count = currentNode.getChildCount();\r\n for (int i = 0; i < count; i++) {\r\n View view = currentNode.getChildAt(i);\r\n if (view instanceof ViewGroup) {\r\n recursivelyGather((ViewGroup) view, nodeList);\r\n } else {\r\n nodeList.add(view);\r\n }\r\n }\r\n }\r\n\r\n /**\r\n * Flatten the whole view tree into a list of View.\r\n */\r\n public static List<View> gatherViewTree(ViewGroup root) {\r\n List<View> result = new ArrayList<View>();\r\n recursivelyGather(root, result);\r\n return result;\r\n }\r\n\r\n /**\r\n * For every node in the list, call requestLayout.\r\n */\r\n public static void requestLayoutForAllNodes(List<View> nodeList) {\r\n int count = nodeList.size();\r\n for (int i = 0; i < count; i++) {\r\n nodeList.get(i).requestLayout();\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.39211317896842957, "alphanum_fraction": 0.39525744318962097, "avg_line_length": 48.88666534423828, "blob_id": "4f773cc3c3ec1e15848639e18256f69a15b8e20f", "content_id": "d10d2ebf23c7dc0afe0355e005f033eb1d13bfff", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 7633, "license_type": "permissive", "max_line_length": 92, "num_lines": 150, "path": "/media/mca/filterfw/jni/jni_shader_program.h", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2011 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in 
compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#ifndef ANDROID_FILTERFW_JNI_SHADER_PROGRAM_H\r\n#define ANDROID_FILTERFW_JNI_SHADER_PROGRAM_H\r\n\r\n#include <jni.h>\r\n\r\n#include \"native/core/value.h\"\r\n\r\n#ifdef __cplusplus\r\nextern \"C\" {\r\n#endif\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_ShaderProgram_allocate(JNIEnv* env,\r\n jobject thiz,\r\n jobject gl_env,\r\n jstring vertex_shader,\r\n jstring fragment_shader);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_ShaderProgram_deallocate(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_ShaderProgram_compileAndLink(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_ShaderProgram_setUniformValue(JNIEnv* env,\r\n jobject thiz,\r\n jstring key,\r\n jobject value);\r\n\r\nJNIEXPORT jobject JNICALL\r\nJava_android_filterfw_core_ShaderProgram_getUniformValue(JNIEnv* env,\r\n jobject thiz,\r\n jstring key);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_ShaderProgram_shaderProcess(JNIEnv* env,\r\n jobject thiz,\r\n jobjectArray inputs,\r\n jobject output);\r\n\r\nJNIEXPORT jobject JNICALL\r\nJava_android_filterfw_core_ShaderProgram_nativeCreateIdentity(JNIEnv* env,\r\n jclass clazz,\r\n jobject gl_env);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_ShaderProgram_setSourceRegion(JNIEnv* env,\r\n jobject thiz,\r\n jfloat x0,\r\n jfloat y0,\r\n jfloat x1,\r\n jfloat y1,\r\n jfloat x2,\r\n jfloat y2,\r\n jfloat x3,\r\n jfloat y3);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_ShaderProgram_setTargetRegion(JNIEnv* env,\r\n jobject thiz,\r\n jfloat x0,\r\n jfloat y0,\r\n jfloat x1,\r\n jfloat y1,\r\n jfloat x2,\r\n jfloat y2,\r\n jfloat x3,\r\n jfloat y3);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_ShaderProgram_setShaderClearsOutput(JNIEnv* env,\r\n jobject thiz,\r\n jboolean clears);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_ShaderProgram_setShaderClearColor(JNIEnv* env,\r\n jobject thiz,\r\n jfloat r,\r\n jfloat g,\r\n jfloat b);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_ShaderProgram_setShaderBlendEnabled(JNIEnv* env,\r\n jobject thiz,\r\n jboolean enable);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_ShaderProgram_setShaderBlendFunc(JNIEnv* env,\r\n jobject thiz,\r\n jint sfactor,\r\n jint dfactor);\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_ShaderProgram_setShaderDrawMode(JNIEnv* env,\r\n jobject thiz,\r\n jint draw_mode);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_ShaderProgram_setShaderTileCounts(JNIEnv* env,\r\n jobject thiz,\r\n jint x_count,\r\n jint y_count);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_ShaderProgram_setShaderVertexCount(JNIEnv* env,\r\n jobject thiz,\r\n jint vertex_count);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_ShaderProgram_beginShaderDrawing(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jboolean 
JNICALL\r\nJava_android_filterfw_core_ShaderProgram_setShaderAttributeValues(JNIEnv* env,\r\n jobject thiz,\r\n jstring attr_name,\r\n jfloatArray values,\r\n jint component_count);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_ShaderProgram_setShaderAttributeVertexFrame(JNIEnv* env,\r\n jobject thiz,\r\n jstring attr_name,\r\n jobject vertex_frame,\r\n jint type,\r\n jint component_count,\r\n jint stride,\r\n jint offset,\r\n jboolean normalize);\r\n\r\n#ifdef __cplusplus\r\n}\r\n#endif\r\n\r\n#endif // ANDROID_FILTERFW_JNI_SHADER_PROGRAM_H\r\n" }, { "alpha_fraction": 0.701813280582428, "alphanum_fraction": 0.7071860432624817, "avg_line_length": 29.680850982666016, "blob_id": "e3f414192446a319a27d0ab1b0fa32ae8d74d00f", "content_id": "2417a6a43f1e455abb73999b6776e4545cb200ae", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1489, "license_type": "permissive", "max_line_length": 75, "num_lines": 47, "path": "/tools/aapt2/NameMangler_test.cpp", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2015 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#include \"NameMangler.h\"\r\n\r\n#include <string>\r\n\r\n#include \"test/Test.h\"\r\n\r\nnamespace aapt {\r\n\r\nTEST(NameManglerTest, MangleName) {\r\n std::string package = \"android.appcompat\";\r\n std::string name = \"Platform.AppCompat\";\r\n\r\n std::string mangled_name = NameMangler::MangleEntry(package, name);\r\n EXPECT_EQ(mangled_name, \"android.appcompat$Platform.AppCompat\");\r\n\r\n std::string unmangled_package;\r\n std::string unmangled_name = mangled_name;\r\n ASSERT_TRUE(NameMangler::Unmangle(&unmangled_name, &unmangled_package));\r\n EXPECT_EQ(unmangled_name, \"Platform.AppCompat\");\r\n EXPECT_EQ(unmangled_package, \"android.appcompat\");\r\n}\r\n\r\nTEST(NameManglerTest, IgnoreUnmangledName) {\r\n std::string package;\r\n std::string name = \"foo_bar\";\r\n\r\n EXPECT_FALSE(NameMangler::Unmangle(&name, &package));\r\n EXPECT_EQ(name, \"foo_bar\");\r\n}\r\n\r\n} // namespace aapt\r\n" }, { "alpha_fraction": 0.6612529158592224, "alphanum_fraction": 0.6612529158592224, "avg_line_length": 20.578947067260742, "blob_id": "5628c65a46f8ddd1133e41ac44fdf424c9c23180", "content_id": "62ff02bcc3521de97a9dbad699b3a70f0b023266", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 431, "license_type": "permissive", "max_line_length": 73, "num_lines": 19, "path": "/tests/SystemMemoryTest/README.txt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "This directory contains a test for system server memory use.\r\n\r\nDirectory structure\r\n===================\r\ndevice\r\n - those parts of the test that run on device.\r\n\r\nhost\r\n - those parts of the test that run on 
host.\r\n\r\nRunning the test\r\n================\r\n\r\nYou can manually run the test as follows:\r\n\r\n atest -v system-memory-test\r\n\r\nThis installs and runs the test on device. You can see the metrics in the\r\ntradefed output.\r\n\r\n" }, { "alpha_fraction": 0.6583570837974548, "alphanum_fraction": 0.6666666865348816, "avg_line_length": 40.119998931884766, "blob_id": "ab7460ed95f91c47581be277cde7162ab3b31514", "content_id": "7638d52e15984723603e3045af13918bd2a626fe", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 4212, "license_type": "permissive", "max_line_length": 194, "num_lines": 100, "path": "/packages/PrintRecommendationService/src/com/android/printservice/recommendation/plugin/hp/HPRecommendationPlugin.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n(c) Copyright 2016 HP Inc.\r\nCopyright (C) 2016 The Android Open Source Project\r\n\r\nLicensed under the Apache License, Version 2.0 (the \"License\");\r\nyou may not use this file except in compliance with the License.\r\nYou may obtain a copy of the License at\r\n\r\nhttp://www.apache.org/licenses/LICENSE-2.0\r\n\r\nUnless required by applicable law or agreed to in writing, software\r\ndistributed under the License is distributed on an \"AS IS\" BASIS,\r\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\nSee the License for the specific language governing permissions and\r\nlimitations under the License.\r\n*/\r\npackage com.android.printservice.recommendation.plugin.hp;\r\n\r\nimport android.content.Context;\r\nimport android.net.nsd.NsdServiceInfo;\r\nimport android.text.TextUtils;\r\n\r\nimport com.android.printservice.recommendation.R;\r\n\r\nimport java.util.Locale;\r\n\r\npublic class HPRecommendationPlugin extends ServiceRecommendationPlugin {\r\n\r\n private static final String PDL__PCL = \"application/vnd.hp-PCL\";\r\n private static final String PDL__PCLM = \"application/PCLm\";\r\n private static final String PDL__PDF = \"application/pdf\";\r\n private static final String PDL__PWG_RASTER = \"image/pwg-raster\";\r\n\r\n private static final String TAG_DESIGNJET = \"DESIGNJET\";\r\n private static final String TAG_PAGEWIDE = \"PAGEWIDE\";\r\n private static final String TAG_LATEX = \"LATEX\";\r\n private static final String TAG_SCITEX = \"SCITEX\";\r\n private static final String TAG_XL = \"XL\";\r\n private static final String ATTRIBUTE_VALUE__TRUE = \"T\";\r\n private static final String MDNS_ATTRIBUTE__HPLFMOBILEPRINTER = \"hplfpmobileprinter\";\r\n private static final String MDNS_ATTRIBUTE__TY = \"ty\";\r\n\r\n\r\n private static String[] mSupportedDesignJet = new String[]{\r\n \"HP DESIGNJET T120\",\r\n \"HP DESIGNJET T520\",\r\n \"HP DESIGNJET T930\",\r\n \"HP DESIGNJET T1530\",\r\n \"HP DESIGNJET T2530\",\r\n \"HP DESIGNJET T730\",\r\n \"HP DESIGNJET T830\",\r\n };\r\n\r\n private boolean isPrintSupported(String printerModel) {\r\n boolean isSupported;\r\n if (!TextUtils.isEmpty(printerModel)) {\r\n String modelToUpper = printerModel.toUpperCase(Locale.US);\r\n if (modelToUpper.contains(TAG_DESIGNJET)) {\r\n isSupported = isSupportedDesignjet(printerModel);\r\n } else\r\n isSupported = !(modelToUpper.contains(TAG_LATEX) || modelToUpper.contains(TAG_SCITEX)) && !(modelToUpper.contains(TAG_PAGEWIDE) && modelToUpper.contains(TAG_XL));\r\n } else {\r\n isSupported = false;\r\n }\r\n\r\n return isSupported;\r\n }\r\n\r\n private static boolean 
isSupportedDesignjet(String printerModel) {\r\n boolean isSupported = false;\r\n if (!TextUtils.isEmpty(printerModel)) {\r\n String modelToUpper = printerModel.toUpperCase(Locale.US);\r\n for (String supportedPrinter : mSupportedDesignJet) {\r\n if (modelToUpper.contains(supportedPrinter)) {\r\n isSupported = true;\r\n }\r\n }\r\n }\r\n return isSupported;\r\n }\r\n\r\n public HPRecommendationPlugin(Context context) {\r\n super(context, R.string.plugin_vendor_hp, new VendorInfo(context.getResources(), R.array.known_print_vendor_info_for_hp), new String[]{\"_pdl-datastream._tcp\",\"_ipp._tcp\", \"_ipps._tcp\"});\r\n }\r\n\r\n @Override\r\n public boolean matchesCriteria(String vendor, NsdServiceInfo nsdServiceInfo) {\r\n if (!TextUtils.equals(vendor, mVendorInfo.mVendorID)) return false;\r\n\r\n String pdls = MDnsUtils.getString(nsdServiceInfo.getAttributes().get(PDL_ATTRIBUTE));\r\n boolean hasMobileSupport = TextUtils.equals(ATTRIBUTE_VALUE__TRUE, MDnsUtils.getString(nsdServiceInfo.getAttributes().get(MDNS_ATTRIBUTE__HPLFMOBILEPRINTER)));\r\n\r\n return (((hasMobileSupport || isPrintSupported(MDnsUtils.getString(nsdServiceInfo.getAttributes().get(MDNS_ATTRIBUTE__TY))))\r\n &&!TextUtils.isEmpty(pdls))\r\n && (pdls.contains(PDL__PCL)\r\n || pdls.contains(PDL__PDF)\r\n || pdls.contains(PDL__PCLM)\r\n || pdls.contains(PDL__PWG_RASTER)));\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6973628997802734, "alphanum_fraction": 0.7028045058250427, "avg_line_length": 37.81666564941406, "blob_id": "9cdf551a73fb08ca7491b43789ecbc6fe18afc74", "content_id": "26b8d6c6b83191f05e5b327ea2488c1b6e207e91", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2389, "license_type": "permissive", "max_line_length": 88, "num_lines": 60, "path": "/packages/SystemUI/src/com/android/systemui/statusbar/KeyboardShortcutAppItemLayout.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2016 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License\r\n */\r\n\r\npackage com.android.systemui.statusbar;\r\n\r\nimport android.content.Context;\r\nimport android.util.AttributeSet;\r\nimport android.view.View;\r\nimport android.widget.ImageView;\r\nimport android.widget.RelativeLayout;\r\nimport android.widget.TextView;\r\n\r\nimport com.android.systemui.R;\r\n\r\n/**\r\n * Layout used for displaying keyboard shortcut items inside an alert dialog.\r\n * The layout sets the maxWidth of shortcuts keyword textview to 70% of available space.\r\n */\r\npublic class KeyboardShortcutAppItemLayout extends RelativeLayout {\r\n\r\n private static final double MAX_WIDTH_PERCENT_FOR_KEYWORDS = 0.70;\r\n\r\n public KeyboardShortcutAppItemLayout(Context context) {\r\n super(context);\r\n }\r\n\r\n public KeyboardShortcutAppItemLayout(Context context, AttributeSet attrs) {\r\n super(context, attrs);\r\n }\r\n\r\n @Override\r\n protected void 
onMeasure(int widthMeasureSpec, int heightMeasureSpec) {\r\n if (MeasureSpec.getMode(widthMeasureSpec) == MeasureSpec.EXACTLY) {\r\n ImageView shortcutIcon = findViewById(R.id.keyboard_shortcuts_icon);\r\n TextView shortcutKeyword = findViewById(R.id.keyboard_shortcuts_keyword);\r\n int totalMeasuredWidth = MeasureSpec.getSize(widthMeasureSpec);\r\n int totalPadding = getPaddingLeft() + getPaddingRight();\r\n int availableWidth = totalMeasuredWidth - totalPadding;\r\n if (shortcutIcon.getVisibility() == View.VISIBLE) {\r\n availableWidth = availableWidth - shortcutIcon.getMeasuredWidth();\r\n }\r\n shortcutKeyword.setMaxWidth((int)\r\n Math.round(availableWidth * MAX_WIDTH_PERCENT_FOR_KEYWORDS));\r\n }\r\n super.onMeasure(widthMeasureSpec, heightMeasureSpec);\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6099832653999329, "alphanum_fraction": 0.6118939518928528, "avg_line_length": 41.164947509765625, "blob_id": "8a22dd7820ac1f62470bca7774947356e6e484f5", "content_id": "795b01d8bb6ee20fc165b57dcfb432ef522a5c05", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 4187, "license_type": "permissive", "max_line_length": 97, "num_lines": 97, "path": "/media/mca/filterfw/jni/jni_gl_environment.h", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2011 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#ifndef ANDROID_FILTERFW_JNI_GL_ENVIRONMENT_H\r\n#define ANDROID_FILTERFW_JNI_GL_ENVIRONMENT_H\r\n\r\n#include <jni.h>\r\n\r\n#ifdef __cplusplus\r\nextern \"C\" {\r\n#endif\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLEnvironment_nativeAllocate(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLEnvironment_nativeDeallocate(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLEnvironment_nativeInitWithNewContext(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLEnvironment_nativeInitWithCurrentContext(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLEnvironment_nativeIsActive(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLEnvironment_nativeIsContextActive(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLEnvironment_nativeIsAnyContextActive(JNIEnv* env, jclass clazz);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLEnvironment_nativeActivate(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLEnvironment_nativeDeactivate(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLEnvironment_nativeSwapBuffers(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jint 
JNICALL\r\nJava_android_filterfw_core_GLEnvironment_nativeAddSurface(JNIEnv* env,\r\n jobject thiz,\r\n jobject surface);\r\n\r\nJNIEXPORT jint JNICALL\r\nJava_android_filterfw_core_GLEnvironment_nativeAddSurfaceWidthHeight(JNIEnv* env,\r\n jobject thiz,\r\n jobject surface,\r\n jint width,\r\n jint height);\r\n\r\n// The call to hook up the SurfaceMediaSource (in MediaServer) to the GL.\r\n// We get a sp<ISurfaceTexture> from the MediaServer and talk to MediaServer\r\n// over a binder interface. GL is hooked up to the MediaServer by using the native\r\n// window created using the <IGraphicBufferProducer> handle\r\nJNIEXPORT jint JNICALL\r\nJava_android_filterfw_core_GLEnvironment_nativeAddSurfaceFromMediaRecorder(\r\n JNIEnv* env,\r\n jobject thiz,\r\n jobject mediarecorder);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLEnvironment_nativeActivateSurfaceId(JNIEnv* env,\r\n jobject thiz,\r\n jint surfaceId);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLEnvironment_nativeRemoveSurfaceId(JNIEnv* env,\r\n jobject thiz,\r\n jint surfaceId);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLEnvironment_nativeSetSurfaceTimestamp(JNIEnv* env,\r\n jobject thiz,\r\n jlong timestamp);\r\n\r\n#ifdef __cplusplus\r\n}\r\n#endif\r\n\r\n#endif // ANDROID_FILTERFW_JNI_GL_ENVIRONMENT_H\r\n" }, { "alpha_fraction": 0.7224080562591553, "alphanum_fraction": 0.7224080562591553, "avg_line_length": 19.35714340209961, "blob_id": "c6f651263168f36d76ccfa1e57f53f12f0822f79", "content_id": "a9c9512438db79623ca0f81fc9c64c993568077b", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 299, "license_type": "permissive", "max_line_length": 55, "num_lines": 14, "path": "/packages/overlays/DisplayCutoutEmulationHoleOverlay/Android.mk", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "LOCAL_PATH:= $(call my-dir)\r\ninclude $(CLEAR_VARS)\r\n\r\nLOCAL_RRO_THEME := DisplayCutoutEmulationHole\r\n\r\n\r\nLOCAL_PRODUCT_MODULE := true\r\n\r\nLOCAL_RESOURCE_DIR := $(LOCAL_PATH)/res\r\n\r\nLOCAL_PACKAGE_NAME := DisplayCutoutEmulationHoleOverlay\r\nLOCAL_SDK_VERSION := current\r\n\r\ninclude $(BUILD_RRO_PACKAGE)\r\n" }, { "alpha_fraction": 0.8363954424858093, "alphanum_fraction": 0.8363954424858093, "avg_line_length": 34.870967864990234, "blob_id": "06e92b87df023ff6a8c6b07834f91d1e0c65d172", "content_id": "aa0a248eef1f60557374a0dd13263dcb7d2fdd71", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 1143, "license_type": "permissive", "max_line_length": 63, "num_lines": 31, "path": "/libs/hwui/jni/BitmapFactory.h", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "#ifndef _ANDROID_GRAPHICS_BITMAP_FACTORY_H_\r\n#define _ANDROID_GRAPHICS_BITMAP_FACTORY_H_\r\n\r\n#include \"GraphicsJNI.h\"\r\n#include \"SkEncodedImageFormat.h\"\r\n\r\nextern jclass gOptions_class;\r\nextern jfieldID gOptions_justBoundsFieldID;\r\nextern jfieldID gOptions_sampleSizeFieldID;\r\nextern jfieldID gOptions_configFieldID;\r\nextern jfieldID gOptions_colorSpaceFieldID;\r\nextern jfieldID gOptions_premultipliedFieldID;\r\nextern jfieldID gOptions_ditherFieldID;\r\nextern jfieldID gOptions_purgeableFieldID;\r\nextern jfieldID gOptions_shareableFieldID;\r\nextern jfieldID gOptions_nativeAllocFieldID;\r\nextern jfieldID 
gOptions_preferQualityOverSpeedFieldID;\r\nextern jfieldID gOptions_widthFieldID;\r\nextern jfieldID gOptions_heightFieldID;\r\nextern jfieldID gOptions_mimeFieldID;\r\nextern jfieldID gOptions_outConfigFieldID;\r\nextern jfieldID gOptions_outColorSpaceFieldID;\r\nextern jfieldID gOptions_mCancelID;\r\nextern jfieldID gOptions_bitmapFieldID;\r\n\r\nextern jclass gBitmapConfig_class;\r\nextern jmethodID gBitmapConfig_nativeToConfigMethodID;\r\n\r\njstring getMimeTypeAsJavaString(JNIEnv*, SkEncodedImageFormat);\r\n\r\n#endif // _ANDROID_GRAPHICS_BITMAP_FACTORY_H_\r\n" }, { "alpha_fraction": 0.628104031085968, "alphanum_fraction": 0.6304690837860107, "avg_line_length": 32.75342559814453, "blob_id": "7acabd1ed04c2ad57b0aa64bae00bc5f3b74d3f6", "content_id": "09a1309b14093c92b94d2215fb40b86359aa6458", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 5074, "license_type": "permissive", "max_line_length": 98, "num_lines": 146, "path": "/media/tests/CameraBrowser/src/com/android/camerabrowser/ObjectBrowser.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2010 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.camerabrowser;\r\n\r\nimport android.app.ListActivity;\r\nimport android.content.Context;\r\nimport android.content.Intent;\r\nimport android.database.Cursor;\r\nimport android.graphics.Bitmap;\r\nimport android.graphics.BitmapFactory;\r\nimport android.mtp.MtpConstants;\r\nimport android.mtp.MtpDevice;\r\nimport android.mtp.MtpObjectInfo;\r\nimport android.os.Bundle;\r\nimport android.util.Log;\r\nimport android.view.LayoutInflater;\r\nimport android.view.View;\r\nimport android.view.ViewGroup;\r\nimport android.widget.BaseAdapter;\r\nimport android.widget.ImageView;\r\nimport android.widget.ListView;\r\nimport android.widget.TextView;\r\n\r\nimport java.util.List;\r\n\r\n /**\r\n * A list view displaying all objects within a container (folder or storage unit).\r\n */\r\npublic class ObjectBrowser extends ListActivity {\r\n\r\n private static final String TAG = \"ObjectBrowser\";\r\n\r\n private MtpClient mClient;\r\n private List<MtpObjectInfo> mObjectList;\r\n private String mDeviceName;\r\n private int mStorageID;\r\n private int mObjectID;\r\n private DeviceDisconnectedReceiver mDisconnectedReceiver;\r\n\r\n private class ObjectAdapter extends BaseAdapter {\r\n private final Context mContext;\r\n private final LayoutInflater mInflater;\r\n\r\n public ObjectAdapter(Context c) {\r\n mContext = c;\r\n mInflater = (LayoutInflater)c.getSystemService(Context.LAYOUT_INFLATER_SERVICE);\r\n }\r\n\r\n public int getCount() {\r\n if (mObjectList == null) {\r\n return 0;\r\n } else {\r\n return mObjectList.size();\r\n }\r\n }\r\n\r\n public Object getItem(int position) {\r\n return mObjectList.get(position);\r\n }\r\n\r\n public long getItemId(int 
position) {\r\n return position;\r\n }\r\n\r\n public View getView(int position, View convertView, ViewGroup parent) {\r\n View view;\r\n if (convertView == null) {\r\n view = mInflater.inflate(R.layout.object_list, parent, false);\r\n } else {\r\n view = convertView;\r\n }\r\n\r\n TextView nameView = (TextView)view.findViewById(R.id.name);\r\n MtpObjectInfo info = mObjectList.get(position);\r\n nameView.setText(info.getName());\r\n\r\n int thumbFormat = info.getThumbFormat();\r\n if (thumbFormat == MtpConstants.FORMAT_EXIF_JPEG\r\n || thumbFormat == MtpConstants.FORMAT_JFIF) {\r\n byte[] thumbnail = mClient.getThumbnail(mDeviceName, info.getObjectHandle());\r\n if (thumbnail != null) {\r\n Bitmap bitmap = BitmapFactory.decodeByteArray(thumbnail, 0, thumbnail.length);\r\n if (bitmap != null) {\r\n ImageView thumbView = (ImageView)view.findViewById(R.id.thumbnail);\r\n thumbView.setImageBitmap(bitmap);\r\n }\r\n }\r\n }\r\n return view;\r\n }\r\n }\r\n\r\n @Override\r\n protected void onCreate(Bundle savedInstanceState) {\r\n super.onCreate(savedInstanceState);\r\n\r\n mClient = ((CameraBrowserApplication)getApplication()).getMtpClient();\r\n mDeviceName = getIntent().getStringExtra(\"device\");\r\n mStorageID = getIntent().getIntExtra(\"storage\", 0);\r\n mObjectID = getIntent().getIntExtra(\"object\", 0);\r\n mDisconnectedReceiver = new DeviceDisconnectedReceiver(this, mDeviceName);\r\n }\r\n\r\n @Override\r\n protected void onResume() {\r\n super.onResume();\r\n\r\n mObjectList = mClient.getObjectList(mDeviceName, mStorageID, mObjectID);\r\n setListAdapter(new ObjectAdapter(this));\r\n }\r\n\r\n @Override\r\n protected void onDestroy() {\r\n unregisterReceiver(mDisconnectedReceiver);\r\n super.onDestroy();\r\n }\r\n\r\n @Override\r\n protected void onListItemClick(ListView l, View v, int position, long id) {\r\n MtpObjectInfo info = mObjectList.get(position);\r\n Intent intent;\r\n if (info.getFormat() == MtpConstants.FORMAT_ASSOCIATION) {\r\n intent = new Intent(this, ObjectBrowser.class);\r\n } else {\r\n intent = new Intent(this, ObjectViewer.class);\r\n }\r\n intent.putExtra(\"device\", mDeviceName);\r\n intent.putExtra(\"storage\", mStorageID);\r\n intent.putExtra(\"object\", info.getObjectHandle());\r\n startActivity(intent);\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.702001690864563, "alphanum_fraction": 0.7060896754264832, "avg_line_length": 38.306819915771484, "blob_id": "dc243fc099e494fddf4a0a77fe71247b169b5d71", "content_id": "fc04b31b146480451395db3c7dd5d4b573ba142b", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 7094, "license_type": "permissive", "max_line_length": 100, "num_lines": 176, "path": "/packages/CarSystemUI/tests/src/com/android/systemui/car/userswitcher/UserSwitchTransitionViewControllerTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2020 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing 
permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.systemui.car.userswitcher;\r\n\r\nimport static org.mockito.ArgumentMatchers.any;\r\nimport static org.mockito.ArgumentMatchers.eq;\r\nimport static org.mockito.Mockito.never;\r\nimport static org.mockito.Mockito.reset;\r\nimport static org.mockito.Mockito.verify;\r\n\r\nimport android.content.Context;\r\nimport android.content.res.Resources;\r\nimport android.os.Handler;\r\nimport android.os.UserManager;\r\nimport android.test.suitebuilder.annotation.SmallTest;\r\nimport android.testing.AndroidTestingRunner;\r\nimport android.testing.TestableLooper;\r\nimport android.testing.TestableResources;\r\nimport android.view.IWindowManager;\r\nimport android.view.LayoutInflater;\r\nimport android.view.ViewGroup;\r\n\r\nimport com.android.systemui.R;\r\nimport com.android.systemui.SysuiTestCase;\r\nimport com.android.systemui.car.window.OverlayViewGlobalStateController;\r\n\r\nimport org.junit.Before;\r\nimport org.junit.Test;\r\nimport org.junit.runner.RunWith;\r\nimport org.mockito.Mock;\r\nimport org.mockito.MockitoAnnotations;\r\n\r\n@RunWith(AndroidTestingRunner.class)\r\[email protected]\r\n@SmallTest\r\npublic class UserSwitchTransitionViewControllerTest extends SysuiTestCase {\r\n private static final int TEST_USER_1 = 100;\r\n private static final int TEST_USER_2 = 110;\r\n\r\n private TestableUserSwitchTransitionViewController mCarUserSwitchingDialogController;\r\n private TestableResources mTestableResources;\r\n @Mock\r\n private OverlayViewGlobalStateController mOverlayViewGlobalStateController;\r\n @Mock\r\n private IWindowManager mWindowManagerService;\r\n\r\n @Before\r\n public void setUp() {\r\n MockitoAnnotations.initMocks(this);\r\n mTestableResources = mContext.getOrCreateTestableResources();\r\n mCarUserSwitchingDialogController = new TestableUserSwitchTransitionViewController(\r\n mContext,\r\n Handler.getMain(),\r\n mTestableResources.getResources(),\r\n (UserManager) mContext.getSystemService(Context.USER_SERVICE),\r\n mWindowManagerService,\r\n mOverlayViewGlobalStateController\r\n );\r\n\r\n mCarUserSwitchingDialogController.inflate((ViewGroup) LayoutInflater.from(mContext).inflate(\r\n R.layout.sysui_overlay_window, /* root= */ null));\r\n }\r\n\r\n @Test\r\n public void onHandleShow_newUserSelected_showsDialog() {\r\n mCarUserSwitchingDialogController.handleShow(/* currentUserId= */ TEST_USER_1);\r\n\r\n verify(mOverlayViewGlobalStateController).showView(eq(mCarUserSwitchingDialogController),\r\n any());\r\n }\r\n\r\n @Test\r\n public void onHandleShow_alreadyShowing_ignoresRequest() {\r\n mCarUserSwitchingDialogController.handleShow(/* currentUserId= */ TEST_USER_1);\r\n mCarUserSwitchingDialogController.handleShow(/* currentUserId= */ TEST_USER_2);\r\n\r\n // Verify that the request was processed only once.\r\n verify(mOverlayViewGlobalStateController).showView(eq(mCarUserSwitchingDialogController),\r\n any());\r\n }\r\n\r\n @Test\r\n public void onHandleShow_sameUserSelected_ignoresRequest() {\r\n mCarUserSwitchingDialogController.handleShow(/* currentUserId= */ TEST_USER_1);\r\n mCarUserSwitchingDialogController.handleHide();\r\n mCarUserSwitchingDialogController.handleShow(/* currentUserId= */ TEST_USER_1);\r\n\r\n // Verify that the request was processed only once.\r\n verify(mOverlayViewGlobalStateController).showView(eq(mCarUserSwitchingDialogController),\r\n any());\r\n }\r\n\r\n @Test\r\n public void onHide_currentlyShowing_hidesDialog() {\r\n 
mCarUserSwitchingDialogController.handleShow(/* currentUserId= */ TEST_USER_1);\r\n mCarUserSwitchingDialogController.handleHide();\r\n\r\n verify(mOverlayViewGlobalStateController).hideView(eq(mCarUserSwitchingDialogController),\r\n any());\r\n }\r\n\r\n @Test\r\n public void onHide_notShowing_ignoresRequest() {\r\n mCarUserSwitchingDialogController.handleShow(/* currentUserId= */ TEST_USER_1);\r\n mCarUserSwitchingDialogController.handleHide();\r\n mCarUserSwitchingDialogController.handleHide();\r\n\r\n // Verify that the request was processed only once.\r\n verify(mOverlayViewGlobalStateController).hideView(eq(mCarUserSwitchingDialogController),\r\n any());\r\n }\r\n\r\n @Test\r\n public void onWindowShownTimeoutPassed_viewNotHidden_hidesUserSwitchTransitionView() {\r\n mCarUserSwitchingDialogController.handleShow(/* currentUserId= */ TEST_USER_1);\r\n reset(mOverlayViewGlobalStateController);\r\n\r\n getContext().getMainThreadHandler().postDelayed(() -> {\r\n verify(mOverlayViewGlobalStateController).hideView(\r\n eq(mCarUserSwitchingDialogController), any());\r\n }, mCarUserSwitchingDialogController.getWindowShownTimeoutMs() + 10);\r\n }\r\n\r\n @Test\r\n public void onWindowShownTimeoutPassed_viewHidden_doesNotHideUserSwitchTransitionViewAgain() {\r\n mCarUserSwitchingDialogController.handleShow(/* currentUserId= */ TEST_USER_1);\r\n mCarUserSwitchingDialogController.handleHide();\r\n reset(mOverlayViewGlobalStateController);\r\n\r\n getContext().getMainThreadHandler().postDelayed(() -> {\r\n verify(mOverlayViewGlobalStateController, never()).hideView(\r\n eq(mCarUserSwitchingDialogController), any());\r\n }, mCarUserSwitchingDialogController.getWindowShownTimeoutMs() + 10);\r\n }\r\n\r\n private final class TestableUserSwitchTransitionViewController extends\r\n UserSwitchTransitionViewController {\r\n\r\n private final Handler mHandler;\r\n\r\n TestableUserSwitchTransitionViewController(Context context, Handler handler,\r\n Resources resources, UserManager userManager,\r\n IWindowManager windowManagerService,\r\n OverlayViewGlobalStateController overlayViewGlobalStateController) {\r\n super(context, handler, resources, userManager, windowManagerService,\r\n overlayViewGlobalStateController);\r\n mHandler = handler;\r\n }\r\n\r\n @Override\r\n public void handleShow(int currentUserId) {\r\n super.handleShow(currentUserId);\r\n waitForIdleSync(mHandler);\r\n }\r\n\r\n @Override\r\n public void handleHide() {\r\n super.handleHide();\r\n waitForIdleSync(mHandler);\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6555915474891663, "alphanum_fraction": 0.6620745658874512, "avg_line_length": 36.5, "blob_id": "683cba509c66ba100ececd9d9cd0155c7c59be70", "content_id": "313bd252d3b0613160dc13d1476fd4ea7f75de89", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1234, "license_type": "permissive", "max_line_length": 77, "num_lines": 32, "path": "/graphics/java/android/graphics/ComposePathEffect.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2007 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is 
distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage android.graphics;\r\n\r\npublic class ComposePathEffect extends PathEffect {\r\n\r\n    /**\r\n     * Construct a PathEffect whose effect is to apply first the inner effect\r\n     * and then the outer pathEffect (e.g. outer(inner(path))).\r\n     */\r\n    public ComposePathEffect(PathEffect outerpe, PathEffect innerpe) {\r\n        native_instance = nativeCreate(outerpe.native_instance,\r\n                                       innerpe.native_instance);\r\n    }\r\n    \r\n    private static native long nativeCreate(long nativeOuterpe,\r\n                                            long nativeInnerpe);\r\n}\r\n\r\n" }, { "alpha_fraction": 0.47852423787117004, "alphanum_fraction": 0.7301762104034424, "avg_line_length": 38.35555648803711, "blob_id": "cb075196edc36804f8cd1f1c84823aa73315eccd", "content_id": "ab23fdd181ff4a0c05f089533f2d59175af5580f", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 1826, "license_type": "permissive", "max_line_length": 80, "num_lines": 45, "path": "/libs/hwui/tests/scripts/prep_marlfish.sh", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "#marlfish is marlin & sailfish (☞゚ヮ゚)☞\r\n\r\ncpubase=/sys/devices/system/cpu\r\n\r\nadb root\r\nadb wait-for-device\r\nadb shell stop thermal-engine\r\nadb shell stop perfd\r\n\r\n# silver cores\r\n#307200 384000 460800 537600 614400 691200 768000 844800 902400 979200\r\n#1056000 1132800 1209600 1286400 1363200 1440000 1516800 1593600\r\n# gold cores\r\n#307200 384000 460800 537600 614400 691200 748800 825600 902400 979200\r\n#1056000 1132800 1209600 1286400 1363200 1440000 1516800 1593600 1670400\r\n#1747200 1824000 1900800 1977600 2054400 2150400\r\n\r\nS=979200\r\ncpu=0\r\n# Changing governor and frequency in one core will be automatically applied\r\n# to other cores in the cluster\r\nwhile [ $((cpu < 3)) -eq 1 ]; do\r\n    adb shell \"echo userspace > $cpubase/cpu2/cpufreq/scaling_governor\"\r\n    echo \"Setting cpu ${cpu} & $(($cpu + 1)) cluster to $S hz\"\r\n    adb shell \"echo $S > $cpubase/cpu${cpu}/cpufreq/scaling_max_freq\"\r\n    adb shell \"echo $S > $cpubase/cpu${cpu}/cpufreq/scaling_min_freq\"\r\n    cpu=$(($cpu + 2))\r\ndone\r\n\r\necho \"setting GPU bus and idle timer\"\r\nadb shell \"echo 0 > /sys/class/kgsl/kgsl-3d0/bus_split\"\r\nadb shell \"echo 1 > /sys/class/kgsl/kgsl-3d0/force_clk_on\"\r\nadb shell \"echo 10000 > /sys/class/kgsl/kgsl-3d0/idle_timer\"\r\n\r\n#0 762 1144 1525 2288 3143 4173 5195 5859 7759 9887 11863 13763\r\nadb shell \"echo 13763 > /sys/class/devfreq/soc:qcom,gpubw/min_freq\" &> /dev/null\r\n\r\n#133000000 214000000 315000000 401800000 510000000 560000000 624000000\r\necho \"performance mode, 315 MHz\"\r\nadb shell \"echo performance > /sys/class/kgsl/kgsl-3d0/devfreq/governor\"\r\nadb shell \"echo 315000000 > /sys/class/kgsl/kgsl-3d0/devfreq/min_freq\"\r\nadb shell \"echo 315000000 > /sys/class/kgsl/kgsl-3d0/devfreq/max_freq\"\r\n\r\nadb shell \"echo 4 > /sys/class/kgsl/kgsl-3d0/min_pwrlevel\"\r\nadb shell \"echo 4 > /sys/class/kgsl/kgsl-3d0/max_pwrlevel\"\r\n" }, { "alpha_fraction": 0.7506352066993713, "alphanum_fraction": 0.7526316046714783, "avg_line_length": 38.814815521240234, "blob_id": "a6c609608ec09bce904e7d042c2e8c76ca3d0190", "content_id": "a00da9020ff8566c2d5cf64a93d6102bc5c96dfc", "detected_licenses": [ "Apache-2.0", 
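The constructor comment in the ComposePathEffect record above is easiest to read with a usage sketch. The following is illustrative only, not part of the record; it uses the standard `android.graphics` companion classes, and the radius and dash values are arbitrary assumptions:

```java
import android.graphics.ComposePathEffect;
import android.graphics.CornerPathEffect;
import android.graphics.DashPathEffect;
import android.graphics.Paint;
import android.graphics.PathEffect;

final class ComposePathEffectExample {
    // Builds a stroke paint where the path is corner-rounded first (inner
    // effect) and the rounded result is then dashed (outer effect), that is,
    // outer(inner(path)) as described in the constructor comment.
    static Paint dashedRoundedStroke() {
        PathEffect inner = new CornerPathEffect(10f);                      // arbitrary corner radius
        PathEffect outer = new DashPathEffect(new float[] {20f, 5f}, 0f);  // arbitrary dash pattern
        Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);
        paint.setStyle(Paint.Style.STROKE);
        paint.setPathEffect(new ComposePathEffect(outer, inner));
        return paint;
    }
}
```

Swapping the two arguments would dash the raw path first and then round the dashed segments, which usually looks quite different; the argument order is the whole point of the class.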
"LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 5510, "license_type": "permissive", "max_line_length": 100, "num_lines": 135, "path": "/core/java/android/os/Users.md", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "<!--\r\n Copyright (C) 2020 The Android Open Source Project\r\n\r\n Licensed under the Apache License, Version 2.0 (the \"License\");\r\n you may not use this file except in compliance with the License.\r\n You may obtain a copy of the License at\r\n\r\n http://www.apache.org/licenses/LICENSE-2.0\r\n\r\n Unless required by applicable law or agreed to in writing, software\r\n distributed under the License is distributed on an \"AS IS\" BASIS,\r\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n See the License for the specific language governing permissions and\r\n limitations under the License\r\n -->\r\n\r\n# Users for system developers\r\n\r\n## Concepts\r\n\r\n### Users and profiles\r\n\r\n#### User\r\n\r\nA user is a representation of a person using a device, with their own distinct application data\r\nand some unique settings. Throughout this document, the word 'user' will be used in this technical\r\nsense, i.e. for this virtual environment, whereas the word 'person' will be used to denote an actual\r\nhuman interacting with the device.\r\n\r\nEach user has a separate [`userId`](#int-userid).\r\n\r\n#### Profile Group\r\n\r\nOften, there is a 1-to-1 mapping of people who use a device to 'users'; e.g. there may be two users\r\non a device - the owner and a guest, each with their own separate home screen.\r\n\r\nHowever, Android also supports multiple profiles for a single person, e.g. one for their private\r\nlife and one for work, both sharing a single home screen.\r\nEach profile in a profile group is a distinct user, with a unique [`userId`](#int-userid), and have\r\na different set of apps and accounts,\r\nbut they share a single UI, single launcher, and single wallpaper.\r\nAll profiles of a profile group can be active at the same time.\r\n\r\nYou can list the profiles of a user via `UserManager#getEnabledProfiles` (you usually don't deal \r\nwith disabled profiles)\r\n\r\n#### Parent user\r\n\r\nThe main user of a profile group, to which the other profiles of the group 'belong'.\r\nThis is usually the personal (as opposed to work) profile. Get this via\r\n`UserManager#getProfileParent` (returns `null` if the user does not have profiles).\r\n\r\n#### Profile (Managed profile)\r\n\r\nA profile of the parent user, i.e. a profile belonging to the same profile group as a parent user,\r\nwith whom they share a single home screen.\r\nCurrently, the only type of profile supported in AOSP is a 'Managed Profile'.\r\nThe name comes from the fact that these profiles are usually\r\nmanaged by a device policy controller app. 
You can create a managed profile from within the device\r\npolicy controller app on your phone.\r\n\r\nNote that, as a member of the profile group, the parent user may sometimes also be considered a\r\n'profile', but generally speaking, the word 'profile' denotes a user that is subordinate to a\r\nparent.\r\n\r\n#### Foreground user vs background user\r\n\r\nOnly a single user can be in the foreground.\r\nThis is the user with whom the person using the device is currently interacting, or, in the case\r\nof profiles, the parent profile of this user.\r\nAll other running users are background users.\r\nSome users may not be running at all, neither in the foreground nor the background.\r\n\r\n#### Account\r\n\r\nAn account of a user with a (usually internet-based) service. E.g. [email protected] or\r\[email protected]. Each user can have multiple accounts. A user does not have to have an\r\naccount.\r\n\r\n#### System User\r\n\r\nThe user with [`userId`](#int-userid) 0 denotes the system user, which is always required to be\r\nrunning.\r\n\r\nOn most devices, the system user is also used by the primary person using the device; however,\r\non certain types of devices, the system user may be a stand-alone user, not intended for direct\r\nhuman interaction.\r\n\r\n## Data types\r\n\r\n### int userId\r\n\r\nThe id of a user. List all users via `adb shell dumpsys user`.\r\nIn code, these are sometimes marked as `@UserIdInt`.\r\n\r\n### int uid\r\n\r\nIdentity of an app. This is the same as a Linux uid, but in Android there is one uid per package,\r\nper user.\r\n\r\nIt is highly discouraged, but uids can be shared between multiple packages using the\r\n`android:sharedUserId` manifest attribute.\r\n\r\n### class UserHandle\r\n\r\nA wrapper for userId. Used especially in public APIs instead of `int userId`, as it clearly\r\ndistinguishes from uid.\r\n\r\n## Security model\r\n\r\nMultiple packages can share a uid by using the `android:sharedUserId` manifest attribute. If\r\npackages share a uid, they can run in the same process via the `android:process` manifest\r\nattribute. Furthermore, file-level access is also tracked by uid. Hence any security or privacy\r\nmechanism needs to be built at uid granularity.\r\n\r\nOn the other hand, apps belonging to the same user cannot see each other's files. They can only\r\ninteract via activity launches, broadcasts, providers, and service bindings. All of these can be\r\nprotected by [permissions](../permission/Permissions.md). Hence any new general communication\r\nmechanism should be access controlled by permissions.\r\n\r\n## Lifecycle\r\n\r\nA system service should deal with users being started and stopped by overriding\r\n`SystemService.onSwitchUser` and `SystemService.onStopUser`.\r\n\r\nIf a user becomes inactive, the system should stop all apps of this user from interacting\r\nwith other apps or the system.\r\n\r\nAnother important lifecycle event is `onUnlockUser`. Only for an unlocked user can you access\r\nall data, e.g. 
which packages are installed.\r\n\r\nYou only want to deal with user profiles that\r\n\r\n- are in the profile group of the foreground user\r\n- are unlocked and not yet stopped\r\n" }, { "alpha_fraction": 0.67132568359375, "alphanum_fraction": 0.6802202463150024, "avg_line_length": 32.72058868408203, "blob_id": "9220cc75cb210b22ce59f97dd0369dc3d4b1a80c", "content_id": "ce7a415228e8d445a6813af176000535f5b42591", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2361, "license_type": "permissive", "max_line_length": 93, "num_lines": 68, "path": "/core/java/android/hardware/camera2/marshal/impl/MarshalQueryableSize.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2014 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\npackage android.hardware.camera2.marshal.impl;\r\n\r\nimport android.util.Size;\r\nimport android.hardware.camera2.marshal.Marshaler;\r\nimport android.hardware.camera2.marshal.MarshalQueryable;\r\nimport android.hardware.camera2.utils.TypeReference;\r\n\r\nimport static android.hardware.camera2.impl.CameraMetadataNative.*;\r\nimport static android.hardware.camera2.marshal.MarshalHelpers.*;\r\n\r\nimport java.nio.ByteBuffer;\r\n\r\n/**\r\n * Marshal {@link Size} to/from {@code TYPE_INT32}\r\n */\r\npublic class MarshalQueryableSize implements MarshalQueryable<Size> {\r\n    private static final int SIZE = SIZEOF_INT32 * 2;\r\n\r\n    private class MarshalerSize extends Marshaler<Size> {\r\n        protected MarshalerSize(TypeReference<Size> typeReference, int nativeType) {\r\n            super(MarshalQueryableSize.this, typeReference, nativeType);\r\n        }\r\n\r\n        @Override\r\n        public void marshal(Size value, ByteBuffer buffer) {\r\n            buffer.putInt(value.getWidth());\r\n            buffer.putInt(value.getHeight());\r\n        }\r\n\r\n        @Override\r\n        public Size unmarshal(ByteBuffer buffer) {\r\n            int width = buffer.getInt();\r\n            int height = buffer.getInt();\r\n\r\n            return new Size(width, height);\r\n        }\r\n\r\n        @Override\r\n        public int getNativeSize() {\r\n            return SIZE;\r\n        }\r\n    }\r\n\r\n    @Override\r\n    public Marshaler<Size> createMarshaler(TypeReference<Size> managedType, int nativeType) {\r\n        return new MarshalerSize(managedType, nativeType);\r\n    }\r\n\r\n    @Override\r\n    public boolean isTypeMappingSupported(TypeReference<Size> managedType, int nativeType) {\r\n        return nativeType == TYPE_INT32 && (Size.class.equals(managedType.getType()));\r\n    }\r\n}\r\n" }, { "alpha_fraction": 0.7575757503509521, "alphanum_fraction": 0.7575757503509521, "avg_line_length": 42, "blob_id": "f4d51a8161d798a200721041725e6ca75794e2c0", "content_id": "01c40f704f2d472589debc6273e600376ae58190", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 132, "license_type": "permissive", "max_line_length": 66, 
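The profile-group concepts in the Users.md record above map onto a small amount of code. The sketch below is illustrative and assumes platform-internal access: it is written as frameworks/base code, since `UserManager#getEnabledProfiles(int)`, `UserManager#getProfileParent(int)` and `ActivityManager.getCurrentUser()` are internal rather than public SDK surface.

```java
import android.app.ActivityManager;
import android.content.pm.UserInfo;
import android.os.UserManager;
import android.util.Log;

import java.util.List;

final class ProfileGroupExample {
    private static final String TAG = "ProfileGroupExample";

    // Walks the profile group of the foreground user and logs, for each
    // enabled profile, whether it is the parent of the group. Assumes a
    // UserManager handle obtained inside the system process.
    static void dumpForegroundProfileGroup(UserManager um) {
        int foregroundUserId = ActivityManager.getCurrentUser();
        List<UserInfo> profiles = um.getEnabledProfiles(foregroundUserId);
        for (UserInfo profile : profiles) {
            // getProfileParent returns null when the user has no parent,
            // i.e. the user is itself the parent of its profile group.
            UserInfo parent = um.getProfileParent(profile.id);
            String role = (parent == null) ? "parent" : ("profile of user " + parent.id);
            Log.d(TAG, "userId=" + profile.id + " is " + role);
        }
    }
}
```

Resolving the parent per profile, rather than caching it, matches the doc's point that the parent user is itself a member of the group.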
"num_lines": 3, "path": "/cmds/input/input", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "#!/system/bin/sh\r\nexport CLASSPATH=/system/framework/input.jar\r\nexec app_process /system/bin com.android.commands.input.Input \"$@\"\r\n" }, { "alpha_fraction": 0.6813483238220215, "alphanum_fraction": 0.6849437952041626, "avg_line_length": 33.31745910644531, "blob_id": "acfa370caaddf61f0a1f9e3abdcf001bde1058ad", "content_id": "f1de70a43e923ec3c3893a3c6affa492b9ad287b", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2225, "license_type": "permissive", "max_line_length": 100, "num_lines": 63, "path": "/core/java/android/printservice/CustomPrinterIconCallback.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2015 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage android.printservice;\r\n\r\nimport android.annotation.NonNull;\r\nimport android.annotation.Nullable;\r\nimport android.graphics.drawable.Icon;\r\nimport android.os.RemoteException;\r\nimport android.print.PrinterId;\r\nimport android.util.Log;\r\n\r\n\r\n/**\r\n * Callback for {@link PrinterDiscoverySession#onRequestCustomPrinterIcon}.\r\n */\r\npublic final class CustomPrinterIconCallback {\r\n /** The printer the call back is for */\r\n private final @NonNull PrinterId mPrinterId;\r\n private final @NonNull IPrintServiceClient mObserver;\r\n private static final String LOG_TAG = \"CustomPrinterIconCB\";\r\n\r\n /**\r\n * Create a callback class to be used once a icon is loaded\r\n *\r\n * @param printerId The printer the icon should be loaded for\r\n * @param observer The observer that needs to be notified about the update.\r\n */\r\n CustomPrinterIconCallback(@NonNull PrinterId printerId, @NonNull IPrintServiceClient observer) {\r\n mPrinterId = printerId;\r\n mObserver = observer;\r\n }\r\n\r\n /**\r\n * Provide a new icon for a printer. 
Can be called more than once to update the icon.\r\n *\r\n * @param icon The new icon for the printer or null to unset the current icon\r\n * @return true iff the icon could be updated\r\n */\r\n public boolean onCustomPrinterIconLoaded(@Nullable Icon icon) {\r\n try {\r\n mObserver.onCustomPrinterIconLoaded(mPrinterId, icon);\r\n } catch (RemoteException e) {\r\n Log.e(LOG_TAG , \"Could not update icon\", e);\r\n return false;\r\n }\r\n\r\n return true;\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.7543478012084961, "alphanum_fraction": 0.7543478012084961, "avg_line_length": 44, "blob_id": "0d5462fa452b1fecf7ed00014ea76fbf86fdd92c", "content_id": "2bc74f5b24a9fdbfa688c940a1d7c22f2fd58143", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "HTML", "length_bytes": 460, "license_type": "permissive", "max_line_length": 90, "num_lines": 10, "path": "/graphics/java/android/graphics/drawable/package.html", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "<HTML>\r\n<BODY>\r\n<p>Provides classes to manage a variety of visual elements that are intended for\r\ndisplay only, such as bitmaps and gradients. These elements are often used\r\nby widgets as background images or simply as indicators (for example, a volume\r\nlevel indicator).</p>\r\n<p>You can create most of these drawables using XML, as described in <a\r\nhref=\"{@docRoot}guide/topics/resources/drawable-resource.html\">Drawable Resources</a>.</p>\r\n</BODY>\r\n</HTML>\r\n" }, { "alpha_fraction": 0.7173610329627991, "alphanum_fraction": 0.7191338539123535, "avg_line_length": 40.22994613647461, "blob_id": "23731261248d09d7ec7ba600edb9e14d14af06d4", "content_id": "5f2bc2de7e1dc215ca49d13886fee5ac159d8b5c", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 7897, "license_type": "permissive", "max_line_length": 96, "num_lines": 187, "path": "/packages/CarSystemUI/tests/src/com/android/systemui/car/navigationbar/ButtonRoleHolderControllerTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2020 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.systemui.car.navigationbar;\r\n\r\nimport static com.google.common.truth.Truth.assertThat;\r\n\r\nimport static org.mockito.ArgumentMatchers.any;\r\nimport static org.mockito.ArgumentMatchers.anyInt;\r\nimport static org.mockito.ArgumentMatchers.eq;\r\nimport static org.mockito.Mockito.doReturn;\r\nimport static org.mockito.Mockito.doThrow;\r\nimport static org.mockito.Mockito.when;\r\n\r\nimport android.app.role.RoleManager;\r\nimport android.content.pm.ApplicationInfo;\r\nimport android.content.pm.PackageManager;\r\nimport android.graphics.drawable.Drawable;\r\nimport android.os.UserHandle;\r\nimport android.testing.AndroidTestingRunner;\r\nimport 
android.testing.TestableLooper;\r\nimport android.view.LayoutInflater;\r\nimport android.widget.LinearLayout;\r\n\r\nimport androidx.test.filters.SmallTest;\r\n\r\nimport com.android.systemui.SysuiTestCase;\r\nimport com.android.systemui.car.CarDeviceProvisionedController;\r\nimport com.android.systemui.tests.R;\r\n\r\nimport org.junit.Before;\r\nimport org.junit.Test;\r\nimport org.junit.runner.RunWith;\r\nimport org.mockito.Mock;\r\nimport org.mockito.MockitoAnnotations;\r\n\r\nimport java.util.List;\r\n\r\n@RunWith(AndroidTestingRunner.class)\r\[email protected]\r\n@SmallTest\r\npublic class ButtonRoleHolderControllerTest extends SysuiTestCase {\r\n private static final String TEST_VALID_PACKAGE_NAME = \"foo\";\r\n private static final String TEST_INVALID_PACKAGE_NAME = \"bar\";\r\n private static final UserHandle TEST_CURRENT_USER = UserHandle.of(100);\r\n private static final UserHandle TEST_NON_CURRENT_USER = UserHandle.of(101);\r\n\r\n private LinearLayout mTestView;\r\n private CarNavigationButton mNavButtonDefaultAppIconForRoleWithEnabled;\r\n private CarNavigationButton mNavButtonDefaultAppIconForRoleWithDisabled;\r\n private ButtonRoleHolderController mControllerUnderTest;\r\n private Drawable mAppIcon;\r\n\r\n @Mock\r\n private RoleManager mRoleManager;\r\n @Mock\r\n private CarDeviceProvisionedController mDeviceProvisionedController;\r\n @Mock\r\n private PackageManager mPackageManager;\r\n @Mock\r\n private ApplicationInfo mApplicationInfo;\r\n\r\n @Before\r\n public void setUp() throws PackageManager.NameNotFoundException {\r\n MockitoAnnotations.initMocks(this);\r\n\r\n mTestView = (LinearLayout) LayoutInflater.from(mContext).inflate(\r\n R.layout.button_role_holder_controller_test, /* root= */ null);\r\n mNavButtonDefaultAppIconForRoleWithEnabled = mTestView\r\n .findViewById(R.id.assistant_role_button);\r\n mNavButtonDefaultAppIconForRoleWithDisabled = mTestView\r\n .findViewById(R.id.assistant_role_disabled_button);\r\n mAppIcon = mContext.getDrawable(R.drawable.car_ic_apps);\r\n when(mApplicationInfo.loadIcon(any())).thenReturn(mAppIcon);\r\n doThrow(new PackageManager.NameNotFoundException()).when(mPackageManager)\r\n .getApplicationInfo(any(), anyInt());\r\n doReturn(mApplicationInfo).when(mPackageManager)\r\n .getApplicationInfo(eq(TEST_VALID_PACKAGE_NAME), anyInt());\r\n when(mDeviceProvisionedController\r\n .getCurrentUser())\r\n .thenReturn(TEST_CURRENT_USER.getIdentifier());\r\n mControllerUnderTest = new ButtonRoleHolderController(mContext,\r\n mPackageManager, mRoleManager, mDeviceProvisionedController);\r\n }\r\n\r\n @Test\r\n public void addAllButtonsWithRoleName_roleAssigned_appIconEnabled_useAssignedAppIcon() {\r\n when(mRoleManager.getRoleHoldersAsUser(eq(RoleManager.ROLE_ASSISTANT), any()))\r\n .thenReturn(List.of(TEST_VALID_PACKAGE_NAME));\r\n\r\n mControllerUnderTest.addAllButtonsWithRoleName(mTestView);\r\n\r\n assertThat(mNavButtonDefaultAppIconForRoleWithEnabled.getAppIcon()).isEqualTo(mAppIcon);\r\n }\r\n\r\n @Test\r\n public void addAllButtonsWithRoleName_roleUnassigned_appIconEnabled_useDefaultIcon() {\r\n when(mRoleManager.getRoleHoldersAsUser(eq(RoleManager.ROLE_ASSISTANT), any()))\r\n .thenReturn(null);\r\n\r\n mControllerUnderTest.addAllButtonsWithRoleName(mTestView);\r\n\r\n assertThat(mNavButtonDefaultAppIconForRoleWithEnabled.getAppIcon()).isNull();\r\n }\r\n\r\n @Test\r\n public void onRoleChanged_currentUser_appIconEnabled_useAssignedAppIcon() {\r\n when(mRoleManager.getRoleHoldersAsUser(eq(RoleManager.ROLE_ASSISTANT), any()))\r\n 
.thenReturn(null);\r\n mControllerUnderTest.addAllButtonsWithRoleName(mTestView);\r\n when(mRoleManager\r\n .getRoleHoldersAsUser(eq(RoleManager.ROLE_ASSISTANT), any()))\r\n .thenReturn(List.of(TEST_VALID_PACKAGE_NAME));\r\n\r\n mControllerUnderTest.onRoleChanged(RoleManager.ROLE_ASSISTANT, TEST_CURRENT_USER);\r\n\r\n assertThat(mNavButtonDefaultAppIconForRoleWithEnabled.getAppIcon()).isEqualTo(mAppIcon);\r\n }\r\n\r\n @Test\r\n public void onRoleChanged_nonCurrentUser_appIconEnabled_iconIsNotUpdated() {\r\n when(mRoleManager\r\n .getRoleHoldersAsUser(eq(RoleManager.ROLE_ASSISTANT), any()))\r\n .thenReturn(null);\r\n mControllerUnderTest.addAllButtonsWithRoleName(mTestView);\r\n Drawable beforeIcon = mNavButtonDefaultAppIconForRoleWithEnabled.getAppIcon();\r\n when(mRoleManager\r\n .getRoleHoldersAsUser(eq(RoleManager.ROLE_ASSISTANT), any()))\r\n .thenReturn(List.of(TEST_VALID_PACKAGE_NAME));\r\n\r\n mControllerUnderTest.onRoleChanged(RoleManager.ROLE_ASSISTANT, TEST_NON_CURRENT_USER);\r\n\r\n Drawable afterIcon = mNavButtonDefaultAppIconForRoleWithEnabled.getAppIcon();\r\n assertThat(afterIcon).isEqualTo(beforeIcon);\r\n }\r\n\r\n @Test\r\n public void onRoleChanged_invalidPackage_useDefaultIcon() {\r\n when(mRoleManager\r\n .getRoleHoldersAsUser(eq(RoleManager.ROLE_ASSISTANT), any()))\r\n .thenReturn(List.of(TEST_INVALID_PACKAGE_NAME));\r\n\r\n mControllerUnderTest.addAllButtonsWithRoleName(mTestView);\r\n\r\n assertThat(mNavButtonDefaultAppIconForRoleWithEnabled.getAppIcon()).isNull();\r\n }\r\n\r\n @Test\r\n public void addAllButtonsWithRoleName_appIconDisabled_useDefaultIcon() {\r\n when(mRoleManager\r\n .getRoleHoldersAsUser(eq(RoleManager.ROLE_ASSISTANT), any()))\r\n .thenReturn(List.of(TEST_VALID_PACKAGE_NAME));\r\n\r\n mControllerUnderTest.addAllButtonsWithRoleName(mTestView);\r\n\r\n assertThat(mNavButtonDefaultAppIconForRoleWithDisabled.getAppIcon()).isNull();\r\n }\r\n\r\n @Test\r\n public void onRoleChanged_roleAssigned_appIconDisabled_useDefaultIcon() {\r\n when(mRoleManager\r\n .getRoleHoldersAsUser(eq(RoleManager.ROLE_ASSISTANT), any()))\r\n .thenReturn(null);\r\n mControllerUnderTest.addAllButtonsWithRoleName(mTestView);\r\n assertThat(mNavButtonDefaultAppIconForRoleWithDisabled.getAppIcon()).isNull();\r\n when(mRoleManager\r\n .getRoleHoldersAsUser(eq(RoleManager.ROLE_ASSISTANT), any()))\r\n .thenReturn(List.of(TEST_VALID_PACKAGE_NAME));\r\n\r\n mControllerUnderTest.onRoleChanged(RoleManager.ROLE_ASSISTANT, TEST_CURRENT_USER);\r\n\r\n assertThat(mNavButtonDefaultAppIconForRoleWithDisabled.getAppIcon()).isNull();\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6479690670967102, "alphanum_fraction": 0.6582849621772766, "avg_line_length": 30.3125, "blob_id": "098f2beac8882efb055646129beb8acd3b461fb9", "content_id": "62747533a7bf30b61ea036e2ee61349c2d7850b2", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1551, "license_type": "permissive", "max_line_length": 75, "num_lines": 48, "path": "/libs/hwui/tests/microbench/LinearAllocatorBench.cpp", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2015 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in 
writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#include <benchmark/benchmark.h>\r\n\r\n#include \"utils/LinearAllocator.h\"\r\n\r\n#include <vector>\r\n\r\nusing namespace android;\r\nusing namespace android::uirenderer;\r\n\r\nstatic void BM_LinearStdAllocator_vectorBaseline(benchmark::State& state) {\r\n while (state.KeepRunning()) {\r\n std::vector<char> v;\r\n for (int j = 0; j < 200; j++) {\r\n v.push_back(j);\r\n }\r\n benchmark::DoNotOptimize(&v);\r\n }\r\n}\r\nBENCHMARK(BM_LinearStdAllocator_vectorBaseline);\r\n\r\nstatic void BM_LinearStdAllocator_vector(benchmark::State& state) {\r\n while (state.KeepRunning()) {\r\n LinearAllocator la;\r\n LinearStdAllocator<void*> stdAllocator(la);\r\n std::vector<char, LinearStdAllocator<char> > v(stdAllocator);\r\n for (int j = 0; j < 200; j++) {\r\n v.push_back(j);\r\n }\r\n benchmark::DoNotOptimize(&v);\r\n }\r\n}\r\nBENCHMARK(BM_LinearStdAllocator_vector);\r\n" }, { "alpha_fraction": 0.6400725245475769, "alphanum_fraction": 0.6409791707992554, "avg_line_length": 27.810810089111328, "blob_id": "a098e94cc3a193e8aa1ecb6bc86bb6f81f8437b0", "content_id": "a84c0a8012f91a29448291f83e1d51637bc2670c", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1103, "license_type": "permissive", "max_line_length": 94, "num_lines": 37, "path": "/tests/StatusBar/src/com/android/statusbartest/TestAlertActivity.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "package com.android.statusbartest;\r\n\r\nimport android.app.Activity;\r\nimport android.app.NotificationManager;\r\nimport android.content.Intent;\r\nimport android.os.Bundle;\r\nimport android.util.Log;\r\nimport android.view.View;\r\n\r\npublic class TestAlertActivity extends Activity {\r\n int mId;\r\n\r\n @Override\r\n public void onResume() {\r\n super.onResume();\r\n Log.d(\"StatusBarTest\", \"TestAlertActivity.onResume\");\r\n Intent intent = getIntent();\r\n mId = intent.getIntExtra(\"id\", -1);\r\n Log.d(\"StatusBarTest\", \"Remembering notification id=\" + mId);\r\n setContentView(R.layout.test_alert);\r\n }\r\n\r\n @Override\r\n public void onPause() {\r\n super.onPause();\r\n Log.d(\"StatusBarTest\", \"onPause: Canceling notification id=\" + mId);\r\n NotificationManager nm = (NotificationManager) getSystemService(NOTIFICATION_SERVICE);\r\n nm.cancel(mId);\r\n finish();\r\n }\r\n\r\n @SuppressWarnings({\"UnusedDeclaration\"})\r\n public void dismiss(View v) {\r\n Log.d(\"StatusBarTest\", \"TestAlertActivity.dismiss\");\r\n finish();\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6753709316253662, "alphanum_fraction": 0.6813056468963623, "avg_line_length": 34.630435943603516, "blob_id": "9ec5353a124948faf16501cc1596d7d52e14d90e", "content_id": "e5f5521fe097e4fd1a39f49a15f1119a097b7af5", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1685, "license_type": "permissive", "max_line_length": 80, "num_lines": 46, "path": "/tests/AmSlam/src/test/amslam/PingReceiver.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2016 The Android Open Source Project\r\n *\r\n * Licensed under 
the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage test.amslam;\r\n\r\nimport android.app.Service;\r\nimport android.content.Intent;\r\nimport android.os.Handler;\r\nimport android.os.IBinder;\r\nimport android.os.SystemClock;\r\n\r\npublic class PingReceiver extends Service {\r\n\r\n @Override\r\n public int onStartCommand(Intent intent, int flags, int startId) {\r\n Intent response = new Intent(this, PongReceiver.class);\r\n response.putExtra(\"start_time\", intent.getLongExtra(\"start_time\", 0));\r\n response.putExtra(\"bounce_time\", SystemClock.uptimeMillis());\r\n response.putExtra(\"receiver\", getClass().getSimpleName());\r\n sendBroadcast(response);\r\n stopSelf();\r\n // If we exit before returning from onStartCommand the system will\r\n // think we crashed and attempt a re-delivery, which we don't want here.\r\n // Post'ing the kill deals with this just fine.\r\n new Handler().post(() -> System.exit(0));\r\n return START_NOT_STICKY;\r\n }\r\n\r\n @Override\r\n public IBinder onBind(Intent intent) {\r\n return null;\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6271617412567139, "alphanum_fraction": 0.6312309503555298, "avg_line_length": 34.407405853271484, "blob_id": "770159f8d2759ed2701a62694ed713f9ceea3095", "content_id": "fa3323b6f1ea9edd1380dd2405b534d744503b5a", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 1966, "license_type": "permissive", "max_line_length": 96, "num_lines": 54, "path": "/media/mca/filterfw/jni/jni_vertex_frame.h", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2011 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#ifndef ANDROID_FILTERFW_JNI_VERTEX_FRAME_H\r\n#define ANDROID_FILTERFW_JNI_VERTEX_FRAME_H\r\n\r\n#include <jni.h>\r\n\r\n#ifdef __cplusplus\r\nextern \"C\" {\r\n#endif\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_VertexFrame_nativeAllocate(JNIEnv* env, jobject thiz, jint size);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_VertexFrame_nativeDeallocate(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_VertexFrame_setNativeInts(JNIEnv* env, jobject thiz, jintArray ints);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_VertexFrame_setNativeFloats(JNIEnv* env,\r\n jobject thiz,\r\n jfloatArray 
floats);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_VertexFrame_setNativeData(JNIEnv* env,\r\n jobject thiz,\r\n jbyteArray data,\r\n jint offset,\r\n jint length);\r\n\r\nJNIEXPORT jint JNICALL\r\nJava_android_filterfw_core_VertexFrame_getNativeVboId(JNIEnv* env, jobject thiz);\r\n\r\n#ifdef __cplusplus\r\n}\r\n#endif\r\n\r\n#endif // ANDROID_FILTERFW_JNI_VERTEX_FRAME_H\r\n" }, { "alpha_fraction": 0.6997036337852478, "alphanum_fraction": 0.7079354524612427, "avg_line_length": 31.0108699798584, "blob_id": "400ecc398c764d6bf66ba2c03f54e4ee2fe3df9e", "content_id": "6ff5735749a42c66adb4dfe73d655a8e98bb7f53", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3037, "license_type": "permissive", "max_line_length": 129, "num_lines": 92, "path": "/packages/SystemUI/docs/clock-plugins.md", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "# Clock Plugins\r\n\r\n## Introduction\r\n\r\nThe clock appearing on the lock screen and always on display (AOD) can be\r\ncustomized via the ClockPlugin plugin interface.\r\n\r\n## System Health\r\n\r\nClocks are high risk for battery consumption and screen burn-in because they\r\nmodify the UI of AOD.\r\n\r\nTo reduce battery consumption, it is recommended to\r\ntarget a maximum on-pixel-ratio (OPR) of 5%. Clocks that are composed of\r\nlarge blocks of color that cause the OPR to exceed 5% should be avoided.\r\n\r\nTo prevent screen burn-in, clocks should not be composed of large solid\r\nblocks of color, and the clock should be moved around the screen to\r\ndistribute the on pixels across a large number of pixels. Software\r\nburn-in testing is a good starting point to assess the pixel shifting\r\n(clock movement) scheme and shape of the clock.\r\n\r\n### Software Burn-In Test\r\n\r\nThe goal is to look for bright spots in the luminosity average over a period of\r\ntime. It is difficult to define a threshold where burn-in will occur. It is,\r\ntherefore, recommended to compare against an element on AOD that is known not\r\nto cause problems.\r\n\r\nFor clock face that contain color, it is recommended to use an all white\r\nversion of the face. 
Since white has the highest luminosity, this version of\r\nthe clock face represents the worst case scenario.\r\n\r\nTo start, generate a sequence of screenshots for each minute over a 12 hr interval.\r\n\r\n```\r\nserial = '84TY004MS' # serial number for the device\r\ncount = 1\r\nt = datetime.datetime(2019, 1, 1)\r\nstop = t + datetime.timedelta(hours=12)\r\nif not os.path.exists(OUTPUT_FOLDER):\r\n raise RuntimeError('output folder \"%s\" does not exist' % OUTPUT_FOLDER)\r\nwhile t <= stop:\r\n os.system(\"adb -s %s shell 'date %s ; am broadcast -a android.intent.action.TIME_SET'\" % (serial, t.strftime('%m%d%H%M%Y.%S')))\r\n os.system('adb -s %s shell screencap -p > %s/screencap_%06d.png' % (serial, OUTPUT_FOLDER, count))\r\n t += datetime.timedelta(minutes=1)\r\n count += 1\r\n```\r\n\r\nAverage the luminosity of the screenshots.\r\n\r\n```\r\n#!python\r\nimport numpy\r\nimport scipy.ndimage\r\nfrom imageio import imread, imwrite\r\nimport matplotlib.pylab as plt\r\nimport os\r\nimport os.path\r\n\r\ndef images(path):\r\n return [os.path.join(path, name) for name in os.listdir(path) if name.endswith('.png')]\r\n\r\ndef average(images):\r\n AVG = None\r\n for name in images:\r\n IM = scipy.ndimage.imread(name, mode='L')\r\n A = numpy.array(IM, dtype=numpy.double)\r\n if AVG is None:\r\n AVG = A\r\n else:\r\n AVG += A\r\n AVG /= len(images)\r\n return numpy.array(AVG, dtype=numpy.uint8)\r\n\r\ndef main(path):\r\n ims = images(path)\r\n if len(ims) == 0:\r\n raise ValueError(\"folder '%s' doesn't contain any png files\" % path)\r\n AVG = average(ims)\r\n imwrite('average.png', AVG)\r\n plt.imshow(AVG)\r\n plt.show()\r\n\r\nif __name__=='__main__':\r\n import sys\r\n main(sys.argv[1])\r\n```\r\n\r\nLook for bright spots in the luminosity average. If bright spots are found,\r\naction should be taken to change the shape of the clock face or increase the\r\namount of pixel shifting.\r\n" }, { "alpha_fraction": 0.5766373872756958, "alphanum_fraction": 0.5795071125030518, "avg_line_length": 38.299320220947266, "blob_id": "0630487eab3e80be8233ac0d48ebfe86eb9acb83", "content_id": "c64ea113e88c162a7a85131cb0e7eacb50063606", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 5924, "license_type": "permissive", "max_line_length": 101, "num_lines": 147, "path": "/tests/FrameworkPerf/src/com/android/frameworkperf/SimpleInflater.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2011 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.frameworkperf;\r\n\r\nimport java.io.IOException;\r\n\r\nimport org.xmlpull.v1.XmlPullParser;\r\nimport org.xmlpull.v1.XmlPullParserException;\r\n\r\nimport android.content.Context;\r\nimport android.content.res.TypedArray;\r\nimport android.content.res.XmlResourceParser;\r\nimport android.util.AttributeSet;\r\nimport 
android.util.Xml;\r\nimport android.view.InflateException;\r\n\r\npublic class SimpleInflater {\r\n /** Menu tag name in XML. */\r\n private static final String XML_MENU = \"menu\";\r\n \r\n /** Group tag name in XML. */\r\n private static final String XML_GROUP = \"group\";\r\n \r\n /** Item tag name in XML. */\r\n private static final String XML_ITEM = \"item\";\r\n\r\n private Context mContext;\r\n\r\n public SimpleInflater(Context context) {\r\n mContext = context;\r\n }\r\n\r\n public void inflate(int menuRes) {\r\n XmlResourceParser parser = null;\r\n try {\r\n parser = mContext.getResources().getLayout(menuRes);\r\n AttributeSet attrs = Xml.asAttributeSet(parser);\r\n \r\n parseMenu(parser, attrs);\r\n } catch (XmlPullParserException e) {\r\n throw new InflateException(\"Error inflating menu XML\", e);\r\n } catch (IOException e) {\r\n throw new InflateException(\"Error inflating menu XML\", e);\r\n } finally {\r\n if (parser != null) parser.close();\r\n }\r\n }\r\n\r\n private void parseMenu(XmlPullParser parser, AttributeSet attrs)\r\n throws XmlPullParserException, IOException {\r\n int eventType = parser.getEventType();\r\n String tagName;\r\n boolean lookingForEndOfUnknownTag = false;\r\n String unknownTagName = null;\r\n\r\n // This loop will skip to the menu start tag\r\n do {\r\n if (eventType == XmlPullParser.START_TAG) {\r\n tagName = parser.getName();\r\n if (tagName.equals(XML_MENU)) {\r\n // Go to next tag\r\n eventType = parser.next();\r\n break;\r\n }\r\n \r\n throw new RuntimeException(\"Expecting menu, got \" + tagName);\r\n }\r\n eventType = parser.next();\r\n } while (eventType != XmlPullParser.END_DOCUMENT);\r\n \r\n boolean reachedEndOfMenu = false;\r\n while (!reachedEndOfMenu) {\r\n switch (eventType) {\r\n case XmlPullParser.START_TAG:\r\n if (lookingForEndOfUnknownTag) {\r\n break;\r\n }\r\n \r\n tagName = parser.getName();\r\n if (tagName.equals(XML_ITEM)) {\r\n readItem(attrs);\r\n } else if (tagName.equals(XML_MENU)) {\r\n parseMenu(parser, attrs);\r\n } else {\r\n lookingForEndOfUnknownTag = true;\r\n unknownTagName = tagName;\r\n }\r\n break;\r\n \r\n case XmlPullParser.END_TAG:\r\n tagName = parser.getName();\r\n if (lookingForEndOfUnknownTag && tagName.equals(unknownTagName)) {\r\n lookingForEndOfUnknownTag = false;\r\n unknownTagName = null;\r\n } else if (tagName.equals(XML_ITEM)) {\r\n } else if (tagName.equals(XML_MENU)) {\r\n reachedEndOfMenu = true;\r\n }\r\n break;\r\n \r\n case XmlPullParser.END_DOCUMENT:\r\n throw new RuntimeException(\"Unexpected end of document\");\r\n }\r\n \r\n eventType = parser.next();\r\n }\r\n }\r\n\r\n public void readItem(AttributeSet attrs) {\r\n TypedArray a = mContext.obtainStyledAttributes(attrs,\r\n com.android.internal.R.styleable.MenuItem);\r\n\r\n // Inherit attributes from the group as default value\r\n int itemId = a.getResourceId(R.styleable.MenuItem_android_id, 0);\r\n final int category = a.getInt(R.styleable.MenuItem_android_menuCategory, 0);\r\n final int order = a.getInt(R.styleable.MenuItem_android_orderInCategory, 0);\r\n CharSequence itemTitle = a.getText(R.styleable.MenuItem_android_title);\r\n CharSequence itemTitleCondensed = a.getText(R.styleable.MenuItem_android_titleCondensed);\r\n int itemIconResId = a.getResourceId(R.styleable.MenuItem_android_icon, 0);\r\n String itemAlphabeticShortcut = a.getString(R.styleable.MenuItem_android_alphabeticShortcut);\r\n String itemNumericShortcut = a.getString(R.styleable.MenuItem_android_numericShortcut);\r\n int itemCheckable = 0;\r\n if 
(a.hasValue(R.styleable.MenuItem_android_checkable)) {\r\n // Item has attribute checkable, use it\r\n itemCheckable = a.getBoolean(R.styleable.MenuItem_android_checkable, false) ? 1 : 0;\r\n }\r\n boolean itemChecked = a.getBoolean(R.styleable.MenuItem_android_checked, false);\r\n boolean itemVisible = a.getBoolean(R.styleable.MenuItem_android_visible, false);\r\n boolean itemEnabled = a.getBoolean(R.styleable.MenuItem_android_enabled, false);\r\n\r\n a.recycle();\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6176215410232544, "alphanum_fraction": 0.6228298544883728, "avg_line_length": 26.44444465637207, "blob_id": "2168ae5a706370a0684c9b031b431b7162a41a6d", "content_id": "989c13a8d53cc9532bef0d1e68f41aa9711ddb68", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2304, "license_type": "permissive", "max_line_length": 77, "num_lines": 81, "path": "/tests/net/java/com/android/server/connectivity/MetricsTestUtil.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2016 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.server.connectivity;\r\n\r\nimport android.net.ConnectivityMetricsEvent;\r\nimport android.os.Bundle;\r\nimport android.os.Parcel;\r\nimport android.os.Parcelable;\r\n\r\nimport java.util.function.Consumer;\r\n\r\nabstract public class MetricsTestUtil {\r\n private MetricsTestUtil() {\r\n }\r\n\r\n static ConnectivityMetricsEvent ev(Parcelable p) {\r\n ConnectivityMetricsEvent ev = new ConnectivityMetricsEvent();\r\n ev.timestamp = 1L;\r\n ev.data = p;\r\n return ev;\r\n }\r\n\r\n static ConnectivityMetricsEvent describeIpEvent(Consumer<Parcel>... fs) {\r\n Parcel p = Parcel.obtain();\r\n for (Consumer<Parcel> f : fs) {\r\n f.accept(p);\r\n }\r\n p.setDataPosition(0);\r\n return ev(p.readParcelable(ClassLoader.getSystemClassLoader()));\r\n }\r\n\r\n static Consumer<Parcel> aType(Class<?> c) {\r\n return aString(c.getName());\r\n }\r\n\r\n static Consumer<Parcel> aBool(boolean b) {\r\n return aByte((byte) (b ? 1 : 0));\r\n }\r\n\r\n static Consumer<Parcel> aByte(byte b) {\r\n return (p) -> p.writeByte(b);\r\n }\r\n\r\n static Consumer<Parcel> anInt(int i) {\r\n return (p) -> p.writeInt(i);\r\n }\r\n\r\n static Consumer<Parcel> aLong(long l) {\r\n return (p) -> p.writeLong(l);\r\n }\r\n\r\n static Consumer<Parcel> aString(String s) {\r\n return (p) -> p.writeString(s);\r\n }\r\n\r\n static Consumer<Parcel> aByteArray(byte... ary) {\r\n return (p) -> p.writeByteArray(ary);\r\n }\r\n\r\n static Consumer<Parcel> anIntArray(int... 
ary) {\r\n return (p) -> p.writeIntArray(ary);\r\n }\r\n\r\n static byte b(int i) {\r\n return (byte) i;\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6427255868911743, "alphanum_fraction": 0.6443998217582703, "avg_line_length": 35.33124923706055, "blob_id": "71bd7bdb097600f7efe9f0d4cb4e0db65f226c6d", "content_id": "61d17b6351294d538091414dd8e57d872a373cff", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 5973, "license_type": "permissive", "max_line_length": 98, "num_lines": 160, "path": "/packages/PrintSpooler/src/com/android/printspooler/util/ApprovedPrintServices.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2015 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.printspooler.util;\r\n\r\nimport android.content.ComponentName;\r\nimport android.content.Context;\r\nimport android.content.SharedPreferences;\r\nimport android.content.SharedPreferences.OnSharedPreferenceChangeListener;\r\nimport android.printservice.PrintService;\r\nimport android.util.ArraySet;\r\n\r\nimport java.util.List;\r\nimport java.util.Set;\r\n\r\n/**\r\n * Manage approved print services. 
These services are stored in the shared preferences.\r\n */\r\npublic class ApprovedPrintServices {\r\n /**\r\n * Used for locking accesses to the approved services.\r\n */\r\n static final public Object sLock = new Object();\r\n\r\n private static final String APPROVED_SERVICES_PREFERENCE = \"PRINT_SPOOLER_APPROVED_SERVICES\";\r\n private final SharedPreferences mPreferences;\r\n\r\n /**\r\n * Create a new {@link ApprovedPrintServices}\r\n *\r\n * @param owner The {@link Context} using this object.\r\n */\r\n public ApprovedPrintServices(Context owner) {\r\n mPreferences = owner.getSharedPreferences(APPROVED_SERVICES_PREFERENCE,\r\n Context.MODE_PRIVATE);\r\n }\r\n\r\n /**\r\n * Get {@link Set} of approved services.\r\n *\r\n * @return A {@link Set} containing all currently approved services.\r\n */\r\n public Set<String> getApprovedServices() {\r\n return mPreferences.getStringSet(APPROVED_SERVICES_PREFERENCE, null);\r\n }\r\n\r\n /**\r\n * Check if a {@link PrintService} is approved.\r\n *\r\n * This function does not acquire the {@link #sLock}.\r\n *\r\n * @param service The {@link ComponentName} of the {@link PrintService} that might be approved\r\n * @return true iff the service is currently approved\r\n */\r\n public boolean isApprovedService(ComponentName service) {\r\n final Set<String> approvedServices = getApprovedServices();\r\n\r\n if (approvedServices != null) {\r\n final String flattenedString = service.flattenToShortString();\r\n\r\n for (String approvedService : approvedServices) {\r\n if (approvedService.equals(flattenedString)) {\r\n return true;\r\n }\r\n }\r\n }\r\n\r\n return false;\r\n }\r\n\r\n /**\r\n * Add a {@link PrintService} to the list of approved print services.\r\n *\r\n * @param serviceToAdd The {@link ComponentName} of the {@link PrintService} to be approved.\r\n */\r\n public void addApprovedService(ComponentName serviceToAdd) {\r\n synchronized (sLock) {\r\n Set<String> oldApprovedServices =\r\n mPreferences.getStringSet(APPROVED_SERVICES_PREFERENCE, null);\r\n\r\n Set<String> newApprovedServices;\r\n if (oldApprovedServices == null) {\r\n newApprovedServices = new ArraySet<String>(1);\r\n } else {\r\n // Copy approved services.\r\n newApprovedServices = new ArraySet<String>(oldApprovedServices);\r\n }\r\n newApprovedServices.add(serviceToAdd.flattenToShortString());\r\n\r\n SharedPreferences.Editor editor = mPreferences.edit();\r\n editor.putStringSet(APPROVED_SERVICES_PREFERENCE, newApprovedServices);\r\n editor.apply();\r\n }\r\n }\r\n\r\n /**\r\n * Add a {@link OnSharedPreferenceChangeListener} that listens for changes to the approved\r\n * services. 
Should only be called while holding {@link #sLock} to synchronize against\r\n * {@link #addApprovedService}.\r\n *\r\n * @param listener {@link OnSharedPreferenceChangeListener} to register\r\n */\r\n public void registerChangeListenerLocked(OnSharedPreferenceChangeListener listener) {\r\n mPreferences.registerOnSharedPreferenceChangeListener(listener);\r\n }\r\n\r\n /**\r\n * Unregister a listener registered in {@link #registerChangeListenerLocked}.\r\n *\r\n * @param listener {@link OnSharedPreferenceChangeListener} to unregister\r\n */\r\n public void unregisterChangeListener(OnSharedPreferenceChangeListener listener) {\r\n mPreferences.unregisterOnSharedPreferenceChangeListener(listener);\r\n }\r\n\r\n /**\r\n * Remove all approved {@link PrintService print services} that are not in the given set.\r\n *\r\n * @param serviceNamesToKeep The {@link ComponentName names } of the services to keep\r\n */\r\n public void pruneApprovedServices(List<ComponentName> serviceNamesToKeep) {\r\n synchronized (sLock) {\r\n Set<String> approvedServices = getApprovedServices();\r\n\r\n if (approvedServices == null) {\r\n return;\r\n }\r\n\r\n Set<String> newApprovedServices = new ArraySet<>(approvedServices.size());\r\n\r\n final int numServiceNamesToKeep = serviceNamesToKeep.size();\r\n for(int i = 0; i < numServiceNamesToKeep; i++) {\r\n String serviceToKeep = serviceNamesToKeep.get(i).flattenToShortString();\r\n if (approvedServices.contains(serviceToKeep)) {\r\n newApprovedServices.add(serviceToKeep);\r\n }\r\n }\r\n\r\n if (approvedServices.size() != newApprovedServices.size()) {\r\n SharedPreferences.Editor editor = mPreferences.edit();\r\n\r\n editor.putStringSet(APPROVED_SERVICES_PREFERENCE, newApprovedServices);\r\n editor.apply();\r\n }\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6577306985855103, "alphanum_fraction": 0.6627181768417358, "avg_line_length": 27.66666603088379, "blob_id": "4302b9c5f9ce57013a06dd12d52234ca1ca0efd2", "content_id": "4e64d4c1efc56321051a6f0ebafda70254b5d288", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1604, "license_type": "permissive", "max_line_length": 75, "num_lines": 54, "path": "/packages/WAPPushManager/tests/src/com/android/smspush/unitTests/ReceiverActivity.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2010 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.smspush.unitTests;\r\n\r\nimport android.app.Activity;\r\nimport android.content.Intent;\r\nimport android.os.Bundle;\r\nimport android.util.Log;\r\n\r\nimport com.android.internal.util.HexDump;\r\n\r\n/**\r\n * Activity type receiver application\r\n */\r\npublic class ReceiverActivity extends Activity {\r\n private static final String LOG_TAG = \"WAP PUSH\";\r\n\r\n @Override\r\n public void onCreate(Bundle savedInstanceState) {\r\n 
super.onCreate(savedInstanceState);\r\n Log.d(LOG_TAG, \"activity created!!\");\r\n\r\n Intent in = getIntent();\r\n byte[] body;\r\n byte[] header;\r\n\r\n body = in.getByteArrayExtra(\"data\");\r\n header = in.getByteArrayExtra(\"header\");\r\n\r\n Log.d(LOG_TAG, \"header:\");\r\n Log.d(LOG_TAG, HexDump.dumpHexString(header));\r\n Log.d(LOG_TAG, \"body:\");\r\n Log.d(LOG_TAG, HexDump.dumpHexString(body));\r\n\r\n DataVerify.SetLastReceivedPdu(body);\r\n\r\n finish();\r\n\r\n }\r\n}\r\n\r\n" }, { "alpha_fraction": 0.6013836860656738, "alphanum_fraction": 0.6045768857002258, "avg_line_length": 29.847457885742188, "blob_id": "4efbdff0b4a253a3eb0966b8ba34e61e29a3179a", "content_id": "45756ee4aeb3ea7ee26e68e3324f0bf2566088b4", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1879, "license_type": "permissive", "max_line_length": 115, "num_lines": 59, "path": "/tests/GridLayoutTest/src/com/android/test/layout/LayoutInsetsTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "package com.android.test.layout;\r\n\r\nimport android.app.Activity;\r\nimport android.content.Context;\r\nimport android.os.Build;\r\nimport android.os.Bundle;\r\nimport android.view.Gravity;\r\nimport android.view.View;\r\nimport android.widget.Button;\r\nimport android.widget.GridLayout;\r\nimport android.widget.TextView;\r\n\r\nimport static android.widget.GridLayout.LayoutParams;\r\nimport static android.widget.GridLayout.LAYOUT_MODE_OPTICAL_BOUNDS;\r\n\r\npublic class LayoutInsetsTest extends Activity {\r\n static int[] GRAVITIES = {Gravity.LEFT, Gravity.LEFT, Gravity.CENTER_HORIZONTAL, Gravity.RIGHT, Gravity.RIGHT};\r\n\r\n public static View create(Context context) {\r\n final int N = GRAVITIES.length;\r\n\r\n GridLayout p = new GridLayout(context);\r\n p.setUseDefaultMargins(true);\r\n //p.setAlignmentMode(ALIGN_BOUNDS);\r\n p.setLayoutMode(LAYOUT_MODE_OPTICAL_BOUNDS);\r\n\r\n p.setColumnCount(N);\r\n\r\n for (int i = 0; i < 2*N; i++) {\r\n View c;\r\n if (i % 2 == 0) {\r\n TextView tv = new TextView(context);\r\n tv.setTextSize(32);\r\n tv.setText(\"A\");\r\n c = tv;\r\n } else {\r\n Button b = new Button(context);\r\n b.setBackgroundResource(R.drawable.btn_default_normal);\r\n b.setText(\"B\");\r\n c = b;\r\n }\r\n\r\n LayoutParams lp = new LayoutParams();\r\n lp.setGravity(GRAVITIES[(i % N)]);\r\n p.addView(c, lp);\r\n\r\n }\r\n\r\n\r\n return p;\r\n }\r\n\r\n protected void onCreate(Bundle savedInstanceState) {\r\n super.onCreate(savedInstanceState);\r\n //getApplicationInfo().targetSdkVersion = Build.VERSION_CODES.ICE_CREAM_SANDWICH;\r\n getApplicationInfo().targetSdkVersion = Build.VERSION_CODES.JELLY_BEAN;\r\n setContentView(create(this));\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.743186891078949, "alphanum_fraction": 0.743186891078949, "avg_line_length": 35.130950927734375, "blob_id": "51ab1119ae8edd50f1628d9d9ef9e62b57cf4706", "content_id": "5e0c79478dd80f8954f394905d480c899ca428d6", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 3119, "license_type": "permissive", "max_line_length": 90, "num_lines": 84, "path": "/packages/SettingsLib/tests/robotests/src/com/android/settingslib/bluetooth/HeadsetProfileTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "package com.android.settingslib.bluetooth;\r\n\r\nimport static 
com.google.common.truth.Truth.assertThat;\r\n\r\nimport static org.mockito.Mockito.spy;\r\nimport static org.mockito.Mockito.when;\r\n\r\nimport android.bluetooth.BluetoothAdapter;\r\nimport android.bluetooth.BluetoothDevice;\r\nimport android.bluetooth.BluetoothHeadset;\r\nimport android.bluetooth.BluetoothProfile;\r\nimport android.content.Context;\r\n\r\nimport com.android.settingslib.testutils.shadow.ShadowBluetoothAdapter;\r\n\r\nimport org.junit.Before;\r\nimport org.junit.Test;\r\nimport org.junit.runner.RunWith;\r\nimport org.mockito.Mock;\r\nimport org.mockito.MockitoAnnotations;\r\nimport org.robolectric.RobolectricTestRunner;\r\nimport org.robolectric.RuntimeEnvironment;\r\nimport org.robolectric.annotation.Config;\r\nimport org.robolectric.shadow.api.Shadow;\r\n\r\n@RunWith(RobolectricTestRunner.class)\r\n@Config(shadows = {ShadowBluetoothAdapter.class})\r\npublic class HeadsetProfileTest {\r\n\r\n @Mock\r\n private CachedBluetoothDeviceManager mDeviceManager;\r\n @Mock\r\n private LocalBluetoothProfileManager mProfileManager;\r\n @Mock\r\n private BluetoothHeadset mService;\r\n @Mock\r\n private CachedBluetoothDevice mCachedBluetoothDevice;\r\n @Mock\r\n private BluetoothDevice mBluetoothDevice;\r\n private BluetoothProfile.ServiceListener mServiceListener;\r\n private HeadsetProfile mProfile;\r\n private ShadowBluetoothAdapter mShadowBluetoothAdapter;\r\n\r\n @Before\r\n public void setUp() {\r\n MockitoAnnotations.initMocks(this);\r\n Context context = spy(RuntimeEnvironment.application);\r\n mShadowBluetoothAdapter = Shadow.extract(BluetoothAdapter.getDefaultAdapter());\r\n\r\n when(mCachedBluetoothDevice.getDevice()).thenReturn(mBluetoothDevice);\r\n\r\n mProfile = new HeadsetProfile(context, mDeviceManager, mProfileManager);\r\n mServiceListener = mShadowBluetoothAdapter.getServiceListener();\r\n mServiceListener.onServiceConnected(BluetoothProfile.HEADSET, mService);\r\n }\r\n\r\n @Test\r\n public void bluetoothProfile_shouldReturnTheAudioStatusFromBlueToothHeadsetService() {\r\n when(mService.isAudioOn()).thenReturn(true);\r\n assertThat(mProfile.isAudioOn()).isTrue();\r\n\r\n when(mService.isAudioOn()).thenReturn(false);\r\n assertThat(mProfile.isAudioOn()).isFalse();\r\n }\r\n\r\n @Test\r\n public void testHeadsetProfile_shouldReturnAudioState() {\r\n when(mService.getAudioState(mBluetoothDevice)).\r\n thenReturn(BluetoothHeadset.STATE_AUDIO_DISCONNECTED);\r\n assertThat(mProfile.getAudioState(mBluetoothDevice)).\r\n isEqualTo(BluetoothHeadset.STATE_AUDIO_DISCONNECTED);\r\n\r\n when(mService.getAudioState(mBluetoothDevice)).\r\n thenReturn(BluetoothHeadset.STATE_AUDIO_CONNECTED);\r\n assertThat(mProfile.getAudioState(mBluetoothDevice)).\r\n isEqualTo(BluetoothHeadset.STATE_AUDIO_CONNECTED);\r\n }\r\n\r\n @Test\r\n public void setActiveDevice_returnTrue() {\r\n assertThat(mProfile.setActiveDevice(null)).isTrue();\r\n assertThat(mProfile.setActiveDevice(mBluetoothDevice)).isTrue();\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6502732038497925, "alphanum_fraction": 0.6585641503334045, "avg_line_length": 40.79838562011719, "blob_id": "f60698e8f8d6343ef4c5cef1ed980f280ea8e227", "content_id": "71c08d0d526948070b5cc289fc7b919751a44689", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 5307, "license_type": "permissive", "max_line_length": 98, "num_lines": 124, "path": "/core/tests/ConnectivityManagerTest/src/com/android/connectivitymanagertest/functional/WifiAssociationTest.java", 
"repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2013, The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.connectivitymanagertest.functional;\r\n\r\nimport android.net.wifi.WifiConfiguration;\r\nimport android.net.wifi.WifiConfiguration.AuthAlgorithm;\r\nimport android.net.wifi.WifiConfiguration.GroupCipher;\r\nimport android.net.wifi.WifiConfiguration.PairwiseCipher;\r\nimport android.net.wifi.WifiConfiguration.Protocol;\r\nimport android.net.wifi.WifiInfo;\r\nimport android.net.wifi.WifiManager;\r\nimport android.os.Bundle;\r\nimport android.test.suitebuilder.annotation.LargeTest;\r\n\r\nimport com.android.connectivitymanagertest.ConnectivityManagerTestBase;\r\nimport com.android.connectivitymanagertest.WifiAssociationTestRunner;\r\nimport com.android.connectivitymanagertest.WifiConfigurationHelper;\r\n\r\n/**\r\n * Test Wi-Fi connection with different configuration\r\n * To run this tests:\r\n * * adb shell am instrument -e ssid <ssid> -e password <password> \\\r\n * -e security-type [OPEN|WEP64|WEP128|WPA_TKIP|WPA2_AES] -e frequency-band [2.4|5.0|auto]\r\n * -w com.android.connectivitymanagertest/.WifiAssociationTestRunner\"\r\n */\r\npublic class WifiAssociationTest extends ConnectivityManagerTestBase {\r\n private enum SecurityType {\r\n OPEN, WEP64, WEP128, WPA_TKIP, WPA2_AES\r\n }\r\n\r\n public WifiAssociationTest() {\r\n super(WifiAssociationTest.class.getSimpleName());\r\n }\r\n\r\n /**\r\n * Test that the wifi can associate with a given access point.\r\n */\r\n @LargeTest\r\n public void testWifiAssociation() {\r\n WifiAssociationTestRunner runner = (WifiAssociationTestRunner) getInstrumentation();\r\n Bundle arguments = runner.getArguments();\r\n\r\n String ssid = arguments.getString(\"ssid\");\r\n assertNotNull(\"ssid is empty\", ssid);\r\n\r\n String securityTypeStr = arguments.getString(\"security-type\");\r\n assertNotNull(\"security-type is empty\", securityTypeStr);\r\n SecurityType securityType = SecurityType.valueOf(securityTypeStr);\r\n\r\n String password = arguments.getString(\"password\");\r\n\r\n assertTrue(\"enable Wifi failed\", enableWifi());\r\n WifiInfo wi = mWifiManager.getConnectionInfo();\r\n logv(\"%s\", wi);\r\n assertNotNull(\"no active wifi info\", wi);\r\n\r\n WifiConfiguration config = getConfig(ssid, securityType, password);\r\n\r\n logv(\"Network config: %s\", config.toString());\r\n connectToWifi(config);\r\n }\r\n\r\n /**\r\n * Get the {@link WifiConfiguration} based on ssid, security, and password.\r\n */\r\n private WifiConfiguration getConfig(String ssid, SecurityType securityType, String password) {\r\n logv(\"Security type is %s\", securityType.toString());\r\n\r\n WifiConfiguration config = null;\r\n switch (securityType) {\r\n case OPEN:\r\n config = WifiConfigurationHelper.createOpenConfig(ssid);\r\n break;\r\n case WEP64:\r\n assertNotNull(\"password is empty\", password);\r\n // always use 
hex pair for WEP-40\r\n assertTrue(WifiConfigurationHelper.isHex(password, 10));\r\n config = WifiConfigurationHelper.createWepConfig(ssid, password);\r\n config.allowedGroupCiphers.set(GroupCipher.WEP40);\r\n break;\r\n case WEP128:\r\n assertNotNull(\"password is empty\", password);\r\n // always use hex pair for WEP-104\r\n assertTrue(WifiConfigurationHelper.isHex(password, 26));\r\n config = WifiConfigurationHelper.createWepConfig(ssid, password);\r\n config.allowedGroupCiphers.set(GroupCipher.WEP104);\r\n break;\r\n case WPA_TKIP:\r\n assertNotNull(\"password is empty\", password);\r\n config = WifiConfigurationHelper.createPskConfig(ssid, password);\r\n config.allowedAuthAlgorithms.set(AuthAlgorithm.OPEN);\r\n config.allowedProtocols.set(Protocol.WPA);\r\n config.allowedPairwiseCiphers.set(PairwiseCipher.TKIP);\r\n config.allowedGroupCiphers.set(GroupCipher.TKIP);\r\n break;\r\n case WPA2_AES:\r\n assertNotNull(\"password is empty\", password);\r\n config = WifiConfigurationHelper.createPskConfig(ssid, password);\r\n config.allowedAuthAlgorithms.set(AuthAlgorithm.OPEN);\r\n config.allowedProtocols.set(Protocol.RSN);\r\n config.allowedPairwiseCiphers.set(PairwiseCipher.CCMP);\r\n config.allowedGroupCiphers.set(GroupCipher.CCMP);\r\n break;\r\n default:\r\n fail(\"Not a valid security type: \" + securityType);\r\n break;\r\n }\r\n return config;\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6141998767852783, "alphanum_fraction": 0.6232156157493591, "avg_line_length": 27.910112380981445, "blob_id": "a46c1e50816ab28a8b23b0b6f318c76ed87b4493", "content_id": "32532e33c458102c73160b140b864d71550ed6a6", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2662, "license_type": "permissive", "max_line_length": 99, "num_lines": 89, "path": "/tools/incident_report/formatter.cpp", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2016 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#include \"proto_format.h\"\r\n\r\n#include <string.h>\r\n\r\nextern int const PROTO_FORMAT_STRING_POOL_SIZE;\r\nextern int const PROTO_FORMAT_ENUM_LABELS_LENGTH;\r\nextern int const PROTO_FORMAT_MESSAGES_LENGTH;\r\nextern int const PROTO_FORMAT_FIELDS_LENGTH;\r\n\r\nextern char const PROTO_FORMAT_STRING_POOL[];\r\nextern ProtoFieldFormat const PROTO_FORMAT_FIELDS[];\r\nextern ProtoEnumLabel const PROTO_FORMAT_ENUM_LABELS[];\r\nextern ProtoMessageFormat const PROTO_FORMAT_MESSAGES[];\r\n\r\nstatic const char*\r\nget_string(int index)\r\n{\r\n if (index >= 0 && index < PROTO_FORMAT_STRING_POOL_SIZE) {\r\n return PROTO_FORMAT_STRING_POOL + index;\r\n } else {\r\n // These indices all come from within the generated table, so just crash now.\r\n *(int*)NULL = 42;\r\n return NULL;\r\n }\r\n}\r\n\r\nstatic ProtoMessageFormat const*\r\nget_message(int index)\r\n{\r\n if (index >= 0 && index < 
PROTO_FORMAT_MESSAGES_LENGTH) {\r\n return PROTO_FORMAT_MESSAGES + index;\r\n } else {\r\n // These indices all come from within the generated table, so just crash now.\r\n *(int*)NULL = 42;\r\n return NULL;\r\n }\r\n}\r\n\r\nstatic int\r\ncompare_name(const char* full, const char* package, const char* clazz)\r\n{\r\n int const packageLen = strlen(package);\r\n int cmp = strncmp(full, package, packageLen);\r\n if (cmp == 0) {\r\n cmp = full[packageLen] - '.';\r\n if (cmp == 0) {\r\n return strcmp(full + packageLen, clazz);\r\n }\r\n }\r\n return cmp;\r\n}\r\n\r\nint\r\nfind_message_index(const char* name)\r\n{\r\n size_t low = 0;\r\n // Binary search over the message table, so bound by the message count.\r\n size_t high = PROTO_FORMAT_MESSAGES_LENGTH - 1;\r\n\r\n while (low <= high) {\r\n size_t mid = (low + high) >> 1;\r\n ProtoMessageFormat const* msg = get_message(mid);\r\n\r\n int cmp = compare_name(name, get_string(msg->package_name), get_string(msg->package_name));\r\n if (cmp < 0) {\r\n low = mid + 1;\r\n } else if (cmp > 0) {\r\n high = mid - 1;\r\n } else {\r\n return mid;\r\n }\r\n }\r\n return -1;\r\n}\r\n" }, { "alpha_fraction": 0.7076395750045776, "alphanum_fraction": 0.7138426303863525, "avg_line_length": 34.04705810546875, "blob_id": "680582e8b9cdf7624032a4af98adeb8710c7902f", "content_id": "2e1faa2862bdac218b8d044e05c1706328951a1f", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 6126, "license_type": "permissive", "max_line_length": 98, "num_lines": 170, "path": "/packages/SystemUI/tests/src/com/android/systemui/controls/management/ControlsRequestReceiverTest.kt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2020 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.systemui.controls.management\r\n\r\nimport android.app.ActivityManager\r\nimport android.app.ActivityManager.RunningAppProcessInfo.IMPORTANCE_FOREGROUND\r\nimport android.app.ActivityManager.RunningAppProcessInfo.IMPORTANCE_GONE\r\nimport android.content.ComponentName\r\nimport android.content.Context\r\nimport android.content.ContextWrapper\r\nimport android.content.Intent\r\nimport android.content.pm.PackageManager\r\nimport android.os.UserHandle\r\nimport android.service.controls.Control\r\nimport android.service.controls.ControlsProviderService\r\nimport android.testing.AndroidTestingRunner\r\nimport androidx.test.filters.SmallTest\r\nimport com.android.systemui.SysuiTestCase\r\nimport org.junit.Assert.assertEquals\r\nimport org.junit.Assert.assertFalse\r\nimport org.junit.Assert.assertNull\r\nimport org.junit.Assert.assertTrue\r\nimport org.junit.Assert.fail\r\nimport org.junit.Before\r\nimport org.junit.Test\r\nimport org.junit.runner.RunWith\r\nimport org.mockito.ArgumentMatchers.anyInt\r\nimport org.mockito.ArgumentMatchers.anyString\r\nimport org.mockito.ArgumentMatchers.eq\r\nimport org.mockito.Mock\r\nimport 
org.mockito.Mockito.`when`\r\nimport org.mockito.MockitoAnnotations\r\n\r\n@SmallTest\r\n@RunWith(AndroidTestingRunner::class)\r\nclass ControlsRequestReceiverTest : SysuiTestCase() {\r\n\r\n @Mock\r\n private lateinit var packageManager: PackageManager\r\n @Mock\r\n private lateinit var activityManager: ActivityManager\r\n @Mock\r\n private lateinit var control: Control\r\n\r\n private val componentName = ComponentName(\"test_pkg\", \"test_cls\")\r\n private lateinit var receiver: ControlsRequestReceiver\r\n private lateinit var wrapper: MyWrapper\r\n private lateinit var intent: Intent\r\n\r\n @Before\r\n fun setUp() {\r\n MockitoAnnotations.initMocks(this)\r\n\r\n mContext.setMockPackageManager(packageManager)\r\n `when`(packageManager.hasSystemFeature(PackageManager.FEATURE_CONTROLS)).thenReturn(true)\r\n mContext.addMockSystemService(ActivityManager::class.java, activityManager)\r\n\r\n receiver = ControlsRequestReceiver()\r\n\r\n wrapper = MyWrapper(context)\r\n\r\n intent = Intent(ControlsProviderService.ACTION_ADD_CONTROL).apply {\r\n putExtra(Intent.EXTRA_COMPONENT_NAME, componentName)\r\n putExtra(ControlsProviderService.EXTRA_CONTROL, control)\r\n }\r\n }\r\n\r\n @Test\r\n fun testPackageVerification_nonExistentPackage() {\r\n `when`(packageManager.getPackageUid(anyString(), anyInt()))\r\n .thenThrow(PackageManager.NameNotFoundException::class.java)\r\n\r\n assertFalse(ControlsRequestReceiver.isPackageInForeground(mContext, \"TEST\"))\r\n }\r\n\r\n @Test\r\n fun testPackageVerification_uidNotInForeground() {\r\n `when`(packageManager.getPackageUid(anyString(), anyInt())).thenReturn(12345)\r\n\r\n `when`(activityManager.getUidImportance(anyInt())).thenReturn(IMPORTANCE_GONE)\r\n\r\n assertFalse(ControlsRequestReceiver.isPackageInForeground(mContext, \"TEST\"))\r\n }\r\n\r\n @Test\r\n fun testPackageVerification_OK() {\r\n `when`(packageManager.getPackageUid(anyString(), anyInt())).thenReturn(12345)\r\n\r\n `when`(activityManager.getUidImportance(anyInt())).thenReturn(IMPORTANCE_GONE)\r\n `when`(activityManager.getUidImportance(12345)).thenReturn(IMPORTANCE_FOREGROUND)\r\n\r\n assertTrue(ControlsRequestReceiver.isPackageInForeground(mContext, \"TEST\"))\r\n }\r\n\r\n @Test\r\n fun testOnReceive_packageNotVerified_nameNotFound() {\r\n `when`(packageManager.getPackageUid(eq(componentName.packageName), anyInt()))\r\n .thenThrow(PackageManager.NameNotFoundException::class.java)\r\n\r\n receiver.onReceive(wrapper, intent)\r\n\r\n assertNull(wrapper.intent)\r\n }\r\n\r\n @Test\r\n fun testOnReceive_packageNotVerified_notForeground() {\r\n `when`(packageManager.getPackageUid(eq(componentName.packageName), anyInt()))\r\n .thenReturn(12345)\r\n\r\n `when`(activityManager.getUidImportance(anyInt())).thenReturn(IMPORTANCE_GONE)\r\n\r\n receiver.onReceive(wrapper, intent)\r\n\r\n assertNull(wrapper.intent)\r\n }\r\n\r\n @Test\r\n fun testOnReceive_OK() {\r\n `when`(packageManager.getPackageUid(eq(componentName.packageName), anyInt()))\r\n .thenReturn(12345)\r\n\r\n `when`(activityManager.getUidImportance(eq(12345))).thenReturn(IMPORTANCE_FOREGROUND)\r\n\r\n receiver.onReceive(wrapper, intent)\r\n\r\n wrapper.intent?.let {\r\n assertEquals(ComponentName(wrapper, ControlsRequestDialog::class.java), it.component)\r\n\r\n assertEquals(control, it.getParcelableExtra(ControlsProviderService.EXTRA_CONTROL))\r\n\r\n assertEquals(componentName, it.getParcelableExtra(Intent.EXTRA_COMPONENT_NAME))\r\n } ?: run { fail(\"Null start intent\") }\r\n }\r\n\r\n @Test\r\n fun 
testFeatureDisabled_activityNotStarted() {\r\n `when`(packageManager.hasSystemFeature(PackageManager.FEATURE_CONTROLS)).thenReturn(false)\r\n receiver.onReceive(wrapper, intent)\r\n\r\n assertNull(wrapper.intent)\r\n }\r\n\r\n class MyWrapper(context: Context) : ContextWrapper(context) {\r\n var intent: Intent? = null\r\n\r\n override fun startActivityAsUser(intent: Intent, user: UserHandle) {\r\n // Always launch activity as system\r\n assertTrue(user == UserHandle.SYSTEM)\r\n this.intent = intent\r\n }\r\n\r\n override fun startActivity(intent: Intent) {\r\n this.intent = intent\r\n }\r\n }\r\n}" }, { "alpha_fraction": 0.767627477645874, "alphanum_fraction": 0.767627477645874, "avg_line_length": 38.30356979370117, "blob_id": "a42b339a41cc058d95506b9a23b2a8bca596a7b4", "content_id": "70f850c1d62ce05b101f914ca22bdcc33462cacd", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 2255, "license_type": "permissive", "max_line_length": 85, "num_lines": 56, "path": "/packages/SystemUI/tests/src/com/android/systemui/statusbar/phone/KeyguardBottomAreaTest.kt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "package com.android.systemui.statusbar.phone\r\n\r\nimport android.testing.AndroidTestingRunner\r\nimport android.testing.TestableLooper\r\nimport android.view.LayoutInflater\r\nimport androidx.test.filters.SmallTest\r\nimport com.android.systemui.R\r\nimport com.android.systemui.SysuiTestCase\r\nimport com.android.systemui.assist.AssistManager\r\nimport com.android.systemui.plugins.ActivityStarter\r\nimport com.android.systemui.statusbar.policy.AccessibilityController\r\nimport com.android.systemui.statusbar.policy.FlashlightController\r\nimport com.android.systemui.statusbar.policy.KeyguardStateController\r\nimport com.android.systemui.tuner.TunerService\r\nimport org.junit.Before\r\nimport org.junit.Test\r\nimport org.junit.runner.RunWith\r\nimport org.mockito.Mock\r\nimport org.mockito.MockitoAnnotations\r\nimport java.util.concurrent.Executor\r\n\r\n@SmallTest\r\n@RunWith(AndroidTestingRunner::class)\r\[email protected](setAsMainLooper = true)\r\nclass KeyguardBottomAreaTest : SysuiTestCase() {\r\n\r\n @Mock\r\n private lateinit var mStatusBar: StatusBar\r\n private lateinit var mKeyguardBottomArea: KeyguardBottomAreaView\r\n\r\n @Before\r\n fun setup() {\r\n MockitoAnnotations.initMocks(this)\r\n // Mocked dependencies\r\n mDependency.injectMockDependency(AccessibilityController::class.java)\r\n mDependency.injectMockDependency(ActivityStarter::class.java)\r\n mDependency.injectMockDependency(AssistManager::class.java)\r\n mDependency.injectTestDependency(Executor::class.java, Executor { it.run() })\r\n mDependency.injectMockDependency(FlashlightController::class.java)\r\n mDependency.injectMockDependency(KeyguardStateController::class.java)\r\n mDependency.injectMockDependency(TunerService::class.java)\r\n\r\n mKeyguardBottomArea = LayoutInflater.from(mContext).inflate(\r\n R.layout.keyguard_bottom_area, null, false) as KeyguardBottomAreaView\r\n mKeyguardBottomArea.setStatusBar(mStatusBar)\r\n }\r\n\r\n @Test\r\n fun initFrom_doesntCrash() {\r\n val other = LayoutInflater.from(mContext).inflate(\r\n R.layout.keyguard_bottom_area, null, false) as KeyguardBottomAreaView\r\n\r\n other.initFrom(mKeyguardBottomArea)\r\n other.launchVoiceAssist()\r\n }\r\n}" }, { "alpha_fraction": 0.6569200754165649, "alphanum_fraction": 0.6754385828971863, "avg_line_length": 
29.090909957885742, "blob_id": "7d92e50cb38f4a2459b7dfb441263c49ee11338b", "content_id": "091a5fe83b74cf5028f8b9df9f9ec496bdac8935", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 1026, "license_type": "permissive", "max_line_length": 95, "num_lines": 33, "path": "/media/jni/soundpool/tests/build_and_run.sh", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "#!/bin/bash\r\n#\r\n# Run samples from this directory\r\n#\r\n\r\nif [ -z \"$ANDROID_BUILD_TOP\" ]; then\r\n echo \"Android build environment not set\"\r\n exit -1\r\nfi\r\n\r\n# ensure we have mm\r\n. $ANDROID_BUILD_TOP/build/envsetup.sh\r\n\r\nmm\r\n\r\necho \"waiting for device\"\r\n\r\nadb root && adb wait-for-device remount\r\n\r\necho \"========================================\"\r\necho \"testing soundpool_stress\"\r\nuidir=\"/product/media/audio/notifications\"\r\nadb push $OUT/system/bin/soundpool_stress /system/bin\r\n\r\n# test SoundPool playback of all the UI sound samples (loaded twice) looping 10s 1 thread.\r\nadb shell /system/bin/soundpool_stress -l -1 $uidir/*.ogg $uidir/*.ogg\r\n\r\n# test SoundPool playback of all the UI sound samples (repeating 3 times) looping 10s 1 thread.\r\nadb shell /system/bin/soundpool_stress -l 1 -r 3 $uidir/*.ogg\r\n\r\n# performance test SoundPool playback of all the UI sound samples (x2)\r\n# 1 iterations, looping, 1 second playback, 4 threads.\r\nadb shell /system/bin/soundpool_stress -i 1 -l -1 -p 1 -t 4 $uidir/*.ogg $uidir/*.ogg\r\n" }, { "alpha_fraction": 0.5231607556343079, "alphanum_fraction": 0.524523138999939, "avg_line_length": 40.86131286621094, "blob_id": "a9edd578d1cf00b42dbfea790f58d74adab54582", "content_id": "a71a62791f6fea57dff2885cb2f411ede0112d12", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 5872, "license_type": "permissive", "max_line_length": 96, "num_lines": 137, "path": "/media/mca/filterfw/jni/jni_gl_frame.h", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2011 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#ifndef ANDROID_FILTERFW_JNI_GL_FRAME_H\r\n#define ANDROID_FILTERFW_JNI_GL_FRAME_H\r\n\r\n#include <jni.h>\r\n\r\n#ifdef __cplusplus\r\nextern \"C\" {\r\n#endif\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLFrame_nativeAllocate(JNIEnv* env,\r\n jobject thiz,\r\n jobject gl_env,\r\n jint width,\r\n jint height);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLFrame_nativeAllocateWithTexture(JNIEnv* env,\r\n jobject thiz,\r\n jobject gl_env,\r\n jint tex_id,\r\n jint width,\r\n jint height);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLFrame_nativeAllocateWithFbo(JNIEnv* env,\r\n jobject thiz,\r\n jobject gl_env,\r\n jint fbo_id,\r\n jint 
width,\r\n jint height);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLFrame_nativeAllocateExternal(JNIEnv* env,\r\n jobject thiz,\r\n jobject gl_env);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLFrame_nativeDeallocate(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLFrame_setNativeInts(JNIEnv* env, jobject thiz, jintArray ints);\r\n\r\nJNIEXPORT jintArray JNICALL\r\nJava_android_filterfw_core_GLFrame_getNativeInts(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLFrame_setNativeFloats(JNIEnv* env, jobject thiz, jfloatArray ints);\r\n\r\nJNIEXPORT jfloatArray JNICALL\r\nJava_android_filterfw_core_GLFrame_getNativeFloats(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLFrame_setNativeData(JNIEnv* env,\r\n jobject thiz,\r\n jbyteArray data,\r\n jint offset,\r\n jint length);\r\n\r\nJNIEXPORT jbyteArray JNICALL\r\nJava_android_filterfw_core_GLFrame_getNativeData(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLFrame_setNativeBitmap(JNIEnv* env,\r\n jobject thiz,\r\n jobject bitmap,\r\n jint size);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLFrame_getNativeBitmap(JNIEnv* env, jobject thiz, jobject bitmap);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLFrame_setNativeViewport(JNIEnv* env,\r\n jobject thiz,\r\n jint x,\r\n jint y,\r\n jint width,\r\n jint height);\r\n\r\nJNIEXPORT jint JNICALL\r\nJava_android_filterfw_core_GLFrame_getNativeTextureId(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jint JNICALL\r\nJava_android_filterfw_core_GLFrame_getNativeFboId(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLFrame_generateNativeMipMap(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLFrame_setNativeTextureParam(JNIEnv* env,\r\n jobject thiz,\r\n jint param,\r\n jint value);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLFrame_nativeResetParams(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLFrame_nativeCopyFromNative(JNIEnv* env,\r\n jobject thiz,\r\n jobject frame);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLFrame_nativeCopyFromGL(JNIEnv* env,\r\n jobject thiz,\r\n jobject frame);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLFrame_nativeFocus(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLFrame_nativeReattachTexToFbo(JNIEnv* env, jobject thiz);\r\n\r\nJNIEXPORT jboolean JNICALL\r\nJava_android_filterfw_core_GLFrame_nativeDetachTexFromFbo(JNIEnv* env, jobject thiz);\r\n\r\n#ifdef __cplusplus\r\n}\r\n#endif\r\n\r\n#endif /* ANDROID_FILTERFW_JNI_GL_FRAME_H */\r\n" }, { "alpha_fraction": 0.7439024448394775, "alphanum_fraction": 0.7439024448394775, "avg_line_length": 29.538461685180664, "blob_id": "75675fa713cfff0316a257f9788018d11d97825d", "content_id": "84333a7514a5e9d12d5f3cf46b6acd6491a578ca", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "HTML", "length_bytes": 410, "license_type": "permissive", "max_line_length": 93, "num_lines": 13, "path": "/core/java/android/app/admin/package.html", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "<HTML>\r\n<BODY>\r\n<p>Provides device administration 
features at the system level, allowing you to create\r\nsecurity-aware applications that are useful in enterprise settings, in which IT professionals\r\nrequire rich control over employee devices.</p>\r\n\r\n<p>For more information, see the\r\n<a href=\"{@docRoot}guide/topics/admin/device-admin.html\">Device Administration</a> \r\nguide.</p>\r\n{@more}\r\n\r\n</BODY>\r\n</HTML>\r\n" }, { "alpha_fraction": 0.6639492511749268, "alphanum_fraction": 0.6775362491607666, "avg_line_length": 35.423728942871094, "blob_id": "e67f0f890d60888e51fbf1f18dcea668e9550714", "content_id": "15a09149fdc68a84f098fd1ec8938d63f704ba48", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 2208, "license_type": "permissive", "max_line_length": 140, "num_lines": 59, "path": "/packages/SystemUI/tools/lint/run_lint.sh", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "#! /bin/bash\r\n\r\nif [[ $# -ne 2 ]]; then\r\n echo \"Error: Incorrect number of arguments\" >&2\r\n echo \"Usage: ./run_lint.sh <repo_root> <CL_SHA>\" >&2\r\n exit 100\r\nfi\r\n\r\ngit show --name-only --pretty=format: $2 | grep packages/SystemUI/ > /dev/null\r\nexitcode=$?\r\nif [[ exitcode -eq 1 ]]; then\r\n exit 0\r\nfi\r\n\r\nif [[ -z $ANDROID_BUILD_TOP ]]; then\r\n echo \"Error: ANDROID_BUILD_TOP must be set\" >&2\r\n echo \"Try setting up your environment first:\" >&2\r\n echo \" source build/envsetup.sh && lunch <target>\" >&2\r\n exit 101\r\nfi\r\n\r\n# TODO: Run lint as part of the build so we can specify the dependency properly\r\nsystemuijarpath=\"out/soong/.intermediates/frameworks/base/packages/SystemUI/SystemUI-core/android_common/combined/SystemUI-core.jar\"\r\nif [[ ! -f $ANDROID_BUILD_TOP/$systemuijarpath ]]; then\r\n echo \"Error: Classes.jar file not found\" >&2\r\n echo \"Try building that jar file manually:\" >&2\r\n echo \" m -j16 out/soong/.intermediates/frameworks/base/packages/SystemUI/SystemUI-core/android_common/combined/SystemUI-core.jar\" >&2\r\n exit 102\r\nfi\r\n\r\nREPO_ROOT=$1\r\n${REPO_ROOT}/prebuilts/devtools/tools/lint \\\r\n . \\\r\n --exitcode \\\r\n -Werror \\\r\n --config ${REPO_ROOT}/frameworks/base/packages/SystemUI/tools/lint/lint.xml \\\r\n --html ${REPO_ROOT}/out/lint_output.html \\\r\n --baseline ${REPO_ROOT}/frameworks/base/packages/SystemUI/tools/lint/baseline.xml \\\r\n --remove-fixed\r\nexitcode=$?\r\nif [[ exitcode -eq 1 ]]; then\r\n cat >&2 <<EOF\r\n\r\nPlease check the HTML results file and fix the errors.\r\nIf the error cannot be fixed immediately, there are 3 possible resolutions:\r\n1. Use tools:ignore or @SuppressLint annotation. This is preferred\r\n for cases where the lint violation is intended, so that reviewers\r\n can review whether the suppression is appropriate.\r\n2. Use tools/lint.xml to ignore a lint check which we don't care\r\n about for any file, or checks that are not actionable by the\r\n CL author (e.g. translation issues)\r\n3. 
If there are lint errors that should be fixed, but cannot be done\r\n immediately for some reason, run ./tools/lint/update_baseline.sh to\r\n add them to baseline.xml.\r\n\r\nEOF\r\nfi\r\n\r\nexit $exitcode\r\n" }, { "alpha_fraction": 0.6754443645477295, "alphanum_fraction": 0.680052638053894, "avg_line_length": 35.50617218017578, "blob_id": "f78d08dd4ce54141844abb7275149ccb52a1b059", "content_id": "69fa301fdf138eafcf4c834ab0bccacc4e3604a2", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 3038, "license_type": "permissive", "max_line_length": 102, "num_lines": 81, "path": "/tests/HierarchyViewerTest/src/com/android/test/hierarchyviewer/MainActivityTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "package com.android.test.hierarchyviewer;\r\n\r\nimport android.test.ActivityInstrumentationTestCase2;\r\nimport android.view.View;\r\n\r\nimport java.io.ByteArrayOutputStream;\r\nimport java.lang.reflect.Constructor;\r\nimport java.lang.reflect.InvocationTargetException;\r\nimport java.lang.reflect.Method;\r\nimport java.util.List;\r\nimport java.util.Map;\r\n\r\npublic class MainActivityTest extends ActivityInstrumentationTestCase2<MainActivity> {\r\n private MainActivity mActivity;\r\n private View mTextView;\r\n\r\n\r\n public MainActivityTest() {\r\n super(MainActivity.class);\r\n }\r\n\r\n @Override\r\n protected void setUp() throws Exception {\r\n super.setUp();\r\n\r\n mActivity = getActivity();\r\n mTextView = mActivity.findViewById(R.id.textView);\r\n }\r\n\r\n private byte[] encode(View view) throws ClassNotFoundException, NoSuchMethodException,\r\n IllegalAccessException, InstantiationException, InvocationTargetException {\r\n ByteArrayOutputStream baos = new ByteArrayOutputStream(1024 * 1024);\r\n\r\n Object encoder = createEncoder(baos);\r\n invokeMethod(View.class, view, \"encode\", encoder);\r\n invokeMethod(encoder.getClass(), encoder, \"endStream\");\r\n\r\n return baos.toByteArray();\r\n }\r\n\r\n private Object invokeMethod(Class targetClass, Object target, String methodName, Object... 
params)\r\n throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {\r\n Class[] paramClasses = new Class[params.length];\r\n for (int i = 0; i < params.length; i++) {\r\n paramClasses[i] = params[i].getClass();\r\n }\r\n Method method = targetClass.getDeclaredMethod(methodName, paramClasses);\r\n method.setAccessible(true);\r\n return method.invoke(target, params);\r\n }\r\n\r\n private Object createEncoder(ByteArrayOutputStream baos) throws ClassNotFoundException,\r\n NoSuchMethodException, IllegalAccessException, InvocationTargetException,\r\n InstantiationException {\r\n Class clazz = Class.forName(\"android.view.ViewHierarchyEncoder\");\r\n Constructor constructor = clazz.getConstructor(ByteArrayOutputStream.class);\r\n return constructor.newInstance(baos);\r\n }\r\n\r\n public void testTextView() throws Exception {\r\n byte[] data = encode(mTextView);\r\n assertNotNull(data);\r\n assertTrue(data.length > 0);\r\n\r\n ViewDumpParser parser = new ViewDumpParser();\r\n parser.parse(data);\r\n\r\n List<Map<Short, Object>> views = parser.getViews();\r\n Map<String, Short> propertyNameTable = parser.getIds();\r\n\r\n assertEquals(1, views.size());\r\n assertNotNull(propertyNameTable);\r\n\r\n Map<Short, Object> textViewProperties = views.get(0);\r\n assertEquals(\"android.widget.TextView\",\r\n textViewProperties.get(propertyNameTable.get(\"meta:__name__\")));\r\n\r\n assertEquals(mActivity.getString(R.string.test),\r\n textViewProperties.get(propertyNameTable.get(\"text:text\")));\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6286721229553223, "alphanum_fraction": 0.641598105430603, "avg_line_length": 19.325000762939453, "blob_id": "b482c489dc2a1c8cd8d2f017c5cff2247c724718", "content_id": "083fd6c42bcf03041566cd1bd05b2d53309b5ea8", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 851, "license_type": "permissive", "max_line_length": 61, "num_lines": 40, "path": "/tools/aapt/tests/MockCacheUpdater.h", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "//\r\n// Copyright 2011 The Android Open Source Project\r\n//\r\n#ifndef MOCKCACHEUPDATER_H\r\n#define MOCKCACHEUPDATER_H\r\n\r\n#include <utils/String8.h>\r\n#include \"CacheUpdater.h\"\r\n\r\nusing namespace android;\r\n\r\nclass MockCacheUpdater : public CacheUpdater {\r\npublic:\r\n\r\n MockCacheUpdater()\r\n : deleteCount(0), processCount(0) { };\r\n\r\n // Make sure all the directories along this path exist\r\n virtual void ensureDirectoriesExist(String8 path)\r\n {\r\n // Nothing to do\r\n };\r\n\r\n // Delete a file\r\n virtual void deleteFile(String8 path) {\r\n deleteCount++;\r\n };\r\n\r\n // Process an image from source out to dest\r\n virtual void processImage(String8 source, String8 dest) {\r\n processCount++;\r\n };\r\n\r\n // DATA MEMBERS\r\n int deleteCount;\r\n int processCount;\r\nprivate:\r\n};\r\n\r\n#endif // MOCKCACHEUPDATER_H" }, { "alpha_fraction": 0.6987038850784302, "alphanum_fraction": 0.702081024646759, "avg_line_length": 40.63035202026367, "blob_id": "17dfd38fa0831dfb272928d7b3e9ae8c40fb153a", "content_id": "3acba5201c0f3b0509b45f6c71f3437998293655", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 10956, "license_type": "permissive", "max_line_length": 99, "num_lines": 257, "path": "/packages/SystemUI/tests/src/com/android/systemui/media/MediaControlPanelTest.kt", 
"repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2020 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.systemui.media\r\n\r\nimport android.content.res.ColorStateList\r\nimport android.graphics.Color\r\nimport android.graphics.drawable.GradientDrawable\r\nimport android.graphics.drawable.RippleDrawable\r\nimport android.media.MediaMetadata\r\nimport android.media.session.MediaSession\r\nimport android.media.session.PlaybackState\r\nimport android.testing.AndroidTestingRunner\r\nimport android.testing.TestableLooper\r\nimport android.view.View\r\nimport android.view.ViewGroup\r\nimport android.widget.FrameLayout\r\nimport android.widget.ImageButton\r\nimport android.widget.ImageView\r\nimport android.widget.SeekBar\r\nimport android.widget.TextView\r\nimport androidx.constraintlayout.widget.ConstraintSet\r\nimport androidx.lifecycle.LiveData\r\nimport androidx.test.filters.SmallTest\r\nimport com.android.systemui.R\r\nimport com.android.systemui.SysuiTestCase\r\nimport com.android.systemui.plugins.ActivityStarter\r\nimport com.android.systemui.util.animation.TransitionLayout\r\nimport com.android.systemui.util.concurrency.FakeExecutor\r\nimport com.android.systemui.util.time.FakeSystemClock\r\nimport com.google.common.truth.Truth.assertThat\r\nimport org.junit.After\r\nimport org.junit.Before\r\nimport org.junit.Rule\r\nimport org.junit.Test\r\nimport org.junit.runner.RunWith\r\nimport org.mockito.ArgumentCaptor\r\nimport org.mockito.Mock\r\nimport org.mockito.Mockito.mock\r\nimport org.mockito.Mockito.verify\r\nimport org.mockito.Mockito.`when` as whenever\r\nimport org.mockito.junit.MockitoJUnit\r\n\r\nprivate const val KEY = \"TEST_KEY\"\r\nprivate const val APP = \"APP\"\r\nprivate const val BG_COLOR = Color.RED\r\nprivate const val PACKAGE = \"PKG\"\r\nprivate const val ARTIST = \"ARTIST\"\r\nprivate const val TITLE = \"TITLE\"\r\nprivate const val DEVICE_NAME = \"DEVICE_NAME\"\r\nprivate const val SESSION_KEY = \"SESSION_KEY\"\r\nprivate const val SESSION_ARTIST = \"SESSION_ARTIST\"\r\nprivate const val SESSION_TITLE = \"SESSION_TITLE\"\r\nprivate const val USER_ID = 0\r\n\r\n@SmallTest\r\n@RunWith(AndroidTestingRunner::class)\r\[email protected](setAsMainLooper = true)\r\npublic class MediaControlPanelTest : SysuiTestCase() {\r\n\r\n private lateinit var player: MediaControlPanel\r\n\r\n private lateinit var bgExecutor: FakeExecutor\r\n @Mock private lateinit var activityStarter: ActivityStarter\r\n\r\n @Mock private lateinit var holder: PlayerViewHolder\r\n @Mock private lateinit var view: TransitionLayout\r\n @Mock private lateinit var seekBarViewModel: SeekBarViewModel\r\n @Mock private lateinit var seekBarData: LiveData<SeekBarViewModel.Progress>\r\n @Mock private lateinit var mediaViewController: MediaViewController\r\n @Mock private lateinit var expandedSet: ConstraintSet\r\n @Mock private lateinit var collapsedSet: 
ConstraintSet\r\n private lateinit var appIcon: ImageView\r\n private lateinit var appName: TextView\r\n private lateinit var albumView: ImageView\r\n private lateinit var titleText: TextView\r\n private lateinit var artistText: TextView\r\n private lateinit var seamless: ViewGroup\r\n private lateinit var seamlessIcon: ImageView\r\n private lateinit var seamlessText: TextView\r\n private lateinit var seamlessFallback: ImageView\r\n private lateinit var seekBar: SeekBar\r\n private lateinit var elapsedTimeView: TextView\r\n private lateinit var totalTimeView: TextView\r\n private lateinit var action0: ImageButton\r\n private lateinit var action1: ImageButton\r\n private lateinit var action2: ImageButton\r\n private lateinit var action3: ImageButton\r\n private lateinit var action4: ImageButton\r\n\r\n private lateinit var session: MediaSession\r\n private val device = MediaDeviceData(true, null, DEVICE_NAME)\r\n private val disabledDevice = MediaDeviceData(false, null, null)\r\n\r\n @JvmField @Rule val mockito = MockitoJUnit.rule()\r\n\r\n @Before\r\n fun setUp() {\r\n bgExecutor = FakeExecutor(FakeSystemClock())\r\n whenever(mediaViewController.expandedLayout).thenReturn(expandedSet)\r\n whenever(mediaViewController.collapsedLayout).thenReturn(collapsedSet)\r\n\r\n player = MediaControlPanel(context, bgExecutor, activityStarter, mediaViewController,\r\n seekBarViewModel)\r\n whenever(seekBarViewModel.progress).thenReturn(seekBarData)\r\n\r\n // Mock out a view holder for the player to attach to.\r\n whenever(holder.player).thenReturn(view)\r\n appIcon = ImageView(context)\r\n whenever(holder.appIcon).thenReturn(appIcon)\r\n appName = TextView(context)\r\n whenever(holder.appName).thenReturn(appName)\r\n albumView = ImageView(context)\r\n whenever(holder.albumView).thenReturn(albumView)\r\n titleText = TextView(context)\r\n whenever(holder.titleText).thenReturn(titleText)\r\n artistText = TextView(context)\r\n whenever(holder.artistText).thenReturn(artistText)\r\n seamless = FrameLayout(context)\r\n val seamlessBackground = mock(RippleDrawable::class.java)\r\n seamless.foreground = seamlessBackground\r\n whenever(seamlessBackground.getDrawable(0)).thenReturn(mock(GradientDrawable::class.java))\r\n whenever(holder.seamless).thenReturn(seamless)\r\n seamlessIcon = ImageView(context)\r\n whenever(holder.seamlessIcon).thenReturn(seamlessIcon)\r\n seamlessText = TextView(context)\r\n whenever(holder.seamlessText).thenReturn(seamlessText)\r\n seamlessFallback = ImageView(context)\r\n whenever(holder.seamlessFallback).thenReturn(seamlessFallback)\r\n seekBar = SeekBar(context)\r\n whenever(holder.seekBar).thenReturn(seekBar)\r\n elapsedTimeView = TextView(context)\r\n whenever(holder.elapsedTimeView).thenReturn(elapsedTimeView)\r\n totalTimeView = TextView(context)\r\n whenever(holder.totalTimeView).thenReturn(totalTimeView)\r\n action0 = ImageButton(context)\r\n whenever(holder.action0).thenReturn(action0)\r\n action1 = ImageButton(context)\r\n whenever(holder.action1).thenReturn(action1)\r\n action2 = ImageButton(context)\r\n whenever(holder.action2).thenReturn(action2)\r\n action3 = ImageButton(context)\r\n whenever(holder.action3).thenReturn(action3)\r\n action4 = ImageButton(context)\r\n whenever(holder.action4).thenReturn(action4)\r\n\r\n // Create media session\r\n val metadataBuilder = MediaMetadata.Builder().apply {\r\n putString(MediaMetadata.METADATA_KEY_ARTIST, SESSION_ARTIST)\r\n putString(MediaMetadata.METADATA_KEY_TITLE, SESSION_TITLE)\r\n }\r\n val playbackBuilder = 
PlaybackState.Builder().apply {\r\n setState(PlaybackState.STATE_PAUSED, 6000L, 1f)\r\n setActions(PlaybackState.ACTION_PLAY)\r\n }\r\n session = MediaSession(context, SESSION_KEY).apply {\r\n setMetadata(metadataBuilder.build())\r\n setPlaybackState(playbackBuilder.build())\r\n }\r\n session.setActive(true)\r\n }\r\n\r\n @After\r\n fun tearDown() {\r\n session.release()\r\n player.onDestroy()\r\n }\r\n\r\n @Test\r\n fun bindWhenUnattached() {\r\n val state = MediaData(USER_ID, true, BG_COLOR, APP, null, ARTIST, TITLE, null, emptyList(),\r\n emptyList(), PACKAGE, null, null, device, true, null)\r\n player.bind(state)\r\n assertThat(player.isPlaying()).isFalse()\r\n }\r\n\r\n @Test\r\n fun bindText() {\r\n player.attach(holder)\r\n val state = MediaData(USER_ID, true, BG_COLOR, APP, null, ARTIST, TITLE, null, emptyList(),\r\n emptyList(), PACKAGE, session.getSessionToken(), null, device, true, null)\r\n player.bind(state)\r\n assertThat(appName.getText()).isEqualTo(APP)\r\n assertThat(titleText.getText()).isEqualTo(TITLE)\r\n assertThat(artistText.getText()).isEqualTo(ARTIST)\r\n }\r\n\r\n @Test\r\n fun bindBackgroundColor() {\r\n player.attach(holder)\r\n val state = MediaData(USER_ID, true, BG_COLOR, APP, null, ARTIST, TITLE, null, emptyList(),\r\n emptyList(), PACKAGE, session.getSessionToken(), null, device, true, null)\r\n player.bind(state)\r\n val list = ArgumentCaptor.forClass(ColorStateList::class.java)\r\n verify(view).setBackgroundTintList(list.capture())\r\n assertThat(list.value).isEqualTo(ColorStateList.valueOf(BG_COLOR))\r\n }\r\n\r\n @Test\r\n fun bindDevice() {\r\n player.attach(holder)\r\n val state = MediaData(USER_ID, true, BG_COLOR, APP, null, ARTIST, TITLE, null, emptyList(),\r\n emptyList(), PACKAGE, session.getSessionToken(), null, device, true, null)\r\n player.bind(state)\r\n assertThat(seamlessText.getText()).isEqualTo(DEVICE_NAME)\r\n assertThat(seamless.isEnabled()).isTrue()\r\n }\r\n\r\n @Test\r\n fun bindDisabledDevice() {\r\n seamless.id = 1\r\n seamlessFallback.id = 2\r\n player.attach(holder)\r\n val state = MediaData(USER_ID, true, BG_COLOR, APP, null, ARTIST, TITLE, null, emptyList(),\r\n emptyList(), PACKAGE, session.getSessionToken(), null, disabledDevice, true, null)\r\n player.bind(state)\r\n verify(expandedSet).setVisibility(seamless.id, View.GONE)\r\n verify(expandedSet).setVisibility(seamlessFallback.id, View.VISIBLE)\r\n verify(collapsedSet).setVisibility(seamless.id, View.GONE)\r\n verify(collapsedSet).setVisibility(seamlessFallback.id, View.VISIBLE)\r\n }\r\n\r\n @Test\r\n fun bindNullDevice() {\r\n player.attach(holder)\r\n val state = MediaData(USER_ID, true, BG_COLOR, APP, null, ARTIST, TITLE, null, emptyList(),\r\n emptyList(), PACKAGE, session.getSessionToken(), null, null, true, null)\r\n player.bind(state)\r\n assertThat(seamless.isEnabled()).isTrue()\r\n assertThat(seamlessText.getText()).isEqualTo(context.getResources().getString(\r\n com.android.internal.R.string.ext_media_seamless_action))\r\n }\r\n\r\n @Test\r\n fun bindDeviceResumptionPlayer() {\r\n player.attach(holder)\r\n val state = MediaData(USER_ID, true, BG_COLOR, APP, null, ARTIST, TITLE, null, emptyList(),\r\n emptyList(), PACKAGE, session.getSessionToken(), null, device, true, null,\r\n resumption = true)\r\n player.bind(state)\r\n assertThat(seamlessText.getText()).isEqualTo(DEVICE_NAME)\r\n assertThat(seamless.isEnabled()).isFalse()\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6027511358261108, "alphanum_fraction": 0.6065027117729187, "avg_line_length": 32.271427154541016, 
"blob_id": "f16800860f7b78cd73e604fa4555292d652eb446", "content_id": "fe33a5855cd690314265efe7c751e2e2b8bf2724", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2399, "license_type": "permissive", "max_line_length": 89, "num_lines": 70, "path": "/core/java/com/android/server/backup/ShortcutBackupHelper.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2016 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\npackage com.android.server.backup;\r\n\r\nimport android.app.backup.BlobBackupHelper;\r\nimport android.content.Context;\r\nimport android.content.pm.IShortcutService;\r\nimport android.os.ServiceManager;\r\nimport android.os.UserHandle;\r\nimport android.util.Slog;\r\n\r\npublic class ShortcutBackupHelper extends BlobBackupHelper {\r\n private static final String TAG = \"ShortcutBackupAgent\";\r\n private static final int BLOB_VERSION = 1;\r\n\r\n private static final String KEY_USER_FILE = \"shortcutuser.xml\";\r\n\r\n public ShortcutBackupHelper() {\r\n super(BLOB_VERSION, KEY_USER_FILE);\r\n }\r\n\r\n private IShortcutService getShortcutService() {\r\n return IShortcutService.Stub.asInterface(\r\n ServiceManager.getService(Context.SHORTCUT_SERVICE));\r\n }\r\n\r\n @Override\r\n protected byte[] getBackupPayload(String key) {\r\n switch (key) {\r\n case KEY_USER_FILE:\r\n try {\r\n return getShortcutService().getBackupPayload(UserHandle.USER_SYSTEM);\r\n } catch (Exception e) {\r\n Slog.wtf(TAG, \"Backup failed\", e);\r\n }\r\n break;\r\n default:\r\n Slog.w(TAG, \"Unknown key: \" + key);\r\n }\r\n return null;\r\n }\r\n\r\n @Override\r\n protected void applyRestoredPayload(String key, byte[] payload) {\r\n switch (key) {\r\n case KEY_USER_FILE:\r\n try {\r\n getShortcutService().applyRestore(payload, UserHandle.USER_SYSTEM);\r\n } catch (Exception e) {\r\n Slog.wtf(TAG, \"Restore failed\", e);\r\n }\r\n break;\r\n default:\r\n Slog.w(TAG, \"Unknown key: \" + key);\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.7018181681632996, "alphanum_fraction": 0.7036363482475281, "avg_line_length": 23, "blob_id": "2e67ba4d434765a5e4f1af686559414d612582e7", "content_id": "52e1d941209f2c3bd2a6e2e0f8025d12c77c095c", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 550, "license_type": "permissive", "max_line_length": 55, "num_lines": 22, "path": "/core/tests/packagemanagertests/Android.mk", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "LOCAL_PATH:= $(call my-dir)\r\ninclude $(CLEAR_VARS)\r\n\r\n# We only want this apk build for tests.\r\nLOCAL_MODULE_TAGS := tests\r\n\r\n# Include all test java files.\r\nLOCAL_SRC_FILES := \\\r\n $(call all-java-files-under, src)\r\n\r\nLOCAL_STATIC_JAVA_LIBRARIES := \\\r\n androidx.test.rules \\\r\n 
frameworks-base-testutils \\\r\n mockito-target-minus-junit4\r\n\r\nLOCAL_JAVA_LIBRARIES := android.test.runner\r\nLOCAL_PACKAGE_NAME := FrameworksCorePackageManagerTests\r\nLOCAL_PRIVATE_PLATFORM_APIS := true\r\n\r\nLOCAL_CERTIFICATE := platform\r\n\r\ninclude $(BUILD_PACKAGE)\r\n" }, { "alpha_fraction": 0.7786499261856079, "alphanum_fraction": 0.7786499261856079, "avg_line_length": 47.07692337036133, "blob_id": "9f4d183aed8ddd428d37fe9ce4afdda2483576d8", "content_id": "90c91d2c44ab6534fc4ee6f999697a284b58d800", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 1274, "license_type": "permissive", "max_line_length": 85, "num_lines": 26, "path": "/media/lib/tvremote/README.txt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "This library (com.android.media.tv.remoteprovider.jar) is a shared java library\r\ncontaining classes required by unbundled atv remote providers.\r\n\r\n--- Rules of this library ---\r\no This library is effectively a System API for unbundled remote service providers\r\n that may be distributed outside the system image. So it MUST BE API STABLE.\r\n You can add but not remove. The rules are the same as for the\r\n public platform SDK API.\r\no This library can see and instantiate internal platform classes, but it must not\r\n expose them in any public method (or by extending them via inheritance). This would\r\n break clients of the library because they cannot see the internal platform classes.\r\n\r\nThis library is distributed in the system image, and loaded as\r\na shared library. So you can change the implementation, but not\r\nthe interface. In this way it is like framework.jar.\r\n\r\n--- Why does this library exist? ---\r\n\r\nUnbundled atv remote providers (such as Emote app) cannot use internal\r\nplatform classes.\r\n\r\nThis library will eventually be replaced when the inputmanager\r\ninfrastructure is ready with APIs allowing unbundled system apps to\r\ninject events into uhid.\r\nThat API isn't ready yet so this library is a compromise to\r\nmake new capabilities available to the system." }, { "alpha_fraction": 0.5756331086158752, "alphanum_fraction": 0.5862423181533813, "avg_line_length": 26.95049476623535, "blob_id": "6bf2910884810e261e6f35e1cdb4fe6333e367a1", "content_id": "81aae596d412a5fb52e157d163d530ebe4ded4aa", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2922, "license_type": "permissive", "max_line_length": 81, "num_lines": 101, "path": "/tools/aapt/tests/FileFinder_test.cpp", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "//\r\n// Copyright 2011 The Android Open Source Project\r\n//\r\n#include <utils/Vector.h>\r\n#include <utils/KeyedVector.h>\r\n#include <iostream>\r\n#include <cassert>\r\n#include <utils/String8.h>\r\n#include <utility>\r\n\r\n#include \"DirectoryWalker.h\"\r\n#include \"MockDirectoryWalker.h\"\r\n#include \"FileFinder.h\"\r\n\r\nusing namespace android;\r\n\r\nusing std::pair;\r\nusing std::cout;\r\nusing std::endl;\r\n\r\n\r\n\r\nint main()\r\n{\r\n\r\n cout << \"\\n\\n STARTING FILE FINDER TESTS\" << endl;\r\n String8 path(\"ApiDemos\");\r\n\r\n // Storage to pass to findFiles()\r\n KeyedVector<String8,time_t> testStorage;\r\n\r\n // Mock Directory Walker initialization. 
First data, then sdw\r\n Vector< pair<String8,time_t> > data;\r\n data.push( pair<String8,time_t>(String8(\"hello.png\"),3) );\r\n data.push( pair<String8,time_t>(String8(\"world.PNG\"),3) );\r\n data.push( pair<String8,time_t>(String8(\"foo.pNg\"),3) );\r\n // Neither of these should be found\r\n data.push( pair<String8,time_t>(String8(\"hello.jpg\"),3) );\r\n data.push( pair<String8,time_t>(String8(\".hidden.png\"),3));\r\n\r\n DirectoryWalker* sdw = new StringDirectoryWalker(path,data);\r\n\r\n // Extensions to look for\r\n Vector<String8> exts;\r\n exts.push(String8(\".png\"));\r\n\r\n errno = 0;\r\n\r\n // Make sure we get a valid mock directory walker\r\n // Make sure we finish without errors\r\n cout << \"Checking DirectoryWalker...\";\r\n assert(sdw != NULL);\r\n cout << \"PASSED\" << endl;\r\n\r\n // Make sure we finish without errors\r\n cout << \"Running findFiles()...\";\r\n bool findStatus = FileFinder::findFiles(path,exts, testStorage, sdw);\r\n assert(findStatus);\r\n cout << \"PASSED\" << endl;\r\n\r\n const size_t SIZE_EXPECTED = 3;\r\n // Check to make sure we have the right number of things in our storage\r\n cout << \"Running size comparison: Size is \" << testStorage.size() << \", \";\r\n cout << \"Expected \" << SIZE_EXPECTED << \"...\";\r\n if(testStorage.size() == SIZE_EXPECTED)\r\n cout << \"PASSED\" << endl;\r\n else {\r\n cout << \"FAILED\" << endl;\r\n errno++;\r\n }\r\n\r\n // Check to make sure that each of our found items has the right extension\r\n cout << \"Checking Returned Extensions...\";\r\n bool extsOkay = true;\r\n String8 wrongExts;\r\n for (size_t i = 0; i < SIZE_EXPECTED; ++i) {\r\n String8 testExt(testStorage.keyAt(i).getPathExtension());\r\n testExt.toLower();\r\n if (testExt != \".png\") {\r\n wrongExts += testStorage.keyAt(i);\r\n wrongExts += \"\\n\";\r\n extsOkay = false;\r\n }\r\n }\r\n if (extsOkay)\r\n cout << \"PASSED\" << endl;\r\n else {\r\n cout << \"FAILED\" << endl;\r\n cout << \"The following extensions didn't check out\" << endl << wrongExts;\r\n }\r\n\r\n // Clean up\r\n delete sdw;\r\n\r\n if(errno == 0) {\r\n cout << \"ALL TESTS PASSED\" << endl;\r\n } else {\r\n cout << errno << \" TESTS FAILED\" << endl;\r\n }\r\n return errno;\r\n}" }, { "alpha_fraction": 0.5637003779411316, "alphanum_fraction": 0.6053137183189392, "avg_line_length": 31.95652198791504, "blob_id": "5e5dd2faedcc774af95af1c0ff0b1c4f128445a5", "content_id": "2b3a8cb4e33f6c7d36ce0966877cfca10159cf65", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 3124, "license_type": "permissive", "max_line_length": 75, "num_lines": 92, "path": "/tests/HwAccelerationTest/src/com/android/test/hwui/ScaledPathsActivity.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2010 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage 
com.android.test.hwui;\r\n\r\nimport android.app.Activity;\r\nimport android.content.Context;\r\nimport android.graphics.Canvas;\r\nimport android.graphics.Paint;\r\nimport android.graphics.Path;\r\nimport android.graphics.RectF;\r\nimport android.os.Bundle;\r\nimport android.view.View;\r\n\r\n@SuppressWarnings({\"UnusedDeclaration\"})\r\npublic class ScaledPathsActivity extends Activity {\r\n @Override\r\n protected void onCreate(Bundle savedInstanceState) {\r\n super.onCreate(savedInstanceState);\r\n final PathsView view = new PathsView(this);\r\n setContentView(view);\r\n }\r\n\r\n public static class PathsView extends View {\r\n private final Paint mPathPaint;\r\n private final Path mPath;\r\n private final RectF mPathBounds = new RectF();\r\n\r\n public PathsView(Context c) {\r\n super(c);\r\n\r\n mPathPaint = new Paint();\r\n mPathPaint.setAntiAlias(true);\r\n mPathPaint.setColor(0xff0000ff);\r\n mPathPaint.setStrokeWidth(5.0f);\r\n mPathPaint.setStyle(Paint.Style.FILL);\r\n\r\n mPath = new Path();\r\n mPath.moveTo(0.0f, 0.0f);\r\n mPath.cubicTo(0.0f, 0.0f, 100.0f, 150.0f, 100.0f, 200.0f);\r\n mPath.cubicTo(100.0f, 200.0f, 50.0f, 300.0f, -80.0f, 200.0f);\r\n mPath.cubicTo(-80.0f, 200.0f, 100.0f, 200.0f, 200.0f, 0.0f);\r\n\r\n mPath.computeBounds(mPathBounds, true);\r\n }\r\n\r\n @Override\r\n protected void onDraw(Canvas canvas) {\r\n super.onDraw(canvas);\r\n canvas.drawARGB(255, 255, 255, 255);\r\n\r\n mPathPaint.setColor(0xff0000ff);\r\n mPathPaint.setStyle(Paint.Style.FILL);\r\n\r\n canvas.save();\r\n drawPath(canvas, 1.0f, 1.0f);\r\n drawPath(canvas, 2.0f, 2.0f);\r\n drawPath(canvas, 4.0f, 4.0f);\r\n canvas.restore();\r\n\r\n mPathPaint.setColor(0xffff0000);\r\n mPathPaint.setStyle(Paint.Style.STROKE);\r\n\r\n canvas.save();\r\n drawPath(canvas, 1.0f, 1.0f);\r\n drawPath(canvas, 2.0f, 2.0f);\r\n drawPath(canvas, 4.0f, 4.0f);\r\n canvas.restore();\r\n }\r\n\r\n private void drawPath(Canvas canvas, float scaleX, float scaleY) {\r\n canvas.save();\r\n canvas.scale(scaleX, scaleY);\r\n canvas.drawPath(mPath, mPathPaint);\r\n canvas.restore();\r\n canvas.translate(mPathBounds.width() * scaleX, 0.0f);\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.536365270614624, "alphanum_fraction": 0.5452132225036621, "avg_line_length": 35.17763137817383, "blob_id": "59ab567095f0126e8f31f92828a2aea9408c791d", "content_id": "8db3b07175bf401558559abaa57c2664a468027f", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 5651, "license_type": "permissive", "max_line_length": 95, "num_lines": 152, "path": "/core/tests/utiltests/src/com/android/internal/util/FastXmlSerializerTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2013 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.internal.util;\r\n\r\nimport 
android.test.suitebuilder.annotation.LargeTest;\r\nimport android.test.suitebuilder.annotation.SmallTest;\r\nimport android.util.Log;\r\nimport android.util.Xml;\r\n\r\nimport junit.framework.TestCase;\r\n\r\nimport org.xmlpull.v1.XmlPullParser;\r\nimport org.xmlpull.v1.XmlSerializer;\r\n\r\nimport java.io.ByteArrayInputStream;\r\nimport java.io.ByteArrayOutputStream;\r\nimport java.nio.charset.StandardCharsets;\r\n\r\n/**\r\n * Tests for {@link FastXmlSerializer}\r\n */\r\n@SmallTest\r\npublic class FastXmlSerializerTest extends TestCase {\r\n private static final String TAG = \"FastXmlSerializerTest\";\r\n\r\n private static final boolean ENABLE_DUMP = false; // DO NOT SUBMIT WITH TRUE.\r\n\r\n private static final String ROOT_TAG = \"root\";\r\n private static final String ATTR = \"attr\";\r\n\r\n public void testEmptyText() throws Exception {\r\n final ByteArrayOutputStream stream = new ByteArrayOutputStream();\r\n\r\n final XmlSerializer out = new FastXmlSerializer();\r\n out.setOutput(stream, \"utf-8\");\r\n out.startDocument(null, true);\r\n out.setFeature(\"http://xmlpull.org/v1/doc/features.html#indent-output\", true);\r\n\r\n out.startTag(null, \"string\");\r\n out.attribute(null, \"name\", \"meow\");\r\n out.text(\"\");\r\n out.endTag(null, \"string\");\r\n\r\n out.endDocument();\r\n\r\n assertEquals(\"<?xml version='1.0' encoding='utf-8' standalone='yes' ?>\\n\"\r\n + \"<string name=\\\"meow\\\"></string>\\n\", stream.toString());\r\n }\r\n\r\n private boolean checkPreserved(String description, String str) {\r\n boolean ok = true;\r\n byte[] data;\r\n try (final ByteArrayOutputStream baos = new ByteArrayOutputStream()) {\r\n final XmlSerializer out = new FastXmlSerializer();\r\n out.setOutput(baos, StandardCharsets.UTF_16.name());\r\n out.startDocument(null, true);\r\n\r\n out.startTag(null, ROOT_TAG);\r\n out.attribute(null, ATTR, str);\r\n out.text(str);\r\n out.endTag(null, ROOT_TAG);\r\n\r\n out.endDocument();\r\n baos.flush();\r\n data = baos.toByteArray();\r\n } catch (Exception e) {\r\n Log.e(TAG, \"Unable to serialize: \" + description, e);\r\n return false;\r\n }\r\n\r\n if (ENABLE_DUMP) {\r\n Log.d(TAG, \"Dump:\");\r\n Log.d(TAG, new String(data));\r\n }\r\n\r\n try (final ByteArrayInputStream baos = new ByteArrayInputStream(data)) {\r\n XmlPullParser parser = Xml.newPullParser();\r\n parser.setInput(baos, StandardCharsets.UTF_16.name());\r\n\r\n int type;\r\n String tag = null;\r\n while ((type = parser.next()) != XmlPullParser.END_DOCUMENT) {\r\n if (type == XmlPullParser.START_TAG) {\r\n tag = parser.getName();\r\n if (ROOT_TAG.equals(tag)) {\r\n String read = parser.getAttributeValue(null, ATTR);\r\n if (!str.equals(read)) {\r\n Log.e(TAG, \"Attribute not preserved: \" + description\r\n + \" input=\\\"\" + str + \"\\\", but read=\\\"\" + read + \"\\\"\");\r\n ok = false;\r\n }\r\n }\r\n }\r\n if (type == XmlPullParser.TEXT && ROOT_TAG.equals(tag)) {\r\n String read = parser.getText();\r\n if (!str.equals(parser.getText())) {\r\n Log.e(TAG, \"Text not preserved: \" + description\r\n + \" input=\\\"\" + str + \"\\\", but read=\\\"\" + read + \"\\\"\");\r\n ok = false;\r\n }\r\n }\r\n }\r\n } catch (Exception e) {\r\n Log.e(TAG, \"Unable to parse: \" + description, e);\r\n return false;\r\n }\r\n return ok;\r\n }\r\n\r\n private boolean check(String description, String str) throws Exception {\r\n boolean ok = false;\r\n ok |= checkPreserved(description, str);\r\n ok |= checkPreserved(description + \" wrapped with spaces\" ,\" \" + str + \" \");\r\n return ok;\r\n 
}\r\n\r\n @LargeTest\r\n public void testAllCharacters() throws Exception {\r\n boolean ok = true;\r\n for (int i = 0; i < 0xffff; i++) {\r\n if (0xd800 <= i && i <= 0xdfff) {\r\n // Surrogate pair characters.\r\n continue;\r\n }\r\n ok &= check(\"char: \" + i, String.valueOf((char) i));\r\n }\r\n // Dangling surrogate pairs. We can't preserve them.\r\n assertFalse(check(\"+ud800\", \"\\ud800\"));\r\n assertFalse(check(\"+udc00\", \"\\udc00\"));\r\n\r\n for (int i = 0xd800; i < 0xdc00; i ++) {\r\n for (int j = 0xdc00; j < 0xe000; j++) {\r\n ok &= check(\"char: \" + i, String.valueOf((char) i) + String.valueOf((char) j));\r\n }\r\n }\r\n assertTrue(\"Some tests failed. See logcat for details.\", ok);\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6445040106773376, "alphanum_fraction": 0.6471849679946899, "avg_line_length": 38.989009857177734, "blob_id": "17d4a12f5523d700f64a038aa8eaff5b2eeb5fd1", "content_id": "501e3ff5f10c391e825a05efa0557bacf824cf95", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 3730, "license_type": "permissive", "max_line_length": 101, "num_lines": 91, "path": "/tests/TransitionTests/src/com/android/transitiontests/LoginActivityFromResources.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2013 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\npackage com.android.transitiontests;\r\n\r\nimport android.app.Activity;\r\nimport android.os.Bundle;\r\nimport android.view.View;\r\nimport android.view.ViewGroup;\r\nimport android.transition.Scene;\r\nimport android.transition.TransitionInflater;\r\nimport android.widget.TextView;\r\nimport android.transition.TransitionManager;\r\n\r\n\r\npublic class LoginActivityFromResources extends Activity {\r\n ViewGroup mSceneRoot;\r\n Scene mCurrentScene;\r\n TransitionManager mTransitionManager = null;\r\n Scene mLoginScene, mPasswordScene, mIncorrectPasswordScene, mSuccessScene, mUsernameTakenScene,\r\n mNewUserScene;\r\n\r\n @Override\r\n public void onCreate(Bundle savedInstanceState) {\r\n super.onCreate(savedInstanceState);\r\n setContentView(R.layout.activity_login);\r\n View container = findViewById(R.id.container);\r\n mSceneRoot = (ViewGroup) container.getParent();\r\n\r\n }\r\n\r\n public void applyScene(Scene scene) {\r\n mTransitionManager.transitionTo(scene);\r\n mCurrentScene = scene;\r\n }\r\n\r\n public void sendMessage(View view) {\r\n if (mTransitionManager == null) {\r\n TransitionInflater inflater = TransitionInflater.from(this);\r\n\r\n mLoginScene = Scene.getSceneForLayout(mSceneRoot, R.layout.activity_login, this);\r\n mPasswordScene = Scene.getSceneForLayout(mSceneRoot, R.layout.login_password, this);\r\n mIncorrectPasswordScene = Scene.getSceneForLayout(mSceneRoot, R.layout\r\n .incorrect_password, this);\r\n mUsernameTakenScene = Scene.getSceneForLayout(mSceneRoot, 
R.layout.username_taken, this);\r\n mSuccessScene = Scene.getSceneForLayout(mSceneRoot, R.layout.success, this);\r\n mNewUserScene = Scene.getSceneForLayout(mSceneRoot, R.layout.new_user, this);\r\n\r\n mTransitionManager =\r\n inflater.inflateTransitionManager(R.transition.login_transition_mgr,\r\n mSceneRoot);\r\n\r\n mCurrentScene = mLoginScene;\r\n }\r\n TextView textView = (TextView) view;\r\n CharSequence text = textView.getText();\r\n if (text.equals(\"Cancel\")) {\r\n applyScene(mLoginScene);\r\n } else if (text.equals(\"Submit\")) {\r\n if (mCurrentScene == mLoginScene) {\r\n applyScene(mPasswordScene);\r\n } else if (mCurrentScene == mPasswordScene) {\r\n applyScene(Math.random() < .5 ? mSuccessScene : mIncorrectPasswordScene);\r\n } else if (mCurrentScene == mNewUserScene) {\r\n applyScene(Math.random() < .5 ? mSuccessScene : mUsernameTakenScene);\r\n }\r\n } else if (text.equals(\"New User?\")) {\r\n applyScene(mNewUserScene);\r\n } else if (text.equals(\"Okay\")) {\r\n if (mCurrentScene == mIncorrectPasswordScene) {\r\n applyScene(mPasswordScene);\r\n } else { // username taken scene\r\n applyScene(mNewUserScene);\r\n }\r\n } else if (text.equals(\"Reset\")) {\r\n applyScene(mLoginScene);\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.5962976813316345, "alphanum_fraction": 0.6071428656578064, "avg_line_length": 31.647798538208008, "blob_id": "e34c6712c0ca58ac6d132836cd2abc122bdccd5e", "content_id": "d366c8c21c8c75ac17fbf7a5e1b2c9185e1825fe", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 5348, "license_type": "permissive", "max_line_length": 100, "num_lines": 159, "path": "/tests/WindowAnimationJank/src/android/windowanimationjank/ElementLayoutActivity.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2015 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file except\r\n * in compliance with the License. You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software distributed under the License\r\n * is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express\r\n * or implied. 
See the License for the specific language governing permissions and limitations under\r\n * the License.\r\n */\r\npackage android.windowanimationjank;\r\n\r\nimport java.util.Random;\r\n\r\nimport android.app.Activity;\r\nimport android.os.Bundle;\r\nimport android.view.ViewTreeObserver.OnPreDrawListener;\r\nimport android.widget.Chronometer;\r\nimport android.widget.RadioButton;\r\nimport android.widget.Switch;\r\nimport android.widget.TextView;\r\nimport android.widget.ToggleButton;\r\n\r\n/*\r\n * Activity with arbitrary number of random UI elements, refresh itself constantly.\r\n */\r\npublic class ElementLayoutActivity extends Activity implements OnPreDrawListener {\r\n public final static String NUM_ELEMENTS_KEY = \"num_elements\";\r\n\r\n private final static int DEFAULT_NUM_ELEMENTS = 100;\r\n private final static int BACKGROUND_COLOR = 0xfffff000;\r\n private final static int INDICATOR_COLOR = 0xffff0000;\r\n\r\n private FlowLayout mLayout;\r\n // Use the constant seed in order to get predefined order of elements.\r\n private Random mRandom = new Random(0);\r\n // Blinker indicator for visual feedback that Activity is currently updating.\r\n private TextView mIndicator;\r\n private static float mIndicatorState;\r\n\r\n @Override\r\n protected void onCreate(final Bundle savedInstanceState) {\r\n super.onCreate(savedInstanceState);\r\n setContentView(R.layout.flowlayout);\r\n\r\n mLayout = (FlowLayout)findViewById(R.id.root_flow_layout);\r\n mLayout.setBackgroundColor(BACKGROUND_COLOR);\r\n\r\n mIndicator = new TextView(this);\r\n mLayout.addView(mIndicator);\r\n mIndicator.setText(\"***\\n***\");\r\n mIndicator.setBackgroundColor(BACKGROUND_COLOR);\r\n mIndicatorState = 0.0f;\r\n\r\n // Need constantly invalidate view in order to get max redraw rate.\r\n mLayout.getViewTreeObserver().addOnPreDrawListener(this);\r\n\r\n // Read requested number of elements in layout.\r\n int numElements = getIntent().getIntExtra(NUM_ELEMENTS_KEY, DEFAULT_NUM_ELEMENTS);\r\n\r\n for (int i = 0; i < numElements; ++i) {\r\n switch (mRandom.nextInt(5)) {\r\n case 0:\r\n createRadioButton();\r\n break;\r\n case 1:\r\n createToggleButton();\r\n break;\r\n case 2:\r\n createSwitch();\r\n break;\r\n case 3:\r\n createTextView();\r\n break;\r\n case 4:\r\n createChronometer();\r\n break;\r\n }\r\n }\r\n\r\n setContentView(mLayout);\r\n }\r\n\r\n private void createTextView() {\r\n TextView textView = new TextView(this);\r\n int lineCnt = mRandom.nextInt(4);\r\n StringBuffer buffer = new StringBuffer();\r\n for (int i = 0; i < lineCnt; ++i) {\r\n if (i != 0) {\r\n buffer.append(\"\\n\");\r\n }\r\n buffer.append(\"Line:\" + mRandom.nextInt());\r\n }\r\n textView.setText(buffer);\r\n mLayout.addView(textView);\r\n }\r\n\r\n private void createRadioButton() {\r\n RadioButton button = new RadioButton(this);\r\n button.setText(\"RadioButton:\" + mRandom.nextInt());\r\n mLayout.addView(button);\r\n }\r\n\r\n private void createToggleButton() {\r\n ToggleButton button = new ToggleButton(this);\r\n button.setChecked(mRandom.nextBoolean());\r\n mLayout.addView(button);\r\n }\r\n\r\n private void createSwitch() {\r\n Switch button = new Switch(this);\r\n button.setChecked(mRandom.nextBoolean());\r\n mLayout.addView(button);\r\n }\r\n\r\n private void createChronometer() {\r\n Chronometer chronometer = new Chronometer(this);\r\n chronometer.setBase(mRandom.nextLong());\r\n mLayout.addView(chronometer);\r\n chronometer.start();\r\n }\r\n\r\n @Override\r\n protected void onResume() {\r\n super.onResume();\r\n }\r\n\r\n 
@Override\r\n protected void onPause() {\r\n super.onPause();\r\n }\r\n\r\n @Override\r\n public boolean onPreDraw() {\r\n // Interpolate indicator color\r\n int background = 0xff000000;\r\n for (int i = 0; i < 3; ++i) {\r\n int shift = 8 * i;\r\n int colorB = (BACKGROUND_COLOR >> shift) & 0xff;\r\n int colorI = (INDICATOR_COLOR >> shift) & 0xff;\r\n int color = (int)((float)colorB * (1.0f - mIndicatorState) +\r\n (float)colorI * mIndicatorState);\r\n if (color > 255) {\r\n color = 255;\r\n }\r\n background |= (color << shift);\r\n }\r\n\r\n mIndicator.setBackgroundColor(background);\r\n mIndicatorState += (3 / 60.0f); // around 3 times per second\r\n mIndicatorState = mIndicatorState - (int)mIndicatorState;\r\n\r\n mLayout.postInvalidate();\r\n return true;\r\n }\r\n}" }, { "alpha_fraction": 0.7695713043212891, "alphanum_fraction": 0.7714352011680603, "avg_line_length": 35, "blob_id": "e876c442bcd61ba3dc04fbd657ce2f04aad30e80", "content_id": "1e2c9f9203f6f4809b889075aef8884b06dedc29", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 4292, "license_type": "permissive", "max_line_length": 98, "num_lines": 116, "path": "/packages/SystemUI/src/com/android/systemui/controls/dagger/ControlsModule.kt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2019 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.systemui.controls.dagger\r\n\r\nimport android.app.Activity\r\nimport android.content.pm.PackageManager\r\nimport com.android.systemui.controls.controller.ControlsBindingController\r\nimport com.android.systemui.controls.controller.ControlsBindingControllerImpl\r\nimport com.android.systemui.controls.controller.ControlsController\r\nimport com.android.systemui.controls.controller.ControlsControllerImpl\r\nimport com.android.systemui.controls.controller.ControlsFavoritePersistenceWrapper\r\nimport com.android.systemui.controls.management.ControlsEditingActivity\r\nimport com.android.systemui.controls.management.ControlsFavoritingActivity\r\nimport com.android.systemui.controls.management.ControlsListingController\r\nimport com.android.systemui.controls.management.ControlsListingControllerImpl\r\nimport com.android.systemui.controls.management.ControlsProviderSelectorActivity\r\nimport com.android.systemui.controls.management.ControlsRequestDialog\r\nimport com.android.systemui.controls.ui.ControlActionCoordinator\r\nimport com.android.systemui.controls.ui.ControlActionCoordinatorImpl\r\nimport com.android.systemui.controls.ui.ControlsUiController\r\nimport com.android.systemui.controls.ui.ControlsUiControllerImpl\r\nimport dagger.Binds\r\nimport dagger.BindsOptionalOf\r\nimport dagger.Module\r\nimport dagger.Provides\r\nimport dagger.multibindings.ClassKey\r\nimport dagger.multibindings.IntoMap\r\nimport javax.inject.Singleton\r\n\r\n/**\r\n * 
Module for injecting classes in `com.android.systemui.controls`.\r\n *\r\n * Classes provided by this module should only be injected directly into other classes in this\r\n * module. For injecting outside of this module (for example, [GlobalActionsDialog]), inject\r\n * [ControlsComponent] and obtain the corresponding optionals from it.\r\n */\r\n@Module\r\nabstract class ControlsModule {\r\n\r\n @Module\r\n companion object {\r\n @JvmStatic\r\n @Provides\r\n @Singleton\r\n @ControlsFeatureEnabled\r\n fun providesControlsFeatureEnabled(pm: PackageManager): Boolean {\r\n return pm.hasSystemFeature(PackageManager.FEATURE_CONTROLS)\r\n }\r\n }\r\n\r\n @Binds\r\n abstract fun provideControlsListingController(\r\n controller: ControlsListingControllerImpl\r\n ): ControlsListingController\r\n\r\n @Binds\r\n abstract fun provideControlsController(controller: ControlsControllerImpl): ControlsController\r\n\r\n @Binds\r\n abstract fun provideControlsBindingController(\r\n controller: ControlsBindingControllerImpl\r\n ): ControlsBindingController\r\n\r\n @Binds\r\n abstract fun provideUiController(controller: ControlsUiControllerImpl): ControlsUiController\r\n\r\n @Binds\r\n abstract fun provideControlActionCoordinator(\r\n coordinator: ControlActionCoordinatorImpl\r\n ): ControlActionCoordinator\r\n\r\n @BindsOptionalOf\r\n abstract fun optionalPersistenceWrapper(): ControlsFavoritePersistenceWrapper\r\n\r\n @Binds\r\n @IntoMap\r\n @ClassKey(ControlsProviderSelectorActivity::class)\r\n abstract fun provideControlsProviderActivity(\r\n activity: ControlsProviderSelectorActivity\r\n ): Activity\r\n\r\n @Binds\r\n @IntoMap\r\n @ClassKey(ControlsFavoritingActivity::class)\r\n abstract fun provideControlsFavoritingActivity(\r\n activity: ControlsFavoritingActivity\r\n ): Activity\r\n\r\n @Binds\r\n @IntoMap\r\n @ClassKey(ControlsEditingActivity::class)\r\n abstract fun provideControlsEditingActivity(\r\n activity: ControlsEditingActivity\r\n ): Activity\r\n\r\n @Binds\r\n @IntoMap\r\n @ClassKey(ControlsRequestDialog::class)\r\n abstract fun provideControlsRequestDialog(\r\n activity: ControlsRequestDialog\r\n ): Activity\r\n}\r\n" }, { "alpha_fraction": 0.5891006588935852, "alphanum_fraction": 0.5917752981185913, "avg_line_length": 29.819149017333984, "blob_id": "3ad897a953eef42dfc3f3b853397e54e24b9ee44", "content_id": "e2184694c5031e21d92c246b6264226d08522ab1", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2991, "license_type": "permissive", "max_line_length": 89, "num_lines": 94, "path": "/packages/StatementService/src/com/android/statementservice/retriever/JsonParser.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2015 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.statementservice.retriever;\r\n\r\nimport android.util.JsonReader;\r\nimport 
android.util.JsonToken;\r\n\r\nimport org.json.JSONArray;\r\nimport org.json.JSONException;\r\nimport org.json.JSONObject;\r\n\r\nimport java.io.IOException;\r\nimport java.util.ArrayList;\r\nimport java.util.List;\r\n\r\n/**\r\n * A helper class that creates a {@link JSONObject} from a {@link JsonReader}.\r\n */\r\npublic final class JsonParser {\r\n\r\n private JsonParser() {}\r\n\r\n /**\r\n * Consumes and parses exactly one JSON object from the {@link JsonReader}.\r\n * The object's fields can only be objects, strings or arrays of strings.\r\n */\r\n public static JSONObject parse(JsonReader reader) throws IOException, JSONException {\r\n JSONObject output = new JSONObject();\r\n String errorMsg = null;\r\n\r\n reader.beginObject();\r\n while (reader.hasNext()) {\r\n String fieldName = reader.nextName();\r\n\r\n if (output.has(fieldName)) {\r\n errorMsg = \"Duplicate field name.\";\r\n reader.skipValue();\r\n continue;\r\n }\r\n\r\n JsonToken token = reader.peek();\r\n if (token.equals(JsonToken.BEGIN_ARRAY)) {\r\n output.put(fieldName, new JSONArray(parseArray(reader)));\r\n } else if (token.equals(JsonToken.STRING)) {\r\n output.put(fieldName, reader.nextString());\r\n } else if (token.equals(JsonToken.BEGIN_OBJECT)) {\r\n try {\r\n output.put(fieldName, parse(reader));\r\n } catch (JSONException e) {\r\n errorMsg = e.getMessage();\r\n }\r\n } else {\r\n reader.skipValue();\r\n errorMsg = \"Unsupported value type.\";\r\n }\r\n }\r\n reader.endObject();\r\n\r\n if (errorMsg != null) {\r\n throw new JSONException(errorMsg);\r\n }\r\n\r\n return output;\r\n }\r\n\r\n /**\r\n * Parses one string array from the {@link JsonReader}.\r\n */\r\n public static List<String> parseArray(JsonReader reader) throws IOException {\r\n ArrayList<String> output = new ArrayList<>();\r\n\r\n reader.beginArray();\r\n while (reader.hasNext()) {\r\n output.add(reader.nextString());\r\n }\r\n reader.endArray();\r\n\r\n return output;\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.7398374080657959, "alphanum_fraction": 0.7398374080657959, "avg_line_length": 39, "blob_id": "22e9c74403677b4bb495d5870184de0bd4a8c321", "content_id": "b1ef9b28f60f77d8697ab64e5d3c80d570f17608", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 123, "license_type": "permissive", "max_line_length": 60, "num_lines": 3, "path": "/cmds/sm/sm", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "#!/system/bin/sh\r\nexport CLASSPATH=/system/framework/sm.jar\r\nexec app_process /system/bin com.android.commands.sm.Sm \"$@\"\r\n" }, { "alpha_fraction": 0.615259051322937, "alphanum_fraction": 0.6291309595108032, "avg_line_length": 24.0744686126709, "blob_id": "43f9eafc6ef7e855193675c9d11643c0e0c93efd", "content_id": "e90bcb184b0fa28ced4b8d7d57200b8fbc05f1c4", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2451, "license_type": "permissive", "max_line_length": 76, "num_lines": 94, "path": "/media/mca/filterfw/native/core/geometry.h", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2011 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * 
Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#ifndef ANDROID_FILTERFW_CORE_GEOMETRY_H\r\n#define ANDROID_FILTERFW_CORE_GEOMETRY_H\r\n\r\n#include <vector>\r\n\r\nnamespace android {\r\nnamespace filterfw {\r\n\r\n// This is an initial implementation of some geometrical structures. This is\r\n// likely to grow and become more sophisticated in the future.\r\n\r\nclass Point {\r\n public:\r\n Point() : x_(0.0f), y_(0.0f) {}\r\n Point(float x, float y) : x_(x), y_(y) {}\r\n\r\n float x() const { return x_; }\r\n float y() const { return y_; }\r\n\r\n float Length() const;\r\n bool ScaleTo(float new_length);\r\n static float Distance(const Point& p0, const Point& p1);\r\n\r\n // Add more of these as needed:\r\n Point operator+(const Point& other) const;\r\n Point operator-(const Point& other) const;\r\n Point operator*(float factor) const;\r\n\r\n void Rotate90Clockwise();\r\n\r\n private:\r\n float x_, y_;\r\n};\r\n\r\nclass Quad {\r\n public:\r\n Quad() : points_(4) {}\r\n virtual ~Quad() {}\r\n\r\n Quad(const Point& p0, const Point& p1, const Point& p2, const Point& p3)\r\n : points_(4) {\r\n points_[0] = p0;\r\n points_[1] = p1;\r\n points_[2] = p2;\r\n points_[3] = p3;\r\n }\r\n\r\n const std::vector<Point>& points() const { return points_; }\r\n const Point& point(int ix) const;\r\n\r\n protected:\r\n std::vector<Point> points_;\r\n};\r\n\r\nstruct Rect {\r\n float x, y, width, height;\r\n\r\n Rect() {\r\n x = y = 0.0f;\r\n width = height = 1.0f;\r\n }\r\n\r\n Rect(float x, float y, float width, float height) {\r\n this->x = x;\r\n this->y = y;\r\n this->width = width;\r\n this->height = height;\r\n }\r\n\r\n bool ExpandToAspectRatio(float ratio);\r\n bool ExpandToMinLength(float length);\r\n bool ScaleWithLengthLimit(float factor, float max_length);\r\n};\r\n\r\n} // namespace filterfw\r\n} // namespace android\r\n\r\n#endif // ANDROID_FILTERFW_CORE_GEOMETRY_H\r\n" }, { "alpha_fraction": 0.5974463224411011, "alphanum_fraction": 0.6189764142036438, "avg_line_length": 42.779903411865234, "blob_id": "204a4f5566e0824c5ba7d799a70c05a9039761ec", "content_id": "b4adab917c91f386b7c46cf4036b0ac371225eff", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 18718, "license_type": "permissive", "max_line_length": 100, "num_lines": 418, "path": "/services/tests/servicestests/src/com/android/server/accessibility/GestureDescriptionTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2016 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage 
com.android.server.accessibility;\r\n\r\nimport static org.hamcrest.CoreMatchers.allOf;\r\nimport static org.hamcrest.CoreMatchers.everyItem;\r\nimport static org.hamcrest.MatcherAssert.assertThat;\r\n\r\nimport android.accessibilityservice.GestureDescription;\r\nimport android.accessibilityservice.GestureDescription.GestureStep;\r\nimport android.accessibilityservice.GestureDescription.MotionEventGenerator;\r\nimport android.accessibilityservice.GestureDescription.StrokeDescription;\r\nimport android.graphics.Path;\r\nimport android.graphics.PointF;\r\nimport org.hamcrest.Description;\r\nimport org.hamcrest.Matcher;\r\nimport org.hamcrest.TypeSafeMatcher;\r\nimport org.junit.Test;\r\n\r\nimport java.util.List;\r\n\r\nimport static junit.framework.TestCase.assertEquals;\r\n\r\n/**\r\n * Tests for GestureDescription\r\n */\r\npublic class GestureDescriptionTest {\r\n @Test\r\n public void testGestureShorterThanSampleRate_producesStartAndEnd() {\r\n PointF click = new PointF(10, 20);\r\n Path clickPath = new Path();\r\n clickPath.moveTo(click.x, click.y);\r\n StrokeDescription clickStroke = new StrokeDescription(clickPath, 0, 10);\r\n GestureDescription.Builder clickBuilder = new GestureDescription.Builder();\r\n clickBuilder.addStroke(clickStroke);\r\n GestureDescription clickGesture = clickBuilder.build();\r\n\r\n List<GestureStep> clickGestureSteps = MotionEventGenerator\r\n .getGestureStepsFromGestureDescription(clickGesture, 100);\r\n\r\n assertEquals(2, clickGestureSteps.size());\r\n assertThat(clickGestureSteps.get(0), allOf(numTouchPointsIs(1), numStartsOfStroke(1),\r\n numEndsOfStroke(0), hasPoint(click)));\r\n assertThat(clickGestureSteps.get(1), allOf(numTouchPointsIs(1), numStartsOfStroke(0),\r\n numEndsOfStroke(1), hasPoint(click)));\r\n }\r\n\r\n @Test\r\n public void testSwipe_shouldContainEvenlySpacedPoints() {\r\n int samplePeriod = 10;\r\n int numSamples = 5;\r\n float stepX = 2;\r\n float stepY = 3;\r\n PointF start = new PointF(10, 20);\r\n PointF end = new PointF(10 + numSamples * stepX, 20 + numSamples * stepY);\r\n\r\n GestureDescription swipe =\r\n createSwipe(start.x, start.y, end.x, end.y, numSamples * samplePeriod);\r\n List<GestureStep> swipeGestureSteps = MotionEventGenerator\r\n .getGestureStepsFromGestureDescription(swipe, samplePeriod);\r\n assertEquals(numSamples + 1, swipeGestureSteps.size());\r\n\r\n assertThat(swipeGestureSteps.get(0), allOf(numTouchPointsIs(1), numStartsOfStroke(1),\r\n numEndsOfStroke(0), hasPoint(start)));\r\n assertThat(swipeGestureSteps.get(numSamples), allOf(numTouchPointsIs(1),\r\n numStartsOfStroke(0), numEndsOfStroke(1), hasPoint(end)));\r\n\r\n for (int i = 1; i < numSamples; ++i) {\r\n PointF interpPoint = new PointF(start.x + stepX * i, start.y + stepY * i);\r\n assertThat(swipeGestureSteps.get(i), allOf(numTouchPointsIs(1),\r\n numStartsOfStroke(0), numEndsOfStroke(0), hasPoint(interpPoint)));\r\n }\r\n }\r\n\r\n @Test\r\n public void testSwipeWithNonIntegerValues_shouldRound() {\r\n int strokeTime = 10;\r\n\r\n GestureDescription swipe = createSwipe(10.1f, 20.6f, 11.9f, 22.1f, strokeTime);\r\n List<GestureStep> swipeGestureSteps = MotionEventGenerator\r\n .getGestureStepsFromGestureDescription(swipe, strokeTime);\r\n assertEquals(2, swipeGestureSteps.size());\r\n assertThat(swipeGestureSteps.get(0), hasPoint(new PointF(10, 21)));\r\n assertThat(swipeGestureSteps.get(1), hasPoint(new PointF(12, 22)));\r\n }\r\n\r\n @Test\r\n public void testPathsWithOverlappingTiming_produceCorrectSteps() {\r\n // There are 4 paths\r\n // 0: 
an L-shaped path that starts first\r\n // 1: a swipe that starts in the middle of the L-shaped path and ends when the L ends\r\n // 2: a swipe that starts at the same time as #1 but extends past the end of the L\r\n // 3: a swipe that starts when #3 ends\r\n PointF path0Start = new PointF(100, 150);\r\n PointF path0Turn = new PointF(100, 200);\r\n PointF path0End = new PointF(250, 200);\r\n int path0StartTime = 0;\r\n int path0EndTime = 100;\r\n int path0Duration = path0EndTime - path0StartTime;\r\n Path path0 = new Path();\r\n path0.moveTo(path0Start.x, path0Start.y);\r\n path0.lineTo(path0Turn.x, path0Turn.y);\r\n path0.lineTo(path0End.x, path0End.y);\r\n StrokeDescription path0Stroke = new StrokeDescription(path0, path0StartTime, path0Duration);\r\n\r\n PointF path1Start = new PointF(300, 350);\r\n PointF path1End = new PointF(300, 400);\r\n int path1StartTime = 50;\r\n int path1EndTime = path0EndTime;\r\n StrokeDescription path1Stroke = createSwipeStroke(\r\n path1Start.x, path1Start.y, path1End.x, path1End.y, path1StartTime, path1EndTime);\r\n\r\n PointF path2Start = new PointF(400, 450);\r\n PointF path2End = new PointF(400, 500);\r\n int path2StartTime = 50;\r\n int path2EndTime = 150;\r\n StrokeDescription path2Stroke = createSwipeStroke(\r\n path2Start.x, path2Start.y, path2End.x, path2End.y, path2StartTime, path2EndTime);\r\n\r\n PointF path3Start = new PointF(500, 550);\r\n PointF path3End = new PointF(500, 600);\r\n int path3StartTime = path2EndTime;\r\n int path3EndTime = 200;\r\n StrokeDescription path3Stroke = createSwipeStroke(\r\n path3Start.x, path3Start.y, path3End.x, path3End.y, path3StartTime, path3EndTime);\r\n\r\n int deltaT = 12; // Force samples to happen on extra boundaries\r\n GestureDescription.Builder builder = new GestureDescription.Builder();\r\n builder.addStroke(path0Stroke);\r\n builder.addStroke(path1Stroke);\r\n builder.addStroke(path2Stroke);\r\n builder.addStroke(path3Stroke);\r\n List<GestureStep> steps = MotionEventGenerator\r\n .getGestureStepsFromGestureDescription(builder.build(), deltaT);\r\n\r\n long start = 0;\r\n assertThat(steps.get(0), allOf(numStartsOfStroke(1), numEndsOfStroke(0), isAtTime(start),\r\n numTouchPointsIs(1), hasPoint(path0Start)));\r\n assertThat(steps.get(1), allOf(numTouchPointsIs(1), noStartsOrEnds(),\r\n isAtTime(start + deltaT)));\r\n assertThat(steps.get(2), allOf(numTouchPointsIs(1), isAtTime(start + deltaT * 2)));\r\n assertThat(steps.get(3), allOf(numTouchPointsIs(1), isAtTime(start + deltaT * 3)));\r\n assertThat(steps.get(4), allOf(numTouchPointsIs(1), isAtTime(start + deltaT * 4)));\r\n\r\n assertThat(steps.get(5), allOf(numTouchPointsIs(3), numStartsOfStroke(2),\r\n numEndsOfStroke(0), isAtTime(path1StartTime), hasPoint(path1Start),\r\n hasPoint(path2Start)));\r\n\r\n start = path1StartTime;\r\n assertThat(steps.get(6), allOf(numTouchPointsIs(3), isAtTime(start + deltaT * 1)));\r\n assertThat(steps.get(7), allOf(noStartsOrEnds(), isAtTime(start + deltaT * 2)));\r\n assertThat(steps.get(8), allOf(numTouchPointsIs(3), isAtTime(start + deltaT * 3)));\r\n assertThat(steps.get(9), allOf(noStartsOrEnds(), isAtTime(start + deltaT * 4)));\r\n\r\n assertThat(steps.get(10), allOf(numTouchPointsIs(3), numStartsOfStroke(0),\r\n numEndsOfStroke(2), isAtTime(path0EndTime), hasPoint(path0End),\r\n hasPoint(path1End)));\r\n\r\n start = path0EndTime;\r\n assertThat(steps.get(11), allOf(numTouchPointsIs(1), isAtTime(start + deltaT * 1)));\r\n assertThat(steps.get(12), allOf(noStartsOrEnds(), isAtTime(start + deltaT * 2)));\r\n 
assertThat(steps.get(13), allOf(numTouchPointsIs(1), isAtTime(start + deltaT * 3)));\r\n assertThat(steps.get(14), allOf(noStartsOrEnds(), isAtTime(start + deltaT * 4)));\r\n\r\n assertThat(steps.get(15), allOf(numTouchPointsIs(2), numStartsOfStroke(1),\r\n numEndsOfStroke(1), isAtTime(path2EndTime), hasPoint(path2End),\r\n hasPoint(path3Start)));\r\n\r\n start = path2EndTime;\r\n assertThat(steps.get(16), allOf(numTouchPointsIs(1), isAtTime(start + deltaT * 1)));\r\n assertThat(steps.get(17), allOf(noStartsOrEnds(), isAtTime(start + deltaT * 2)));\r\n assertThat(steps.get(18), allOf(numTouchPointsIs(1), isAtTime(start + deltaT * 3)));\r\n assertThat(steps.get(19), allOf(noStartsOrEnds(), isAtTime(start + deltaT * 4)));\r\n\r\n assertThat(steps.get(20), allOf(numTouchPointsIs(1), numStartsOfStroke(0),\r\n numEndsOfStroke(1), isAtTime(path3EndTime), hasPoint(path3End)));\r\n }\r\n\r\n @Test\r\n public void testMaxTouchpoints_shouldHaveValidCoords() {\r\n GestureDescription.Builder maxPointBuilder = new GestureDescription.Builder();\r\n PointF baseStartPoint = new PointF(100, 100);\r\n PointF baseEndPoint = new PointF(100, 200);\r\n int xStep = 10;\r\n int samplePeriod = 15;\r\n int numSamples = 2;\r\n int numPoints = GestureDescription.getMaxStrokeCount();\r\n for (int i = 0; i < numPoints; i++) {\r\n Path path = new Path();\r\n path.moveTo(baseStartPoint.x + i * xStep, baseStartPoint.y);\r\n path.lineTo(baseEndPoint.x + i * xStep, baseEndPoint.y);\r\n maxPointBuilder.addStroke(new StrokeDescription(path, 0, samplePeriod * numSamples));\r\n }\r\n\r\n List<GestureStep> steps = MotionEventGenerator\r\n .getGestureStepsFromGestureDescription(maxPointBuilder.build(), samplePeriod);\r\n assertEquals(3, steps.size());\r\n\r\n assertThat(steps.get(0), allOf(numTouchPointsIs(numPoints), numStartsOfStroke(numPoints),\r\n numEndsOfStroke(0), isAtTime(0)));\r\n assertThat(steps.get(1), allOf(numTouchPointsIs(numPoints), numStartsOfStroke(0),\r\n numEndsOfStroke(0), isAtTime(samplePeriod)));\r\n assertThat(steps.get(2), allOf(numTouchPointsIs(numPoints), numStartsOfStroke(0),\r\n numEndsOfStroke(numPoints), isAtTime(samplePeriod * 2)));\r\n\r\n PointF baseMidPoint = new PointF((baseStartPoint.x + baseEndPoint.x) / 2,\r\n (baseStartPoint.y + baseEndPoint.y) / 2);\r\n for (int i = 0; i < numPoints; i++) {\r\n assertThat(steps.get(0),\r\n hasPoint(new PointF(baseStartPoint.x + i * xStep, baseStartPoint.y)));\r\n assertThat(steps.get(1),\r\n hasPoint(new PointF(baseMidPoint.x + i * xStep, baseMidPoint.y)));\r\n assertThat(steps.get(2),\r\n hasPoint(new PointF(baseEndPoint.x + i * xStep, baseEndPoint.y)));\r\n }\r\n }\r\n\r\n @Test\r\n public void testGetGestureSteps_touchPointsHaveStrokeId() {\r\n StrokeDescription swipeStroke = createSwipeStroke(10, 20, 30, 40, 0, 100);\r\n GestureDescription swipe = new GestureDescription.Builder().addStroke(swipeStroke).build();\r\n List<GestureStep> swipeGestureSteps = MotionEventGenerator\r\n .getGestureStepsFromGestureDescription(swipe, 10);\r\n\r\n assertThat(swipeGestureSteps, everyItem(hasStrokeId(swipeStroke.getId())));\r\n }\r\n\r\n @Test\r\n public void testGetGestureSteps_continuedStroke_hasNoEndPoint() {\r\n Path swipePath = new Path();\r\n swipePath.moveTo(10, 20);\r\n swipePath.lineTo(30, 40);\r\n StrokeDescription stroke1 =\r\n new StrokeDescription(swipePath, 0, 100, true);\r\n GestureDescription gesture = new GestureDescription.Builder().addStroke(stroke1).build();\r\n List<GestureStep> steps = MotionEventGenerator\r\n 
.getGestureStepsFromGestureDescription(gesture, 10);\r\n\r\n assertThat(steps, everyItem(numEndsOfStroke(0)));\r\n }\r\n\r\n @Test\r\n public void testGetGestureSteps_continuingStroke_hasNoStartPointAndHasContinuedId() {\r\n Path swipePath = new Path();\r\n swipePath.moveTo(10, 20);\r\n swipePath.lineTo(30, 40);\r\n StrokeDescription stroke1 =\r\n new StrokeDescription(swipePath, 0, 100, true);\r\n StrokeDescription stroke2 = stroke1.continueStroke(swipePath, 0, 100, false);\r\n GestureDescription gesture = new GestureDescription.Builder().addStroke(stroke2).build();\r\n List<GestureStep> steps = MotionEventGenerator\r\n .getGestureStepsFromGestureDescription(gesture, 10);\r\n\r\n assertThat(steps, everyItem(\r\n allOf(continuesStrokeId(stroke1.getId()), numStartsOfStroke(0))));\r\n }\r\n\r\n private GestureDescription createSwipe(\r\n float startX, float startY, float endX, float endY, long duration) {\r\n GestureDescription.Builder swipeBuilder = new GestureDescription.Builder();\r\n swipeBuilder.addStroke(createSwipeStroke(startX, startY, endX, endY, 0, duration));\r\n return swipeBuilder.build();\r\n }\r\n\r\n private StrokeDescription createSwipeStroke(\r\n float startX, float startY, float endX, float endY, long startTime, long endTime) {\r\n Path swipePath = new Path();\r\n swipePath.moveTo(startX, startY);\r\n swipePath.lineTo(endX, endY);\r\n StrokeDescription swipeStroke =\r\n new StrokeDescription(swipePath, startTime, endTime - startTime);\r\n return swipeStroke;\r\n }\r\n\r\n Matcher<GestureStep> numTouchPointsIs(final int numTouchPoints) {\r\n return new TypeSafeMatcher<GestureStep>() {\r\n @Override\r\n protected boolean matchesSafely(GestureStep gestureStep) {\r\n return gestureStep.numTouchPoints == numTouchPoints;\r\n }\r\n\r\n @Override\r\n public void describeTo(Description description) {\r\n description.appendText(\"Has \" + numTouchPoints + \" touch point(s)\");\r\n }\r\n };\r\n }\r\n\r\n Matcher<GestureStep> numStartsOfStroke(final int numStarts) {\r\n return new TypeSafeMatcher<GestureStep>() {\r\n @Override\r\n protected boolean matchesSafely(GestureStep gestureStep) {\r\n int numStartsFound = 0;\r\n for (int i = 0; i < gestureStep.numTouchPoints; i++) {\r\n if (gestureStep.touchPoints[i].mIsStartOfPath) {\r\n numStartsFound++;\r\n }\r\n }\r\n return numStartsFound == numStarts;\r\n }\r\n\r\n @Override\r\n public void describeTo(Description description) {\r\n description.appendText(\"Starts \" + numStarts + \" stroke(s)\");\r\n }\r\n };\r\n }\r\n\r\n Matcher<GestureStep> numEndsOfStroke(final int numEnds) {\r\n return new TypeSafeMatcher<GestureStep>() {\r\n @Override\r\n protected boolean matchesSafely(GestureStep gestureStep) {\r\n int numEndsFound = 0;\r\n for (int i = 0; i < gestureStep.numTouchPoints; i++) {\r\n if (gestureStep.touchPoints[i].mIsEndOfPath) {\r\n numEndsFound++;\r\n }\r\n }\r\n return numEndsFound == numEnds;\r\n }\r\n\r\n @Override\r\n public void describeTo(Description description) {\r\n description.appendText(\"Ends \" + numEnds + \" stroke(s)\");\r\n }\r\n };\r\n }\r\n\r\n Matcher<GestureStep> hasPoint(final PointF point) {\r\n return new TypeSafeMatcher<GestureStep>() {\r\n @Override\r\n protected boolean matchesSafely(GestureStep gestureStep) {\r\n for (int i = 0; i < gestureStep.numTouchPoints; i++) {\r\n if ((gestureStep.touchPoints[i].mX == point.x)\r\n && (gestureStep.touchPoints[i].mY == point.y)) {\r\n return true;\r\n }\r\n }\r\n return false;\r\n }\r\n\r\n @Override\r\n public void describeTo(Description description) {\r\n 
description.appendText(\"Has at least one point at \" + point);\r\n }\r\n };\r\n }\r\n\r\n Matcher<GestureStep> hasStrokeId(final int strokeId) {\r\n return new TypeSafeMatcher<GestureStep>() {\r\n @Override\r\n protected boolean matchesSafely(GestureStep gestureStep) {\r\n for (int i = 0; i < gestureStep.numTouchPoints; i++) {\r\n if (gestureStep.touchPoints[i].mStrokeId == strokeId) {\r\n return true;\r\n }\r\n }\r\n return false;\r\n }\r\n\r\n @Override\r\n public void describeTo(Description description) {\r\n description.appendText(\"Has at least one point with stroke id \" + strokeId);\r\n }\r\n };\r\n }\r\n\r\n Matcher<GestureStep> continuesStrokeId(final int strokeId) {\r\n return new TypeSafeMatcher<GestureStep>() {\r\n @Override\r\n protected boolean matchesSafely(GestureStep gestureStep) {\r\n for (int i = 0; i < gestureStep.numTouchPoints; i++) {\r\n if (gestureStep.touchPoints[i].mContinuedStrokeId == strokeId) {\r\n return true;\r\n }\r\n }\r\n return false;\r\n }\r\n\r\n @Override\r\n public void describeTo(Description description) {\r\n description.appendText(\"Continues stroke id \" + strokeId);\r\n }\r\n };\r\n }\r\n\r\n Matcher<GestureStep> isAtTime(final long time) {\r\n return new TypeSafeMatcher<GestureStep>() {\r\n @Override\r\n protected boolean matchesSafely(GestureStep gestureStep) {\r\n return gestureStep.timeSinceGestureStart == time;\r\n }\r\n\r\n @Override\r\n public void describeTo(Description description) {\r\n description.appendText(\"Is at time \" + time);\r\n }\r\n };\r\n }\r\n\r\n Matcher<GestureStep> noStartsOrEnds() {\r\n return allOf(numStartsOfStroke(0), numEndsOfStroke(0));\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.7055555582046509, "alphanum_fraction": 0.7055555582046509, "avg_line_length": 27.66666603088379, "blob_id": "f158dedd00a688fbc3b2b145438d0572b437bf88", "content_id": "6fd2fa447c95e2ece41bff90f7da609f070bca55", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 180, "license_type": "permissive", "max_line_length": 62, "num_lines": 6, "path": "/cmds/bu/bu", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "#!/system/bin/sh\r\n# Script to start \"bu\" on the device\r\n#\r\nbase=/system\r\nexport CLASSPATH=$base/framework/bu.jar\r\nexec app_process $base/bin com.android.commands.bu.Backup \"$@\"\r\n\r\n" }, { "alpha_fraction": 0.6192830801010132, "alphanum_fraction": 0.6517747044563293, "avg_line_length": 39.33576583862305, "blob_id": "985f628118d8bbe0074681a41879e549149a0596", "content_id": "1f7199bbd3debc1632fe8ece78a4a558336883ba", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 5663, "license_type": "permissive", "max_line_length": 98, "num_lines": 137, "path": "/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/ColorSpace.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright 2013 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS 
OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\npackage androidx.media.filterfw;\r\n\r\nimport java.nio.ByteBuffer;\r\n\r\n/**\r\n * Utility functions to convert between color-spaces.\r\n *\r\n * Currently these methods are all CPU-based native methods. These could be updated in the future\r\n * to provide other implementations.\r\n */\r\npublic class ColorSpace {\r\n\r\n /**\r\n * Convert YUV420-Planar data to RGBA8888.\r\n *\r\n * The input data is expected to be laid out in 3 planes: the width x height Y plane, followed\r\n * by the U and V planes, where each chroma value corresponds to a 2x2 luminance value block.\r\n * YUV to RGB conversion is done using the ITU-R BT.601 transformation. The output buffer must\r\n * be large enough to hold the data, and the dimensions must be multiples of 2.\r\n *\r\n * @param input data encoded in YUV420-Planar.\r\n * @param output buffer to hold RGBA8888 data.\r\n * @param width the width of the image (must be a multiple of 2)\r\n * @param height the height of the image (must be a multiple of 2)\r\n */\r\n public static void convertYuv420pToRgba8888(\r\n ByteBuffer input, ByteBuffer output, int width, int height) {\r\n expectInputSize(input, (3 * width * height) / 2);\r\n expectOutputSize(output, width * height * 4);\r\n nativeYuv420pToRgba8888(input, output, width, height);\r\n }\r\n\r\n /**\r\n * Convert ARGB8888 to RGBA8888.\r\n *\r\n * The input data is expected to be encoded in 8-bit interleaved ARGB channels. The output\r\n * buffer must be large enough to hold the data. The output buffer may be the same as the\r\n * input buffer.\r\n *\r\n * @param input data encoded in ARGB8888.\r\n * @param output buffer to hold RGBA8888 data.\r\n * @param width the width of the image\r\n * @param height the height of the image\r\n */\r\n public static void convertArgb8888ToRgba8888(\r\n ByteBuffer input, ByteBuffer output, int width, int height) {\r\n expectInputSize(input, width * height * 4);\r\n expectOutputSize(output, width * height * 4);\r\n nativeArgb8888ToRgba8888(input, output, width, height);\r\n }\r\n\r\n /**\r\n * Convert RGBA8888 to HSVA8888.\r\n *\r\n * The input data is expected to be encoded in 8-bit interleaved RGBA channels. The output\r\n * buffer must be large enough to hold the data. The output buffer may be the same as the\r\n * input buffer.\r\n *\r\n * @param input data encoded in RGBA8888.\r\n * @param output buffer to hold HSVA8888 data.\r\n * @param width the width of the image\r\n * @param height the height of the image\r\n */\r\n public static void convertRgba8888ToHsva8888(\r\n ByteBuffer input, ByteBuffer output, int width, int height) {\r\n expectInputSize(input, width * height * 4);\r\n expectOutputSize(output, width * height * 4);\r\n nativeRgba8888ToHsva8888(input, output, width, height);\r\n }\r\n\r\n /**\r\n * Convert RGBA8888 to YCbCrA8888.\r\n *\r\n * The input data is expected to be encoded in 8-bit interleaved RGBA channels. The output\r\n * buffer must be large enough to hold the data. 
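A minimal usage sketch for the converters in this class, assuming hypothetical 640x480 dimensions; direct ByteBuffers are an assumption based on the native backing:

```java
// Both dimensions must be multiples of 2 for the YUV420p converter,
// per the contract documented above.
ByteBuffer yuv = ByteBuffer.allocateDirect(640 * 480 * 3 / 2); // Y plane + U and V planes
ByteBuffer rgba = ByteBuffer.allocateDirect(640 * 480 * 4);    // 4 bytes per output pixel
// ... fill `yuv` with YUV420-Planar data, e.g. from a camera callback ...
ColorSpace.convertYuv420pToRgba8888(yuv, rgba, 640, 480);
```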
The output buffer may be the same as the\r\n * input buffer.\r\n *\r\n * @param input data encoded in RGBA8888.\r\n * @param output buffer to hold YCbCrA8888 data.\r\n * @param width the width of the image\r\n * @param height the height of the image\r\n */\r\n public static void convertRgba8888ToYcbcra8888(\r\n ByteBuffer input, ByteBuffer output, int width, int height) {\r\n expectInputSize(input, width * height * 4);\r\n expectOutputSize(output, width * height * 4);\r\n nativeRgba8888ToYcbcra8888(input, output, width, height);\r\n }\r\n\r\n private static void expectInputSize(ByteBuffer input, int expectedSize) {\r\n if (input.remaining() < expectedSize) {\r\n throw new IllegalArgumentException(\"Input buffer's size does not fit given width \"\r\n + \"and height! Expected: \" + expectedSize + \", Got: \" + input.remaining()\r\n + \".\");\r\n }\r\n }\r\n\r\n private static void expectOutputSize(ByteBuffer output, int expectedSize) {\r\n if (output.remaining() < expectedSize) {\r\n throw new IllegalArgumentException(\"Output buffer's size does not fit given width \"\r\n + \"and height! Expected: \" + expectedSize + \", Got: \" + output.remaining()\r\n + \".\");\r\n }\r\n }\r\n\r\n private static native void nativeYuv420pToRgba8888(\r\n ByteBuffer input, ByteBuffer output, int width, int height);\r\n\r\n private static native void nativeArgb8888ToRgba8888(\r\n ByteBuffer input, ByteBuffer output, int width, int height);\r\n\r\n private static native void nativeRgba8888ToHsva8888(\r\n ByteBuffer input, ByteBuffer output, int width, int height);\r\n\r\n private static native void nativeRgba8888ToYcbcra8888(\r\n ByteBuffer input, ByteBuffer output, int width, int height);\r\n\r\n static {\r\n System.loadLibrary(\"smartcamera_jni\");\r\n }\r\n\r\n}\r\n" }, { "alpha_fraction": 0.7752326130867004, "alphanum_fraction": 0.7752326130867004, "avg_line_length": 44.56666564941406, "blob_id": "78546c7d84b1e5f990e3907681f7a2da1e8ad33a", "content_id": "9ef395d524662850eed97fe64df3084967af3731", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 1397, "license_type": "permissive", "max_line_length": 77, "num_lines": 30, "path": "/location/lib/README.txt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "This library (com.android.location.provider.jar) is a shared java library\r\ncontaining classes required by unbundled location providers.\r\n\r\n--- Rules of this library ---\r\no This library is effectively a PUBLIC API for unbundled location providers\r\n that may be distributed outside the system image. So it MUST BE API STABLE.\r\n You can add but not remove. The rules are the same as for the\r\n public platform SDK API.\r\no This library can see and instantiate internal platform classes (such as\r\n ProviderRequest.java), but it must not expose them in any public method\r\n (or by extending them via inheritance). This would break clients of the\r\n library because they cannot see the internal platform classes.\r\n\r\nThis library is distributed in the system image, and loaded as\r\na shared library. So you can change the implementation, but not\r\nthe interface. In this way it is like framework.jar.\r\n\r\n--- Why does this library exist? 
---\r\n\r\nUnbundled location providers (such as the NetworkLocationProvider)\r\ncan not use internal platform classes.\r\n\r\nSo ideally all of these classes would be part of the public platform SDK API,\r\nbut that doesn't seem like a great idea when only applications with a special\r\nsignature can implement this API.\r\n\r\nThe compromise is this library.\r\n\r\nIt wraps internal platform classes (like ProviderRequest) with a stable\r\nAPI that does not leak the internal classes.\r\n" }, { "alpha_fraction": 0.4816800653934479, "alphanum_fraction": 0.7238605618476868, "avg_line_length": 34.09677505493164, "blob_id": "c97fe3de08e830e0f46751b0e68aecd2af634203", "content_id": "9cd212fa78d5b662e43e5e2895ec4715b07fbf31", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 1119, "license_type": "permissive", "max_line_length": 82, "num_lines": 31, "path": "/libs/hwui/tests/scripts/prep_ryu.sh", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "adb root\r\nadb wait-for-device\r\nadb shell stop thermal-engine\r\nadb shell stop perfd\r\n\r\n# 51000 102000 204000 306000 408000 510000 612000 714000 816000 918000\r\n# 1020000 1122000 1224000 1326000 1428000 1530000 1632000 1734000 1836000 1912500\r\nS=1326000\r\necho \"set cpu to $S hz\";\r\nadb shell \"echo userspace > /sys/devices/system/cpu/cpu0/cpufreq/scaling_governor\"\r\nadb shell \"echo $S > /sys/devices/system/cpu/cpu0/cpufreq/scaling_max_freq\"\r\nadb shell \"echo $S > /sys/devices/system/cpu/cpu0/cpufreq/scaling_min_freq\"\r\n\r\n#01: core 76 MHz emc 408 MHz\r\n#02: core 153 MHz emc 665 MHz\r\n#03: core 230 MHz emc 800 MHz *\r\n#04: core 307 MHz emc 1065 MHz\r\n#05: core 384 MHz emc 1331 MHz\r\n#06: core 460 MHz emc 1600 MHz\r\n#07: core 537 MHz emc 1600 MHz\r\n#08: core 614 MHz emc 1600 MHz\r\n#09: core 691 MHz emc 1600 MHz\r\n#0a: core 768 MHz emc 1600 MHz\r\n#0b: core 844 MHz emc 1600 MHz\r\n#0c: core 921 MHz emc 1600 MHz\r\n#0d: core 998 MHz emc 1600 MHz\r\n#AC: core 230 MHz emc 800 MHz a A d D\r\n\r\necho \"set gpu to core 307 MHz emc 1065 MHz\"\r\n# it will lock gpu until you touch a screen\r\nadb shell \"echo 04 > /sys/devices/57000000.gpu/pstate\"\r\n" }, { "alpha_fraction": 0.7295669913291931, "alphanum_fraction": 0.7330083250999451, "avg_line_length": 35.90217208862305, "blob_id": "34e1c9af1858cdeb93a3074e34aa4942fdea9f48", "content_id": "459e6fe82fb16fe3e61e0cc1ee5c627cac870da4", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 3487, "license_type": "permissive", "max_line_length": 98, "num_lines": 92, "path": "/tests/net/common/java/android/net/NetworkStackTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2019 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\npackage 
android.net;\r\n\r\nimport static android.Manifest.permission.NETWORK_STACK;\r\nimport static android.content.pm.PackageManager.PERMISSION_DENIED;\r\nimport static android.content.pm.PackageManager.PERMISSION_GRANTED;\r\nimport static android.net.NetworkStack.PERMISSION_MAINLINE_NETWORK_STACK;\r\nimport static android.net.NetworkStack.checkNetworkStackPermission;\r\nimport static android.net.NetworkStack.checkNetworkStackPermissionOr;\r\n\r\nimport static org.junit.Assert.assertEquals;\r\nimport static org.junit.Assert.fail;\r\nimport static org.mockito.ArgumentMatchers.eq;\r\nimport static org.mockito.Mockito.any;\r\nimport static org.mockito.Mockito.when;\r\n\r\nimport android.content.Context;\r\nimport android.os.Build;\r\nimport android.os.IBinder;\r\n\r\nimport androidx.test.runner.AndroidJUnit4;\r\n\r\nimport com.android.testutils.DevSdkIgnoreRule;\r\nimport com.android.testutils.DevSdkIgnoreRule.IgnoreUpTo;\r\n\r\nimport org.junit.Before;\r\nimport org.junit.Rule;\r\nimport org.junit.Test;\r\nimport org.junit.runner.RunWith;\r\nimport org.mockito.Mock;\r\nimport org.mockito.MockitoAnnotations;\r\n\r\n@RunWith(AndroidJUnit4.class)\r\npublic class NetworkStackTest {\r\n private static final String [] OTHER_PERMISSION = {\"otherpermission1\", \"otherpermission2\"};\r\n\r\n @Rule\r\n public DevSdkIgnoreRule mDevSdkIgnoreRule = new DevSdkIgnoreRule();\r\n\r\n @Mock Context mCtx;\r\n @Mock private IBinder mConnectorBinder;\r\n\r\n @Before public void setUp() throws Exception {\r\n MockitoAnnotations.initMocks(this);\r\n }\r\n\r\n @Test\r\n public void testCheckNetworkStackPermission() throws Exception {\r\n when(mCtx.checkCallingOrSelfPermission(eq(NETWORK_STACK))).thenReturn(PERMISSION_GRANTED);\r\n when(mCtx.checkCallingOrSelfPermission(eq(PERMISSION_MAINLINE_NETWORK_STACK)))\r\n .thenReturn(PERMISSION_DENIED);\r\n checkNetworkStackPermission(mCtx);\r\n checkNetworkStackPermissionOr(mCtx, OTHER_PERMISSION);\r\n\r\n when(mCtx.checkCallingOrSelfPermission(eq(NETWORK_STACK))).thenReturn(PERMISSION_DENIED);\r\n when(mCtx.checkCallingOrSelfPermission(eq(PERMISSION_MAINLINE_NETWORK_STACK)))\r\n .thenReturn(PERMISSION_GRANTED);\r\n checkNetworkStackPermission(mCtx);\r\n checkNetworkStackPermissionOr(mCtx, OTHER_PERMISSION);\r\n\r\n when(mCtx.checkCallingOrSelfPermission(any())).thenReturn(PERMISSION_DENIED);\r\n\r\n try {\r\n checkNetworkStackPermissionOr(mCtx, OTHER_PERMISSION);\r\n } catch (SecurityException e) {\r\n // Expect to get a SecurityException\r\n return;\r\n }\r\n\r\n fail(\"Expect fail but permission granted.\");\r\n }\r\n\r\n @Test @IgnoreUpTo(Build.VERSION_CODES.Q)\r\n public void testGetService() {\r\n NetworkStack.setServiceForTest(mConnectorBinder);\r\n assertEquals(NetworkStack.getService(), mConnectorBinder);\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.7522796392440796, "alphanum_fraction": 0.7522796392440796, "avg_line_length": 39.25, "blob_id": "4f085bd4dd09fa87d4bcfdd992ffe4ecaa9265f3", "content_id": "87ff4d949bd15acda56422d6e76b55f0539dc4d7", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "HTML", "length_bytes": 658, "license_type": "permissive", "max_line_length": 97, "num_lines": 16, "path": "/core/java/android/hardware/usb/package.html", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "<HTML>\r\n<BODY>\r\n<p>Provides support to communicate with USB hardware peripherals that are connected to \r\nAndroid-powered devices.</p>\r\n\r\n<p>For more information, see 
the\r\n<a href=\"{@docRoot}guide/topics/connectivity/usb/index.html\">USB</a> guide.</p>\r\n{@more}\r\n\r\n<p>Use {@link android.hardware.usb.UsbManager} to access the state of the USB and to\r\ncommunicate with connected hardware peripherals. Use {@link android.hardware.usb.UsbDevice} to\r\ncommunicate with the hardware peripheral if the Android-powered device is acting as the USB host.\r\nUse {@link android.hardware.usb.UsbAccessory} if the peripheral is acting as the USB host.</p>\r\n\r\n</BODY>\r\n</HTML>" }, { "alpha_fraction": 0.6272246241569519, "alphanum_fraction": 0.6608946323394775, "avg_line_length": 23.048192977905273, "blob_id": "7a7736a7015efa7d1ae962670324e6176a50ae10", "content_id": "2e416fa0232b19488ae5a14653f10c0ca04614d6", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2079, "license_type": "permissive", "max_line_length": 74, "num_lines": 83, "path": "/startop/scripts/iorap/lib/inode2filename_test.py", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\r\n#\r\n# Copyright 2019, The Android Open Source Project\r\n#\r\n# Licensed under the Apache License, Version 2.0 (the \"License\");\r\n# you may not use this file except in compliance with the License.\r\n# You may obtain a copy of the License at\r\n#\r\n# http://www.apache.org/licenses/LICENSE-2.0\r\n#\r\n# Unless required by applicable law or agreed to in writing, software\r\n# distributed under the License is distributed on an \"AS IS\" BASIS,\r\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n# See the License for the specific language governing permissions and\r\n# limitations under the License.\r\n#\r\n\r\n\"\"\"\r\nUnit tests for inode2filename module.\r\n\r\nInstall:\r\n $> sudo apt-get install python3-pytest ## OR\r\n $> pip install -U pytest\r\nSee also https://docs.pytest.org/en/latest/getting-started.html\r\n\r\nUsage:\r\n $> ./inode2filename_test.py\r\n $> pytest inode2filename_test.py\r\n $> python -m pytest inode2filename_test.py\r\n\r\nSee also https://docs.pytest.org/en/latest/usage.html\r\n\"\"\"\r\n\r\n# global imports\r\nfrom contextlib import contextmanager\r\nimport io\r\nimport shlex\r\nimport sys\r\nimport typing\r\n\r\n# pip imports\r\nimport pytest\r\n\r\n# local imports\r\nfrom inode2filename import *\r\n\r\ndef create_inode2filename(*contents):\r\n buf = io.StringIO()\r\n\r\n for c in contents:\r\n buf.write(c)\r\n buf.write(\"\\n\")\r\n\r\n buf.seek(0)\r\n\r\n i2f = Inode2Filename(buf)\r\n\r\n buf.close()\r\n\r\n return i2f\r\n\r\ndef test_inode2filename():\r\n a = create_inode2filename(\"\")\r\n assert len(a) == 0\r\n assert a.resolve(1, 2) == None\r\n\r\n a = create_inode2filename(\"1 2 3 foo.bar\")\r\n assert len(a) == 1\r\n assert a.resolve(1, 2) == \"foo.bar\"\r\n assert a.resolve(4, 5) == None\r\n\r\n a = create_inode2filename(\"1 2 3 foo.bar\", \"4 5 6 bar.baz\")\r\n assert len(a) == 2\r\n assert a.resolve(1, 2) == \"foo.bar\"\r\n assert a.resolve(4, 5) == \"bar.baz\"\r\n\r\n a = create_inode2filename(\"1567d 8910 -1 /a/b/c/\", \"4 5 6 bar.baz\")\r\n assert len(a) == 2\r\n assert a.resolve(1567, 8910) == \"/a/b/c/\"\r\n assert a.resolve(4, 5) == \"bar.baz\"\r\n\r\nif __name__ == '__main__':\r\n pytest.main()\r\n" }, { "alpha_fraction": 0.6546478867530823, "alphanum_fraction": 0.6608450412750244, "avg_line_length": 36.58695602416992, "blob_id": "3121b45ad78385b6c9c82ef8aade4fc57ef4187a", "content_id": 
"c90eb623839130ab2ffd4a4b27a889b2db0b1b55", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1775, "license_type": "permissive", "max_line_length": 93, "num_lines": 46, "path": "/core/tests/hosttests/test-apps/MultiDexLegacyTestServices/src/com/android/framework/multidexlegacytestservices/ReflectIntermediateClass.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2014 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.framework.multidexlegacytestservices;\r\n\r\nimport java.lang.reflect.InvocationTargetException;\r\nimport java.lang.reflect.Method;\r\n\r\n/**\r\n * Offer an indirection to some Big0xx classes and have their initialization\r\n * spread along a period of time.\r\n */\r\npublic class ReflectIntermediateClass {\r\n\r\n public static int get(int from, int to, int sleepMillis) throws ClassNotFoundException,\r\n SecurityException, NoSuchMethodException, IllegalArgumentException,\r\n IllegalAccessException, InvocationTargetException, InstantiationException {\r\n int value = 0;\r\n for (int i = from; i <= to; i++) {\r\n Class<?> bigClass = Class.forName(\r\n \"com.android.framework.multidexlegacytestservices.manymethods.Big0\" + i);\r\n Method get = bigClass.getMethod(\"get\" + i);\r\n value += ((Integer) get.invoke(bigClass.newInstance())).intValue();\r\n try {\r\n Thread.sleep(sleepMillis);\r\n } catch (InterruptedException e) {\r\n e.printStackTrace();\r\n }\r\n }\r\n return value;\r\n }\r\n\r\n}\r\n" }, { "alpha_fraction": 0.7149999737739563, "alphanum_fraction": 0.7350000143051147, "avg_line_length": 26.571428298950195, "blob_id": "ec92ffff45e07919062137692ad6f05ab7679873", "content_id": "a92c6d485ff091c61b7a9a1363f275af0ea80257", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 200, "license_type": "permissive", "max_line_length": 45, "num_lines": 7, "path": "/tools/codegen/src/com/android/codegen/SharedConstants.kt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "package com.android.codegen\r\n\r\nconst val CODEGEN_NAME = \"codegen\"\r\nconst val CODEGEN_VERSION = \"1.0.15\"\r\n\r\nconst val CANONICAL_BUILDER_CLASS = \"Builder\"\r\nconst val BASE_BUILDER_CLASS = \"BaseBuilder\"\r\n" }, { "alpha_fraction": 0.6084368824958801, "alphanum_fraction": 0.6193064451217651, "avg_line_length": 30.183332443237305, "blob_id": "5d595853a67c804b16a2660764109a5df9f6ff41", "content_id": "d779808e0823d8455b7d5a952021ad251a73b973", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 3864, "license_type": "permissive", "max_line_length": 110, "num_lines": 120, "path": 
"/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/TextureSource.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2011 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage androidx.media.filterfw;\r\n\r\nimport android.graphics.Bitmap;\r\nimport android.opengl.GLES11Ext;\r\nimport android.opengl.GLES20;\r\n\r\nimport java.nio.ByteBuffer;\r\n\r\npublic class TextureSource {\r\n\r\n private int mTexId;\r\n private int mTarget;\r\n private boolean mIsOwner;\r\n private boolean mIsAllocated = false;\r\n\r\n public static TextureSource fromTexture(int texId, int target) {\r\n return new TextureSource(texId, target, false);\r\n }\r\n\r\n public static TextureSource fromTexture(int texId) {\r\n return new TextureSource(texId, GLES20.GL_TEXTURE_2D, false);\r\n }\r\n\r\n public static TextureSource newTexture() {\r\n return new TextureSource(GLToolbox.generateTexture(), GLES20.GL_TEXTURE_2D, true);\r\n }\r\n\r\n public static TextureSource newExternalTexture() {\r\n return new TextureSource(GLToolbox.generateTexture(),\r\n GLES11Ext.GL_TEXTURE_EXTERNAL_OES,\r\n true);\r\n }\r\n\r\n public int getTextureId() {\r\n return mTexId;\r\n }\r\n\r\n public int getTarget() {\r\n return mTarget;\r\n }\r\n\r\n public void bind() {\r\n GLES20.glBindTexture(mTarget, mTexId);\r\n GLToolbox.checkGlError(\"glBindTexture\");\r\n }\r\n\r\n public void allocate(int width, int height) {\r\n //Log.i(\"TextureSource\", \"Allocating empty texture \" + mTexId + \": \" + width + \"x\" + height + \".\");\r\n GLToolbox.allocateTexturePixels(mTexId, mTarget, width, height);\r\n mIsAllocated = true;\r\n }\r\n\r\n public void allocateWithPixels(ByteBuffer pixels, int width, int height) {\r\n //Log.i(\"TextureSource\", \"Uploading pixels to texture \" + mTexId + \": \" + width + \"x\" + height + \".\");\r\n GLToolbox.setTexturePixels(mTexId, mTarget, pixels, width, height);\r\n mIsAllocated = true;\r\n }\r\n\r\n public void allocateWithBitmapPixels(Bitmap bitmap) {\r\n //Log.i(\"TextureSource\", \"Uploading pixels to texture \" + mTexId + \"!\");\r\n GLToolbox.setTexturePixels(mTexId, mTarget, bitmap);\r\n mIsAllocated = true;\r\n }\r\n\r\n public void generateMipmaps() {\r\n GLES20.glBindTexture(mTarget, mTexId);\r\n GLES20.glTexParameteri(mTarget,\r\n GLES20.GL_TEXTURE_MIN_FILTER,\r\n GLES20.GL_LINEAR_MIPMAP_LINEAR);\r\n GLES20.glGenerateMipmap(mTarget);\r\n GLES20.glBindTexture(mTarget, 0);\r\n }\r\n\r\n public void setParameter(int parameter, int value) {\r\n GLES20.glBindTexture(mTarget, mTexId);\r\n GLES20.glTexParameteri(mTarget, parameter, value);\r\n GLES20.glBindTexture(mTarget, 0);\r\n }\r\n\r\n /**\r\n * @hide\r\n */\r\n public void release() {\r\n if (GLToolbox.isTexture(mTexId) && mIsOwner) {\r\n GLToolbox.deleteTexture(mTexId);\r\n }\r\n mTexId = GLToolbox.textureNone();\r\n }\r\n\r\n @Override\r\n public String toString() {\r\n return 
\"TextureSource(id=\" + mTexId + \", target=\" + mTarget + \")\";\r\n }\r\n\r\n boolean isAllocated() {\r\n return mIsAllocated;\r\n }\r\n\r\n private TextureSource(int texId, int target, boolean isOwner) {\r\n mTexId = texId;\r\n mTarget = target;\r\n mIsOwner = isOwner;\r\n }\r\n}\r\n\r\n" }, { "alpha_fraction": 0.7522563934326172, "alphanum_fraction": 0.7534336447715759, "avg_line_length": 33.23041534423828, "blob_id": "66d0ffd97292965d2647e520d86fed819a0718f2", "content_id": "18e503120fe7aa2ee849738a819fb501a450f4d0", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 7645, "license_type": "permissive", "max_line_length": 133, "num_lines": 217, "path": "/packages/SystemUI/docs/dagger.md", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "# Dagger 2 in SystemUI\r\n*Dagger 2 is a dependency injection framework that compiles annotations to code\r\nto create dependencies without reflection*\r\n\r\n## Recommended reading\r\n\r\nGo read about Dagger 2.\r\n\r\n - [User's guide](https://google.github.io/dagger/users-guide)\r\n\r\nTODO: Add some links.\r\n\r\n## State of the world\r\n\r\nDagger 2 has been turned on for SystemUI and a early first pass has been taken\r\nfor converting everything in [Dependency.java](packages/systemui/src/com/android/systemui/Dependency.java)\r\nto use Dagger. Since a lot of SystemUI depends on Dependency, stubs have been added to Dependency \r\nto proxy any gets through to the instances provided by dagger, this will allow migration of SystemUI \r\nthrough a number of CLs.\r\n\r\n### How it works in SystemUI\r\n\r\nFor the classes that we're using in Dependency and are switching to dagger, the\r\nequivalent dagger version is using `@Singleton` and therefore only has one instance.\r\nTo have the single instance span all of SystemUI and be easily accessible for\r\nother components, there is a single root `@Component` that exists that generates\r\nthese. The component lives in [SystemUIFactory](packages/systemui/src/com/android/systemui/SystemUIFactory.java)\r\nand is called `SystemUIRootComponent`.\r\n\r\n```java\r\n\r\n@Singleton\r\n@Component(modules = {SystemUIFactory.class, DependencyProvider.class, DependencyBinder.class,\r\n ContextHolder.class})\r\npublic interface SystemUIRootComponent {\r\n @Singleton\r\n Dependency.DependencyInjector createDependency();\r\n}\r\n```\r\n\r\nThe root component is composed of root modules, which in turn provide the global singleton \r\ndependencies across all of SystemUI.\r\n\r\n- `ContextHolder` is just a wrapper that provides a context.\r\n\r\n- `SystemUIFactory` `@Provides` dependencies that need to be overridden by SystemUI\r\nvariants (like other form factors e.g. Car). \r\n\r\n- `DependencyBinder` creates the mapping from interfaces to implementation classes. \r\n\r\n- `DependencyProvider` provides or binds any remaining depedencies required.\r\n\r\n### Adding injection to a new SystemUI object\r\n\r\nAnything that depends on any `@Singleton` provider from SystemUIRootComponent\r\nshould be declared as a `@Subcomponent` of the root component. This requires\r\ndeclaring your own interface for generating your own modules or just the\r\nobject you need injected. 
The subcomponent also needs to be added to\r\nSystemUIRootComponent in SystemUIFactory so it can be acquired.\r\n\r\n```java\r\npublic interface SystemUIRootComponent {\r\n+ @Singleton\r\n+ Dependency.DependencyInjector createDependency();\r\n}\r\n\r\npublic class Dependency extends SystemUI {\r\n //...\r\n+ @Subcomponent\r\n+ public interface DependencyInjector {\r\n+ Dependency createSystemUI();\r\n+ }\r\n}\r\n```\r\n\r\nFor objects which extend SystemUI and require injection, you can define an\r\ninjector that creates the injected object for you. This other class should\r\nbe referenced in [@string/config_systemUIServiceComponents](packages/SystemUI/res/values/config.xml).\r\n\r\n```java\r\npublic static class DependencyCreator implements Injector {\r\n @Override\r\n public SystemUI apply(Context context) {\r\n return SystemUIFactory.getInstance().getRootComponent()\r\n .createDependency()\r\n .createSystemUI();\r\n }\r\n}\r\n```\r\n\r\n### Adding a new injectable object\r\n\r\nFirst tag the constructor with `@Inject`. Also tag it with `@Singleton` if only one\r\ninstance should be created.\r\n\r\n```java\r\n@Singleton\r\npublic class SomethingController {\r\n @Inject\r\n public SomethingController(Context context,\r\n @Named(MAIN_HANDLER_NAME) Handler mainHandler) {\r\n // context and mainHandler will be automatically populated.\r\n }\r\n}\r\n```\r\n\r\nIf you have an interface class and an implementation class, dagger needs to know\r\nhow to map it. The simplest way to do this is to add an `@Provides` method to\r\nDependencyProvider. The type of the return value tells dagger which dependency it's providing.\r\n\r\n```java\r\npublic class DependencyProvider {\r\n //...\r\n @Singleton\r\n @Provides\r\n public SomethingController provideSomethingController(Context context,\r\n @Named(MAIN_HANDLER_NAME) Handler mainHandler) {\r\n return new SomethingControllerImpl(context, mainHandler);\r\n }\r\n}\r\n```\r\n\r\nIf you need to access this from Dependency#get, then add an adapter to Dependency\r\nthat maps to the instance provided by Dagger. The changes should be similar\r\nto the following diff.\r\n\r\n```java\r\npublic class Dependency {\r\n //...\r\n @Inject Lazy<SomethingController> mSomethingController;\r\n //...\r\n public void start() {\r\n //...\r\n mProviders.put(SomethingController.class, mSomethingController::get);\r\n }\r\n}\r\n```\r\n\r\n### Using injection with Fragments\r\n\r\nFragments are created as part of the FragmentManager, so they need to be\r\nset up so the manager knows how to create them. To do that, add a method\r\nto com.android.systemui.fragments.FragmentService$FragmentCreator that\r\nreturns your fragment class. That's all that's required; once the method\r\nexists, FragmentService will automatically pick it up and use injection\r\nwhenever your fragment needs to be created.\r\n\r\n```java\r\npublic interface FragmentCreator {\r\n+ NavigationBarFragment createNavigationBar();\r\n}\r\n```\r\n\r\nIf you need to create your fragment (i.e. 
for the add or replace transaction),\r\nthen the FragmentHostManager can do this for you.\r\n\r\n```java\r\nFragmentHostManager.get(view).create(NavigationBarFragment.class);\r\n```\r\n\r\n### Using injection with Views\r\n\r\nGenerally, you shouldn't need to inject for a view, as the view should\r\nbe relatively self contained and logic that requires injection should be\r\nmoved to a higher level construct such as a Fragment or a top-level SystemUI\r\ncomponent, see above for how to do injection for both of which.\r\n\r\nStill here? Yeah, ok, sysui has a lot of pre-existing views that contain a\r\nlot of code that could benefit from injection and will need to be migrated\r\noff from Dependency#get uses. Similar to how fragments are injected, the view\r\nneeds to be added to the interface\r\ncom.android.systemui.util.InjectionInflationController$ViewInstanceCreator.\r\n\r\n```java\r\npublic interface ViewInstanceCreator {\r\n+ QuickStatusBarHeader createQsHeader();\r\n}\r\n```\r\n\r\nPresumably you need to inflate that view from XML (otherwise why do you\r\nneed anything special? see earlier sections about generic injection). To obtain\r\nan inflater that supports injected objects, call InjectionInflationController#injectable,\r\nwhich will wrap the inflater it is passed in one that can create injected\r\nobjects when needed.\r\n\r\n```java\r\n@Override\r\npublic View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container,\r\n Bundle savedInstanceState) {\r\n return mInjectionInflater.injectable(inflater).inflate(R.layout.my_layout, container, false);\r\n}\r\n```\r\n\r\nThere is one other important thing to note about injecting with views. SysUI\r\nalready has a Context in its global dagger component, so if you simply inject\r\na Context, you will not get the one that the view should have with proper\r\ntheming. Because of this, always ensure to tag views that have @Inject with\r\nthe @Named view context.\r\n\r\n```java\r\npublic CustomView(@Named(VIEW_CONTEXT) Context themedViewContext, AttributeSet attrs,\r\n OtherCustomDependency something) {\r\n //...\r\n}\r\n```\r\n\r\n## Updating Dagger2\r\n\r\nBinaries can be downloaded from https://repo1.maven.org/maven2/com/google/dagger/ and then loaded\r\ninto\r\n[/prebuilts/tools/common/m2/repository/com/google/dagger/](http://cs/android/prebuilts/tools/common/m2/repository/com/google/dagger/)\r\n\r\n\r\n## TODO List\r\n\r\n - Eliminate usages of Dependency#get\r\n - Add links in above TODO\r\n" }, { "alpha_fraction": 0.7987856268882751, "alphanum_fraction": 0.7987856268882751, "avg_line_length": 43.69032287597656, "blob_id": "6d1e063b3e11e39e6e5e755e46902f183cb03d2f", "content_id": "2d279421fe03f6f59590737f2830440ff2238004", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 7086, "license_type": "permissive", "max_line_length": 150, "num_lines": 155, "path": "/packages/SystemUI/README.md", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "# SystemUI\r\n\r\n“Everything you see in Android that's not an app”\r\n\r\nSystemUI is a persistent process that provides UI for the system but outside\r\nof the system_server process.\r\n\r\nThe starting point for most of sysui code is a list of services that extend\r\nSystemUI that are started up by SystemUIApplication. 
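A sketch of the shape such a service takes; the callback names follow the descriptions in this document, but the exact signatures are assumptions:

```java
// Illustrative skeleton only; the real SystemUI base class may differ.
public class MyFeature extends SystemUI {
    @Override
    public void start() {
        // Invoked when SystemUIApplication starts the services listed in
        // config_systemUIServiceComponents.
    }

    @Override
    protected void onConfigurationChanged(Configuration newConfig) {
        // The configuration-change callback described below.
    }

    @Override
    protected void onBootCompleted() {
        // Services may be started before the device finishes booting.
    }
}
```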
These services then depend\r\non some custom dependency injection provided by Dependency.\r\n\r\nInputs directed at sysui (as opposed to general listeners) generally come in\r\nthrough IStatusBar. Outputs from sysui are through a variety of private APIs to\r\nthe android platform all over.\r\n\r\n## SystemUIApplication\r\n\r\nWhen SystemUIApplication starts up, it will start up the services listed in\r\nconfig_systemUIServiceComponents or config_systemUIServiceComponentsPerUser.\r\n\r\nEach of these services extends SystemUI. SystemUI provides them with a Context\r\nand gives them callbacks for onConfigurationChanged (this historically was\r\nthe main path for onConfigurationChanged, now also happens through\r\nConfigurationController). They also receive a callback for onBootCompleted\r\nsince these objects may be started before the device has finished booting.\r\n\r\nEach SystemUI service is expected to be a major part of system ui and the\r\ngoal is to minimize communication between them. So in general they should be\r\nrelatively silo'd.\r\n\r\n## Dependencies\r\n\r\nThe first SystemUI service that is started should always be Dependency.\r\nDependency provides a static method for getting a hold of dependencies that\r\nhave a lifecycle that spans sysui. Dependency has code for how to create all\r\nmanually added dependencies. SystemUIFactory is also capable of\r\nadding/replacing these dependencies.\r\n\r\nDependencies are lazily initialized, so if a Dependency is never referenced at\r\nruntime, it will never be created.\r\n\r\nIf an instantiated dependency implements Dumpable it will be included in dumps\r\nof sysui (and bug reports), allowing it to include current state information.\r\nThis is how \*Controllers dump state to bug reports.\r\n\r\nIf an instantiated dependency implements ConfigurationChangeReceiver it will\r\nreceive onConfigurationChange callbacks when the configuration changes.\r\n\r\n## IStatusBar\r\n\r\nCommandQueue is the object that receives all of the incoming events from the\r\nsystem_server. It extends IStatusBar and dispatches those callbacks back to any\r\nnumber of listeners. The system_server gets a hold of the IStatusBar when\r\nStatusBar calls IStatusBarService#registerStatusBar, so if StatusBar is not\r\nincluded in the XML service list, it will not be registered with the OS.\r\n\r\nCommandQueue posts all incoming callbacks to a handler and then dispatches\r\nthose messages to each callback that is currently registered. CommandQueue\r\nalso tracks the current value of disable flags and will call #disable\r\nimmediately for any callbacks added.\r\n\r\nThere are a few places where CommandQueue is used as a bus to communicate\r\nacross sysui. 
Such as when StatusBar calls CommandQueue#recomputeDisableFlags.\r\nThis is generally used as a shortcut to directly trigger CommandQueue rather than\r\ncalling StatusManager and waiting for the call to come back to IStatusBar.\r\n\r\n## Default SystemUI services list\r\n\r\n### [com.android.systemui.Dependency](/packages/SystemUI/src/com/android/systemui/Dependency.java)\r\n\r\nProvides custom dependency injection.\r\n\r\n### [com.android.systemui.util.NotificationChannels](/packages/SystemUI/src/com/android/systemui/util/NotificationChannels.java)\r\n\r\nCreates/initializes the channels sysui uses when posting notifications.\r\n\r\n### [com.android.systemui.keyguard.KeyguardViewMediator](/packages/SystemUI/src/com/android/systemui/keyguard/KeyguardViewMediator.java)\r\n\r\nManages keyguard view state.\r\n\r\n### [com.android.systemui.recents.Recents](/packages/SystemUI/src/com/android/systemui/recents/Recents.java)\r\n\r\nRecents tracks all the data needed for recents and starts/stops the recents\r\nactivity. It provides this cached data to RecentsActivity when it is started.\r\n\r\n### [com.android.systemui.volume.VolumeUI](/packages/SystemUI/src/com/android/systemui/volume/VolumeUI.java)\r\n\r\nRegisters all the callbacks/listeners required to show the Volume dialog when\r\nit should be shown.\r\n\r\n### [com.android.systemui.stackdivider.Divider](/packages/SystemUI/src/com/android/systemui/stackdivider/Divider.java)\r\n\r\nShows the drag handle for the divider between two apps when in split screen\r\nmode.\r\n\r\n### [com.android.systemui.status.phone.StatusBar](/packages/SystemUI/src/com/android/systemui/status/phone/StatusBar.java)\r\n\r\nThis shows the UI for the status bar and the notification shade it contains.\r\nIt also contains a significant amount of other UI that interacts with these\r\nsurfaces (keyguard, AOD, etc.). 
StatusBar also contains a notification listener\r\nto receive notification callbacks.\r\n\r\n### [com.android.systemui.usb.StorageNotification](/packages/SystemUI/src/com/android/systemui/usb/StorageNotification.java)\r\n\r\nTracks USB status and sends notifications for it.\r\n\r\n### [com.android.systemui.power.PowerUI](/packages/SystemUI/src/com/android/systemui/power/PowerUI.java)\r\n\r\nTracks power status and sends notifications for low battery/power saver.\r\n\r\n### [com.android.systemui.media.RingtonePlayer](/packages/SystemUI/src/com/android/systemui/media/RingtonePlayer.java)\r\n\r\nPlays ringtones.\r\n\r\n### [com.android.systemui.keyboard.KeyboardUI](/packages/SystemUI/src/com/android/systemui/keyboard/KeyboardUI.java)\r\n\r\nShows UI for keyboard shortcuts (triggered by keyboard shortcut).\r\n\r\n### [com.android.systemui.pip.PipUI](/packages/SystemUI/src/com/android/systemui/pip/PipUI.java)\r\n\r\nShows the overlay controls when Pip is showing.\r\n\r\n### [com.android.systemui.shortcut.ShortcutKeyDispatcher](/packages/SystemUI/src/com/android/systemui/shortcut/ShortcutKeyDispatcher.java)\r\n\r\nDispatches shortcut to System UI components.\r\n\r\n### @string/config_systemUIVendorServiceComponent\r\n\r\nComponent allowing the vendor/OEM to inject a custom component.\r\n\r\n### [com.android.systemui.util.leak.GarbageMonitor$Service](/packages/SystemUI/src/com/android/systemui/util/leak/GarbageMonitor.java)\r\n\r\nTracks large objects in sysui to see if there are leaks.\r\n\r\n### [com.android.systemui.LatencyTester](/packages/SystemUI/src/com/android/systemui/LatencyTester.java)\r\n\r\nClass that only runs on debuggable builds that listens to broadcasts that\r\nsimulate actions in the system that are used for testing the latency.\r\n\r\n### [com.android.systemui.globalactions.GlobalActionsComponent](/packages/SystemUI/src/com/android/systemui/globalactions/GlobalActionsComponent.java)\r\n\r\nShows the global actions dialog (long-press power).\r\n\r\n### [com.android.systemui.ScreenDecorations](/packages/SystemUI/src/com/android/systemui/ScreenDecorations.java)\r\n\r\nDraws decorations about the screen in software (e.g. 
rounded corners, cutouts).\r\n\r\n### [com.android.systemui.biometrics.BiometricDialogImpl](/packages/SystemUI/src/com/android/systemui/biometrics/BiometricDialogImpl.java)\r\n\r\nBiometric UI.\r\n\r\n---\r\n\r\n * [Plugins](/packages/SystemUI/docs/plugins.md)\r\n * [Demo Mode](/packages/SystemUI/docs/demo_mode.md)\r\n" }, { "alpha_fraction": 0.6548445820808411, "alphanum_fraction": 0.6643509864807129, "avg_line_length": 34.439998626708984, "blob_id": "82054668a74b90574c3b3a06e4bd1b973bdd5410", "content_id": "ed09d5fe26747438b918b2f06a9060bcfa2e6885", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2735, "license_type": "permissive", "max_line_length": 100, "num_lines": 75, "path": "/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/frametovalues.cpp", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2012 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n// Native function to extract histogram from image (handed down as ByteBuffer).\r\n\r\n#include \"frametovalues.h\"\r\n\r\n#include <string.h>\r\n#include <jni.h>\r\n#include <unistd.h>\r\n#include <android/log.h>\r\n\r\n#include \"imgprocutil.h\"\r\n\r\njboolean Java_androidx_media_filterpacks_image_ToGrayValuesFilter_toGrayValues(\r\n JNIEnv* env, jclass clazz, jobject imageBuffer, jobject grayBuffer )\r\n{\r\n unsigned char* pixelPtr = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));\r\n unsigned char* grayPtr = static_cast<unsigned char*>(env->GetDirectBufferAddress(grayBuffer));\r\n\r\n if (pixelPtr == 0 || grayPtr == 0) {\r\n return JNI_FALSE;\r\n }\r\n\r\n int numPixels = env->GetDirectBufferCapacity(imageBuffer) / 4;\r\n\r\n // TODO: the current implementation is focused on the correctness not performance.\r\n // If performance becomes an issue, it is better to increment pixelPtr directly.\r\n int disp = 0;\r\n for(int idx = 0; idx < numPixels; idx++, disp+=4) {\r\n int R = *(pixelPtr + disp);\r\n int G = *(pixelPtr + disp + 1);\r\n int B = *(pixelPtr + disp + 2);\r\n int gray = getIntensityFast(R, G, B);\r\n *(grayPtr+idx) = static_cast<unsigned char>(gray);\r\n }\r\n\r\n return JNI_TRUE;\r\n}\r\n\r\njboolean Java_androidx_media_filterpacks_image_ToRgbValuesFilter_toRgbValues(\r\n JNIEnv* env, jclass clazz, jobject imageBuffer, jobject rgbBuffer )\r\n{\r\n unsigned char* pixelPtr = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));\r\n unsigned char* rgbPtr = static_cast<unsigned char*>(env->GetDirectBufferAddress(rgbBuffer));\r\n\r\n if (pixelPtr == 0 || rgbPtr == 0) {\r\n return JNI_FALSE;\r\n }\r\n\r\n int numPixels = env->GetDirectBufferCapacity(imageBuffer) / 4;\r\n\r\n // TODO: this code could be revised to improve the performance as the TODO above.\r\n int pixelDisp = 0;\r\n int rgbDisp = 0;\r\n for(int idx = 0; idx < numPixels; idx++, pixelDisp += 4, 
rgbDisp += 3) {\r\n for (int c = 0; c < 3; ++c) {\r\n *(rgbPtr + rgbDisp + c) = *(pixelPtr + pixelDisp + c);\r\n }\r\n }\r\n return JNI_TRUE;\r\n}\r\n\r\n" }, { "alpha_fraction": 0.6084731221199036, "alphanum_fraction": 0.6204151511192322, "avg_line_length": 37.53932571411133, "blob_id": "4b0c7b71d6a39ee30e4c1a776eda19b7858d9072", "content_id": "7fa6058aa37d62292e5b29e5b133b0bda6e70be3", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 3517, "license_type": "permissive", "max_line_length": 90, "num_lines": 89, "path": "/tests/HwAccelerationTest/src/com/android/test/hwui/QuickRejectActivity.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2010 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.test.hwui;\r\n\r\nimport android.app.Activity;\r\nimport android.content.Context;\r\nimport android.graphics.Bitmap;\r\nimport android.graphics.BitmapFactory;\r\nimport android.graphics.Canvas;\r\nimport android.graphics.Paint;\r\nimport android.os.Bundle;\r\nimport android.util.Log;\r\nimport android.view.View;\r\n\r\n@SuppressWarnings({\"UnusedDeclaration\"})\r\npublic class QuickRejectActivity extends Activity {\r\n @Override\r\n protected void onCreate(Bundle savedInstanceState) {\r\n super.onCreate(savedInstanceState);\r\n final QuickRejectView view = new QuickRejectView(this);\r\n setContentView(view);\r\n }\r\n\r\n static class QuickRejectView extends View {\r\n private Paint mBitmapPaint;\r\n private final Bitmap mBitmap1;\r\n\r\n QuickRejectView(Context c) {\r\n super(c);\r\n\r\n mBitmap1 = BitmapFactory.decodeResource(c.getResources(), R.drawable.sunset1);\r\n\r\n mBitmapPaint = new Paint();\r\n mBitmapPaint.setFilterBitmap(true);\r\n }\r\n\r\n @Override\r\n protected void onDraw(Canvas canvas) {\r\n super.onDraw(canvas);\r\n\r\n int count = canvas.getSaveCount();\r\n Log.d(\"OpenGLRenderer\", \"count=\" + count);\r\n count = canvas.save();\r\n Log.d(\"OpenGLRenderer\", \"count after save=\" + count);\r\n count = canvas.getSaveCount();\r\n Log.d(\"OpenGLRenderer\", \"getSaveCount after save=\" + count);\r\n canvas.restore();\r\n count = canvas.getSaveCount();\r\n Log.d(\"OpenGLRenderer\", \"count after restore=\" + count);\r\n canvas.save();\r\n Log.d(\"OpenGLRenderer\", \"count after save=\" + canvas.getSaveCount());\r\n canvas.save();\r\n Log.d(\"OpenGLRenderer\", \"count after save=\" + canvas.getSaveCount());\r\n canvas.save();\r\n Log.d(\"OpenGLRenderer\", \"count after save=\" + canvas.getSaveCount());\r\n canvas.restoreToCount(count);\r\n count = canvas.getSaveCount();\r\n Log.d(\"OpenGLRenderer\", \"count after restoreToCount=\" + count);\r\n count = canvas.saveLayer(0, 0, 10, 10, mBitmapPaint, Canvas.ALL_SAVE_FLAG);\r\n Log.d(\"OpenGLRenderer\", \"count after saveLayer=\" + count);\r\n count = canvas.getSaveCount();\r\n 
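// Note on findMid() above: it returns the positive root of
//   x^2 + h*x - 2*l*h = 0
// (check: x = (-h + sqrt(h*h + 8*l*h)) / 2 implies x*(x + h) = 2*l*h),
// i.e. the density at which selection switches from bucket l to bucket h.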
Log.d(\"OpenGLRenderer\", \"getSaveCount after saveLayer=\" + count);\r\n canvas.restore();\r\n count = canvas.getSaveCount();\r\n Log.d(\"OpenGLRenderer\", \"count after restore=\" + count);\r\n\r\n canvas.save();\r\n canvas.clipRect(0.0f, 0.0f, 40.0f, 40.0f);\r\n canvas.drawBitmap(mBitmap1, 0.0f, 0.0f, mBitmapPaint);\r\n canvas.drawBitmap(mBitmap1, -mBitmap1.getWidth(), 0.0f, mBitmapPaint);\r\n canvas.drawBitmap(mBitmap1, 50.0f, 0.0f, mBitmapPaint);\r\n canvas.restore();\r\n }\r\n }\r\n}" }, { "alpha_fraction": 0.568209171295166, "alphanum_fraction": 0.5738860964775085, "avg_line_length": 33.66257858276367, "blob_id": "b5defbad6dda1160895e4b889633c6469b81416c", "content_id": "85ce9b7424cb661d1d2aab9cb4d123b8d96c0fdb", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5813, "license_type": "permissive", "max_line_length": 105, "num_lines": 163, "path": "/tools/split-select/RuleGenerator.cpp", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2014 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#include \"RuleGenerator.h\"\r\n#include \"aapt/SdkConstants.h\"\r\n\r\n#include <algorithm>\r\n#include <cmath>\r\n#include <vector>\r\n#include <androidfw/ResourceTypes.h>\r\n\r\nusing namespace android;\r\n\r\nnamespace split {\r\n\r\n// Calculate the point at which the density selection changes between l and h.\r\nstatic inline int findMid(int l, int h) {\r\n double root = sqrt((h*h) + (8*l*h));\r\n return (double(-h) + root) / 2.0;\r\n}\r\n\r\nsp<Rule> RuleGenerator::generateDensity(const Vector<int>& allDensities, size_t index) {\r\n if (allDensities[index] != ResTable_config::DENSITY_ANY) {\r\n sp<Rule> densityRule = new Rule();\r\n densityRule->op = Rule::AND_SUBRULES;\r\n\r\n const bool hasAnyDensity = std::find(allDensities.begin(),\r\n allDensities.end(), (int) ResTable_config::DENSITY_ANY) != allDensities.end();\r\n\r\n if (hasAnyDensity) {\r\n sp<Rule> version = new Rule();\r\n version->op = Rule::LESS_THAN;\r\n version->key = Rule::SDK_VERSION;\r\n version->longArgs.add((long) SDK_LOLLIPOP);\r\n densityRule->subrules.add(version);\r\n }\r\n\r\n if (index > 0) {\r\n sp<Rule> gt = new Rule();\r\n gt->op = Rule::GREATER_THAN;\r\n gt->key = Rule::SCREEN_DENSITY;\r\n gt->longArgs.add(findMid(allDensities[index - 1], allDensities[index]) - 1);\r\n densityRule->subrules.add(gt);\r\n }\r\n\r\n if (index + 1 < allDensities.size() && allDensities[index + 1] != ResTable_config::DENSITY_ANY) {\r\n sp<Rule> lt = new Rule();\r\n lt->op = Rule::LESS_THAN;\r\n lt->key = Rule::SCREEN_DENSITY;\r\n lt->longArgs.add(findMid(allDensities[index], allDensities[index + 1]));\r\n densityRule->subrules.add(lt);\r\n }\r\n return densityRule;\r\n } else {\r\n // SDK_VERSION is handled elsewhere, so we always pick DENSITY_ANY if it's\r\n // available.\r\n sp<Rule> always = new Rule();\r\n 
always->op = Rule::ALWAYS_TRUE;\r\n return always;\r\n }\r\n}\r\n\r\nsp<Rule> RuleGenerator::generateAbi(const Vector<abi::Variant>& splitAbis, size_t index) {\r\n const abi::Variant thisAbi = splitAbis[index];\r\n const Vector<abi::Variant>& familyVariants = abi::getVariants(abi::getFamily(thisAbi));\r\n\r\n Vector<abi::Variant>::const_iterator start =\r\n std::find(familyVariants.begin(), familyVariants.end(), thisAbi);\r\n\r\n Vector<abi::Variant>::const_iterator end = familyVariants.end();\r\n if (index + 1 < splitAbis.size()) {\r\n end = std::find(start, familyVariants.end(), splitAbis[index + 1]);\r\n }\r\n\r\n sp<Rule> abiRule = new Rule();\r\n abiRule->op = Rule::CONTAINS_ANY;\r\n abiRule->key = Rule::NATIVE_PLATFORM;\r\n while (start != end) {\r\n abiRule->stringArgs.add(String8(abi::toString(*start)));\r\n ++start;\r\n }\r\n return abiRule;\r\n}\r\n\r\nsp<Rule> RuleGenerator::generate(const SortedVector<SplitDescription>& group, size_t index) {\r\n sp<Rule> rootRule = new Rule();\r\n rootRule->op = Rule::AND_SUBRULES;\r\n\r\n if (group[index].config.locale != 0) {\r\n sp<Rule> locale = new Rule();\r\n locale->op = Rule::EQUALS;\r\n locale->key = Rule::LANGUAGE;\r\n char str[RESTABLE_MAX_LOCALE_LEN];\r\n group[index].config.getBcp47Locale(str);\r\n locale->stringArgs.add(String8(str));\r\n rootRule->subrules.add(locale);\r\n }\r\n\r\n if (group[index].config.sdkVersion != 0) {\r\n sp<Rule> sdk = new Rule();\r\n sdk->op = Rule::GREATER_THAN;\r\n sdk->key = Rule::SDK_VERSION;\r\n sdk->longArgs.add(group[index].config.sdkVersion - 1);\r\n rootRule->subrules.add(sdk);\r\n }\r\n\r\n if (group[index].config.density != 0) {\r\n size_t densityIndex = 0;\r\n Vector<int> allDensities;\r\n allDensities.add(group[index].config.density);\r\n\r\n const size_t groupSize = group.size();\r\n for (size_t i = 0; i < groupSize; i++) {\r\n if (group[i].config.density != group[index].config.density) {\r\n // This group differs by density.\r\n allDensities.clear();\r\n for (size_t j = 0; j < groupSize; j++) {\r\n allDensities.add(group[j].config.density);\r\n }\r\n densityIndex = index;\r\n break;\r\n }\r\n }\r\n rootRule->subrules.add(generateDensity(allDensities, densityIndex));\r\n }\r\n\r\n if (group[index].abi != abi::Variant_none) {\r\n size_t abiIndex = 0;\r\n Vector<abi::Variant> allVariants;\r\n allVariants.add(group[index].abi);\r\n\r\n const size_t groupSize = group.size();\r\n for (size_t i = 0; i < groupSize; i++) {\r\n if (group[i].abi != group[index].abi) {\r\n // This group differs by ABI.\r\n allVariants.clear();\r\n for (size_t j = 0; j < groupSize; j++) {\r\n allVariants.add(group[j].abi);\r\n }\r\n abiIndex = index;\r\n break;\r\n }\r\n }\r\n rootRule->subrules.add(generateAbi(allVariants, abiIndex));\r\n }\r\n\r\n return rootRule;\r\n}\r\n\r\n} // namespace split\r\n" }, { "alpha_fraction": 0.6652227640151978, "alphanum_fraction": 0.6701732873916626, "avg_line_length": 26.85714340209961, "blob_id": "610ce6a8687a19a555c84f7916614821d9687575", "content_id": "bce8b615265c101446c2c63513d7460f126d7ec0", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1616, "license_type": "permissive", "max_line_length": 75, "num_lines": 56, "path": "/tests/CanvasCompare/src/com/android/test/hwuicompare/MainView.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2012 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, 
Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.test.hwuicompare;\r\n\r\nimport android.content.Context;\r\nimport android.graphics.Canvas;\r\nimport android.graphics.Paint;\r\nimport android.util.AttributeSet;\r\nimport android.view.View;\r\n\r\npublic class MainView extends View {\r\n Paint mPaint = new Paint();\r\n\r\n public MainView(Context context) {\r\n super(context);\r\n }\r\n\r\n public MainView(Context context, AttributeSet attrs) {\r\n super(context, attrs);\r\n }\r\n\r\n public MainView(Context context, AttributeSet attrs, int defStyle) {\r\n super(context, attrs, defStyle);\r\n }\r\n\r\n @Override\r\n protected void onDraw(Canvas canvas) {\r\n super.onDraw(canvas);\r\n\r\n mPaint.reset();\r\n DisplayModifier.apply(mPaint, canvas);\r\n\r\n if (mDrawCallback != null) {\r\n mDrawCallback.run();\r\n }\r\n }\r\n\r\n private Runnable mDrawCallback;\r\n public void addDrawCallback(Runnable drawCallback) {\r\n mDrawCallback = drawCallback;\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6827195286750793, "alphanum_fraction": 0.6896729469299316, "avg_line_length": 31.474138259887695, "blob_id": "259a4a1f6439810c4ad59447b340934895217c4f", "content_id": "c20290bad2b73e1299cb9f698e7a9147099f31ed", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 3883, "license_type": "permissive", "max_line_length": 97, "num_lines": 116, "path": "/packages/SystemUI/tests/src/com/android/systemui/dump/LogBufferFreezerTest.kt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2020 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.systemui.dump\r\n\r\nimport android.content.BroadcastReceiver\r\nimport android.content.IntentFilter\r\nimport android.os.UserHandle\r\nimport androidx.test.filters.SmallTest\r\nimport com.android.systemui.SysuiTestCase\r\nimport com.android.systemui.broadcast.BroadcastDispatcher\r\nimport com.android.systemui.util.concurrency.FakeExecutor\r\nimport com.android.systemui.util.mockito.any\r\nimport com.android.systemui.util.mockito.capture\r\nimport com.android.systemui.util.mockito.eq\r\nimport com.android.systemui.util.time.FakeSystemClock\r\nimport org.junit.Before\r\nimport org.junit.Test\r\nimport org.mockito.ArgumentCaptor\r\nimport org.mockito.Captor\r\nimport org.mockito.Mock\r\nimport 
org.mockito.Mockito.never\r\nimport org.mockito.Mockito.times\r\nimport org.mockito.Mockito.verify\r\nimport org.mockito.MockitoAnnotations\r\n\r\n@SmallTest\r\nclass LogBufferFreezerTest : SysuiTestCase() {\r\n\r\n lateinit var freezer: LogBufferFreezer\r\n lateinit var receiver: BroadcastReceiver\r\n\r\n @Mock\r\n lateinit var dumpManager: DumpManager\r\n @Mock\r\n lateinit var broadcastDispatcher: BroadcastDispatcher\r\n @Captor\r\n lateinit var receiverCaptor: ArgumentCaptor<BroadcastReceiver>\r\n\r\n val clock = FakeSystemClock()\r\n val executor = FakeExecutor(clock)\r\n\r\n @Before\r\n fun setUp() {\r\n MockitoAnnotations.initMocks(this)\r\n\r\n freezer = LogBufferFreezer(dumpManager, executor, 500)\r\n\r\n freezer.attach(broadcastDispatcher)\r\n\r\n verify(broadcastDispatcher)\r\n .registerReceiver(\r\n capture(receiverCaptor),\r\n any(IntentFilter::class.java),\r\n eq(executor),\r\n any(UserHandle::class.java))\r\n receiver = receiverCaptor.value\r\n }\r\n\r\n @Test\r\n fun testBuffersAreFrozenInResponseToBroadcast() {\r\n // WHEN the bugreport intent is fired\r\n receiver.onReceive(null, null)\r\n\r\n // THEN the buffers are frozen\r\n verify(dumpManager).freezeBuffers()\r\n }\r\n\r\n @Test\r\n fun testBuffersAreUnfrozenAfterTimeout() {\r\n // GIVEN that we've already frozen the buffers in response to a broadcast\r\n receiver.onReceive(null, null)\r\n verify(dumpManager).freezeBuffers()\r\n\r\n // WHEN the timeout expires\r\n clock.advanceTime(501)\r\n\r\n // THEN the buffers are unfrozen\r\n verify(dumpManager).unfreezeBuffers()\r\n }\r\n\r\n @Test\r\n fun testBuffersAreNotPrematurelyUnfrozen() {\r\n // GIVEN that we received a broadcast 499ms ago (shortly before the timeout would expire)\r\n receiver.onReceive(null, null)\r\n verify(dumpManager).freezeBuffers()\r\n clock.advanceTime(499)\r\n\r\n // WHEN we receive a second broadcast\r\n receiver.onReceive(null, null)\r\n\r\n // THEN the buffers are frozen a second time\r\n verify(dumpManager, times(2)).freezeBuffers()\r\n\r\n // THEN when we advance beyond the first timeout, nothing happens\r\n clock.advanceTime(101)\r\n verify(dumpManager, never()).unfreezeBuffers()\r\n\r\n // THEN only when we advance past the reset timeout window are the buffers unfrozen\r\n clock.advanceTime(401)\r\n verify(dumpManager).unfreezeBuffers()\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6528548002243042, "alphanum_fraction": 0.6845024228096008, "avg_line_length": 36.3125, "blob_id": "df04483492fd56ccf4292d6776739c8226558aae", "content_id": "19f6905207a6397c0085f2fcc860a08454b74984", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 3065, "license_type": "permissive", "max_line_length": 85, "num_lines": 80, "path": "/wifi/java/android/net/wifi/AnqpInformationElement.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2016 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions 
and\r\n * limitations under the License.\r\n */\r\n\r\npackage android.net.wifi;\r\n\r\n/**\r\n * This object contains the payload of an ANQP element.\r\n * Vendor id is the vendor ID for the element, or 0 if it is an 802.11(u) element.\r\n * Hotspot 2.0 uses the WFA Vendor ID which is 0x506f9a\r\n * The payload contains the bytes of the payload, starting after the length octet(s).\r\n * @hide\r\n */\r\npublic class AnqpInformationElement {\r\n public static final int HOTSPOT20_VENDOR_ID = 0x506f9a;\r\n\r\n public static final int ANQP_QUERY_LIST = 256;\r\n public static final int ANQP_CAPABILITY_LIST = 257;\r\n public static final int ANQP_VENUE_NAME = 258;\r\n public static final int ANQP_EMERGENCY_NUMBER = 259;\r\n public static final int ANQP_NWK_AUTH_TYPE = 260;\r\n public static final int ANQP_ROAMING_CONSORTIUM = 261;\r\n public static final int ANQP_IP_ADDR_AVAILABILITY = 262;\r\n public static final int ANQP_NAI_REALM = 263;\r\n public static final int ANQP_3GPP_NETWORK = 264;\r\n public static final int ANQP_GEO_LOC = 265;\r\n public static final int ANQP_CIVIC_LOC = 266;\r\n public static final int ANQP_LOC_URI = 267;\r\n public static final int ANQP_DOM_NAME = 268;\r\n public static final int ANQP_EMERGENCY_ALERT = 269;\r\n public static final int ANQP_TDLS_CAP = 270;\r\n public static final int ANQP_EMERGENCY_NAI = 271;\r\n public static final int ANQP_NEIGHBOR_REPORT = 272;\r\n public static final int ANQP_VENDOR_SPEC = 56797;\r\n\r\n public static final int HS_QUERY_LIST = 1;\r\n public static final int HS_CAPABILITY_LIST = 2;\r\n public static final int HS_FRIENDLY_NAME = 3;\r\n public static final int HS_WAN_METRICS = 4;\r\n public static final int HS_CONN_CAPABILITY = 5;\r\n public static final int HS_NAI_HOME_REALM_QUERY = 6;\r\n public static final int HS_OPERATING_CLASS = 7;\r\n public static final int HS_OSU_PROVIDERS = 8;\r\n public static final int HS_ICON_REQUEST = 10;\r\n public static final int HS_ICON_FILE = 11;\r\n\r\n private final int mVendorId;\r\n private final int mElementId;\r\n private final byte[] mPayload;\r\n\r\n public AnqpInformationElement(int vendorId, int elementId, byte[] payload) {\r\n mVendorId = vendorId;\r\n mElementId = elementId;\r\n mPayload = payload;\r\n }\r\n\r\n public int getVendorId() {\r\n return mVendorId;\r\n }\r\n\r\n public int getElementId() {\r\n return mElementId;\r\n }\r\n\r\n public byte[] getPayload() {\r\n return mPayload;\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.7248169183731079, "alphanum_fraction": 0.7487630844116211, "avg_line_length": 42.91555404663086, "blob_id": "e4d9b7d80f5cae4a6f68b07af1e7e8dc2906787c", "content_id": "a4cfe75b3a10c3da36709ea85137db5a041439be", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 10106, "license_type": "permissive", "max_line_length": 136, "num_lines": 225, "path": "/core/java/android/app/AppOps.md", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "<!--\r\n Copyright (C) 2020 The Android Open Source Project\r\n\r\n Licensed under the Apache License, Version 2.0 (the \"License\");\r\n you may not use this file except in compliance with the License.\r\n You may obtain a copy of the License at\r\n\r\n http://www.apache.org/licenses/LICENSE-2.0\r\n\r\n Unless required by applicable law or agreed to in writing, software\r\n distributed under the License is distributed on an \"AS IS\" BASIS,\r\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 
implied.\r\n See the License for the specific language governing permissions and\r\n limitations under the License\r\n -->\r\n\r\n# App-ops\r\n\r\nApp-ops are used for two purposes: Access control and tracking.\r\n\r\nApp-ops cover a wide variety of functionality from helping with runtime permissions to battery\r\nconsumption tracking.\r\n\r\nApp-ops are defined in `AppOpsManager` as `OP_...` and need to be continuously numbered. The\r\ninteger values of the app-ops are not exposed. For app-ops visible to 3rd party apps,\r\nthe name of the app-op might be exposed as `OPSTR_`. As the integers are not part of the API, they\r\nmight (and have) changed between platform versions and OEM implementations.\r\n`AppOpsManager.opToPublicName` and `AppOpsManager.strOpToOp` allow for conversion between integer\r\nand string identifier for the op.\r\n\r\n## App-ops as access restrictions\r\n\r\nApp-ops can either be controlled for each [uid](../os/Users.md#int-uid) or for each package. Which\r\none is used depends on the API provider maintaining this app-op.\r\n\r\nFor any security or privacy related app-ops the provider needs to control the app-op per uid\r\nas all security and privacy is based on uid in Android.\r\n\r\nApp-ops used for non-security related tasks are usually controlled per package to provide finer\r\ngranularity.\r\n\r\n### Setting the app-op mode\r\n\r\nTo control access, the app-op can be set to:\r\n\r\n`MODE_DEFAULT`\r\n: Default behavior, might differ from app-op to app-op\r\n\r\n`MODE_ALLOWED`\r\n: Allow the access\r\n\r\n`MODE_FOREGROUND`\r\n: Allow the access but only if the app is currently in the [foreground](#foreground)\r\n\r\n`MODE_IGNORED`\r\n: Don't allow the access, i.e. don't perform the requested action or return dummy data\r\n\r\n`MODE_ERRORED`\r\n: Throw a `SecurityException` on access. This can be suppressed by using a `...noThrow` method to\r\ncheck the mode\r\n\r\nThe initial state of an app-op is defined in `AppOpsManager.sOpDefaultMode`. Confusingly, the\r\ninitial state is often not `MODE_DEFAULT`.\r\n\r\nPer-package modes can be set using `AppOpsManager.setMode` and per-uid modes can be set using\r\n`AppOpsManager.setUidMode`.\r\n\r\n**Warning**: Do not use `setMode` and `setUidMode` for the same app-op. Due to the way the\r\ninternal storage for the mode works this can lead to very confusing behavior. If this ever happened\r\nby accident this needs to be cleaned up for any affected user as the app-op mode is retained over\r\nreboot.\r\n\r\nApp-ops can also be set via the shell using the `appops set` command. The target package/uid can be\r\ndefined via parameters to this command.\r\n\r\nThe current state of the app-op can be read via the `appops get` command or via `dumpsys appops`.\r\nIf the app-op is not mentioned in the output, the app-op is in its initial state.\r\n\r\nFor example `dumpsys appops`:\r\n```\r\n[...]\r\n Uid 2000:\r\n [...]\r\n COARSE_LOCATION: mode=foreground\r\n START_FOREGROUND: mode=foreground\r\n LEGACY_STORAGE: mode=ignore\r\n [...]\r\n```\r\n\r\n### Guarding access based on app-ops\r\n\r\nAPI providers need to check the mode returned by `AppOpsManager.noteOp` if they are allowing\r\naccess to operations gated by the app-op. `AppOpsManager.unsafeCheckOp` should be used to check the\r\nmode if no access is granted. E.g. this can be for displaying app-op state in the UI or when\r\nchecking the state before later calling `noteOp` anyway.\r\n\r\nIf an operation refers to a time span (e.g. 
an audio-recording session) the API provider should\r\nuse `AppOpsManager.startOp` and `AppOpsManager.finishOp` instead of `noteOp`.\r\n\r\n`noteOp` and `startOp` take a `packageName` and `featureId` parameter. These need to be read from\r\nthe calling app's context as `Context.getOpPackageName` and `Context.getFeatureId`, then sent to\r\nthe data provider and then passed on to the `noteOp`/`startOp` method.\r\n\r\n#### App-ops and permissions\r\n\r\nAccess guarding is often done in combination with permissions using [runtime permissions\r\n](../permission/Permissions.md#runtime-permissions-and-app-ops) or [app-op permissions\r\n](../permission/Permissions.md#app-op-permissions). This is preferred over just using an app-op\r\n as permissions are a concept more familiar to app developers.\r\n\r\n### Foreground\r\n\r\nThe `AppOpsService` tracks the apps' proc state (== foreground-ness) by following the\r\n`ActivityManagerService`'s proc state. It reduces the possible proc states to only those needed\r\nfor app-ops. It also delays the changes by a _settle time_. This delay is needed as the proc state\r\ncan fluctuate when switching apps. By delaying the change the appops service is not affected by\r\nthose.\r\n\r\nIn addition to proc state, the `AppOpsService` also receives process capability updates from the\r\n`ActivityManagerService`. Proc capability specifies what while-in-use (`MODE_FOREGROUND`) operations\r\n the proc is allowed to perform in its current proc state. There are three proc capabilities\r\n defined so far: \r\n`PROCESS_CAPABILITY_FOREGROUND_LOCATION`, `PROCESS_CAPABILITY_FOREGROUND_CAMERA` and\r\n`PROCESS_CAPABILITY_FOREGROUND_MICROPHONE`; they correspond to the while-in-use operation of\r\nlocation, camera and microphone (microphone is `RECORD_AUDIO`).\r\n\r\nIn `ActivityManagerService`, `PROCESS_STATE_TOP` and `PROCESS_STATE_PERSISTENT` have all\r\nthree capabilities, `PROCESS_STATE_FOREGROUND_SERVICE` has capabilities defined by\r\n the `foregroundServiceType` that is specified in the foreground service's manifest file. A client process \r\n can pass its capabilities to a service using the `BIND_INCLUDE_CAPABILITIES` flag.\r\n\r\nThe proc state and capability are used for two use cases: Firstly, tracking remembers the proc state\r\n for each tracked event. Secondly, `noteOp`/`checkOp` calls for app-ops that are set to\r\n `MODE_FOREGROUND` are translated using the `AppOpsService.UidState.evalMode` method into\r\n `MODE_ALLOWED` when the app has the capability and `MODE_IGNORED` when the app does not have the \r\n capability. `checkOpRaw` calls are not affected.\r\n\r\nThe current proc state and capability for an app can be read from `dumpsys appops`:\r\n\r\n```\r\nUid u0a118:\r\n state=fg\r\n capability=6\r\n```\r\n\r\n## App-ops for tracking\r\n\r\nApp-ops track many important events, including all accesses to runtime-permission-protected\r\nAPIs. This is done by tracking when an app-op was noted or started. The tracked data can only be\r\nread by system components.\r\n\r\n**Note:** Only `noteOp`/`startOp` calls are tracked; `unsafeCheckOp` is not tracked. Hence it is\r\nimportant to eventually call `noteOp` or `startOp` when providing access to protected operations\r\nor data.\r\n\r\nSome apps are forwarding access to other apps. E.g. an app might get the location from the\r\nsystem's location provider and then send the location further to a 3rd app. 
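To make the guarding and tracking contract above concrete, here is a minimal provider-side sketch against the public `AppOpsManager` API. This is an editor's illustration, not framework code: it assumes the Android R signatures, and `mContext`, the chosen op and the method name are placeholders.

```java
// Minimal sketch, assuming Android R's AppOpsManager API. mContext, the
// chosen op (OPSTR_FINE_LOCATION) and noteLocationAccess() are placeholders.
private boolean noteLocationAccess(int uid, String packageName, String featureId) {
    AppOpsManager appOps = mContext.getSystemService(AppOpsManager.class);
    // noteOp both checks the current mode and records the access for tracking.
    // A MODE_FOREGROUND op has already been resolved against the caller's proc
    // state and capability by the time the mode is returned.
    int mode = appOps.noteOpNoThrow(AppOpsManager.OPSTR_FINE_LOCATION,
            uid, packageName, featureId, /* message= */ "location delivery");
    return mode == AppOpsManager.MODE_ALLOWED;
}
```

For pure state display, the three-argument `unsafeCheckOpNoThrow` avoids recording a spurious access. Returning to forwarded accesses: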
In this case the\r\napp passing on the data needs to call `AppOpsManager.noteProxyOp` to signal the access proxying.\r\nThis might also make sense inside of a single app if the access is forwarded between two features of\r\nthe app. In this case an app-op is noted for the forwarding app (proxy) and the app that received\r\nthe data (proxied). As any app can do this, it is important to track how much the system trusts this\r\nproxy-access-tracking. For more details see `AppOpsService.noteProxyOperation`.\r\n\r\nThe tracking information can be read from `dumpsys appops`, split by feature, proc state and\r\nproxying information, with the syntax\r\n\r\n```\r\nPackage THE_PACKAGE_NAME:\r\n AN_APP_OP (CURRENT_MODE):\r\n FEATURE_ID (or null for default feature)=[\r\n ACCESS_OR_REJECT: [PROC_STATE-PROXYING_TAG] TIME proxy[INFO_ABOUT_PROXY IF_PROXY_ACCESS]\r\n```\r\n\r\nExample:\r\n\r\n```\r\nPackage com.google.android.gms:\r\n READ_CONTACTS (allow):\r\n null=[\r\n Access: [fgsvc-s] 2020-02-14 14:24:10.559 (-3d23h15m43s642ms)\r\n Access: [fgsvc-tp] 2020-02-14 14:23:58.189 (-3d23h15m56s12ms)\r\n ]\r\n apkappcontext=[\r\n Access: [fg-tp] 2020-02-17 14:24:54.721 (-23h14m59s480ms)\r\n ]\r\n com.google.android.gms.icing=[\r\n Access: [fgsvc-tpd] 2020-02-14 14:26:27.018 (-3d23h13m27s183ms) proxy[uid=10070, pkg=com.android.providers.contacts, feature=null]\r\n Access: [fg-tpd] 2020-02-18 02:26:08.711 (-11h13m45s490ms) proxy[uid=10070, pkg=com.android.providers.contacts, feature=null]\r\n Access: [bg-tpd] 2020-02-14 14:34:55.310 (-3d23h4m58s891ms) proxy[uid=10070, pkg=com.android.providers.contacts, feature=null]\r\n ]\r\n MANAGE_EXTERNAL_STORAGE (default):\r\n null=[\r\n Reject: [fg-s]2020-02-18 08:00:04.444 (-5h39m49s757ms)\r\n Reject: [bg-s]2020-02-18 08:00:04.427 (-5h39m49s774ms)\r\n ]\r\n```\r\n\r\n### Tracking an app's own private data accesses\r\n\r\nAn app can register an `AppOpsManager.OnOpNotedCallback` to get informed about what accesses the\r\nsystem is tracking for it. As each runtime permission has an associated app-op, this API is\r\nparticularly useful for an app that wants to find unexpected private data accesses.\r\n\r\n## Listening to app-op events\r\n\r\nSystem apps (with the appropriate permissions) can listen to most app-op events, such as\r\n\r\n`noteOp`\r\n: `startWatchingNoted`\r\n\r\n`startOp`/`finishOp`\r\n: `startWatchingActive`\r\n\r\nmode changes\r\n: `startWatchingMode`\r\n\r\n[foreground](#foreground)-ness changes\r\n: `startWatchingMode` using the `WATCH_FOREGROUND_CHANGES` flag\r\n\r\nWatching such events is only ever as good as the tracked events. E.g. if the audio provider does\r\nnot call `startOp` for an audio-session, the app's activeness for the record-audio app-op is not\r\nchanged. Further, there were cases where app-ops were noted even though no data was accessed or\r\noperation was performed. 
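As a sketch of the self-tracking callback described above (the Android R `OnOpNotedCallback` API; the log tag and the main-thread executor are arbitrary choices, not requirements):

```java
import android.app.AppOpsManager;
import android.app.AsyncNotedAppOp;
import android.app.SyncNotedAppOp;
import android.content.Context;
import android.util.Log;

// Sketch only: installs the Android R self-tracking callback so the app is
// told whenever the system notes one of its own app-ops.
final class OpNotedLogger {
    static void install(Context context) {
        AppOpsManager appOps = context.getSystemService(AppOpsManager.class);
        appOps.setOnOpNotedCallback(context.getMainExecutor(),
                new AppOpsManager.OnOpNotedCallback() {
                    @Override public void onNoted(SyncNotedAppOp op) {
                        Log.i("OpWatch", "sync access: " + op.getOp());
                    }
                    @Override public void onSelfNoted(SyncNotedAppOp op) {
                        Log.i("OpWatch", "self access: " + op.getOp());
                    }
                    @Override public void onAsyncNoted(AsyncNotedAppOp op) {
                        Log.i("OpWatch", "async access: " + op.getOp());
                    }
                });
    }
}
```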
Hence before relying on the data from app-ops, double check if the data\r\nis actually reliable.\r\n" }, { "alpha_fraction": 0.6045918464660645, "alphanum_fraction": 0.6135203838348389, "avg_line_length": 26.545454025268555, "blob_id": "53b09f73729e9be3ae3f7660b8e89ca773aa3333", "content_id": "1d2152b2a229fdce051cc7317e58b916ebff6aee", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1568, "license_type": "permissive", "max_line_length": 79, "num_lines": 55, "path": "/tools/aapt/tests/MockFileFinder.h", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "//\r\n// Copyright 2011 The Android Open Source Project\r\n//\r\n\r\n#ifndef MOCKFILEFINDER_H\r\n#define MOCKFILEFINDER_H\r\n\r\n#include <utils/Vector.h>\r\n#include <utils/KeyedVector.h>\r\n#include <utils/String8.h>\r\n\r\n#include \"DirectoryWalker.h\"\r\n\r\nusing namespace android;\r\n\r\nclass MockFileFinder : public FileFinder {\r\npublic:\r\n MockFileFinder (KeyedVector<String8, KeyedVector<String8,time_t> >& files)\r\n : mFiles(files)\r\n {\r\n // Nothing left to do\r\n };\r\n\r\n /**\r\n * findFiles implementation for the abstraction.\r\n * PRECONDITIONS:\r\n * No checking is done, so there MUST be an entry in mFiles with\r\n * path matching basePath.\r\n *\r\n * POSTCONDITIONS:\r\n * fileStore is filled with a copy of the data in mFiles corresponding\r\n * to the basePath.\r\n */\r\n\r\n virtual bool findFiles(String8 basePath, Vector<String8>& extensions,\r\n KeyedVector<String8,time_t>& fileStore,\r\n DirectoryWalker* dw)\r\n {\r\n const KeyedVector<String8,time_t>* payload(&mFiles.valueFor(basePath));\r\n // Since KeyedVector doesn't implement swap\r\n // (who doesn't use swap??) 
we loop and add one at a time.\r\n for (size_t i = 0; i < payload->size(); ++i) {\r\n fileStore.add(payload->keyAt(i),payload->valueAt(i));\r\n }\r\n return true;\r\n }\r\n\r\nprivate:\r\n // Virtual mapping between \"directories\" and the \"files\" contained\r\n // in them\r\n KeyedVector<String8, KeyedVector<String8,time_t> > mFiles;\r\n};\r\n\r\n\r\n#endif // MOCKFILEFINDER_H" }, { "alpha_fraction": 0.6956198811531067, "alphanum_fraction": 0.7015590071678162, "avg_line_length": 28.659090042114258, "blob_id": "93365769decea07168f33342cf2eac2e4c7be9a6", "content_id": "b3d49bc8bcadffb3c267d86f17bf2d991c07c634", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1347, "license_type": "permissive", "max_line_length": 75, "num_lines": 44, "path": "/packages/PrintSpooler/src/com/android/printspooler/widget/CustomErrorEditText.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2014 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.printspooler.widget;\r\n\r\nimport android.content.Context;\r\nimport android.graphics.drawable.Drawable;\r\nimport android.util.AttributeSet;\r\nimport android.widget.EditText;\r\n\r\n/**\r\n * EditText that shows an error without a popup.\r\n */\r\npublic final class CustomErrorEditText extends EditText {\r\n private CharSequence mError;\r\n\r\n public CustomErrorEditText(Context context, AttributeSet attrs) {\r\n super(context, attrs);\r\n }\r\n\r\n @Override\r\n public CharSequence getError() {\r\n return mError;\r\n }\r\n\r\n @Override\r\n public void setError(CharSequence error, Drawable icon) {\r\n setCompoundDrawables(null, null, icon, null);\r\n mError = error;\r\n }\r\n}" }, { "alpha_fraction": 0.6052286028862, "alphanum_fraction": 0.6069787740707397, "avg_line_length": 36.08333206176758, "blob_id": "df43161d01347d4675c712793193f2358f1d1ace", "content_id": "e806fde605ca001f131957dd797eeb9e794409c0", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 9142, "license_type": "permissive", "max_line_length": 100, "num_lines": 240, "path": "/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/Signature.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2011 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 
express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n\r\npackage androidx.media.filterfw;\r\n\r\nimport java.util.HashMap;\r\nimport java.util.HashSet;\r\nimport java.util.Map.Entry;\r\nimport java.util.Set;\r\n\r\n/**\r\n * A Signature holds the specification for a filter's input and output ports.\r\n *\r\n * A Signature instance must be returned by the filter's {@link Filter#getSignature()} method. It\r\n * specifies the number and names of the filter's input and output ports, whether or not they\r\n * are required, how data for those ports are accessed, and more. A Signature does not change over\r\n * time. This makes Signatures useful for understanding how a filter can be integrated into a\r\n * graph.\r\n *\r\n * There are a number of flags that can be specified for each input and output port. The flag\r\n * {@code PORT_REQUIRED} indicates that the user must connect the specified port. On the other hand,\r\n * {@code PORT_OPTIONAL} indicates that a port may be connected by the user.\r\n *\r\n * If ports other than the ones in the Signature are allowed, they default to the most generic\r\n * format, that allows passing in any type of Frame. Thus, if more granular access is needed to\r\n * a frame's data, it must be specified in the Signature.\r\n */\r\npublic class Signature {\r\n\r\n private HashMap<String, PortInfo> mInputPorts = null;\r\n private HashMap<String, PortInfo> mOutputPorts = null;\r\n private boolean mAllowOtherInputs = true;\r\n private boolean mAllowOtherOutputs = true;\r\n\r\n static class PortInfo {\r\n public int flags;\r\n public FrameType type;\r\n\r\n public PortInfo() {\r\n flags = 0;\r\n type = FrameType.any();\r\n }\r\n\r\n public PortInfo(int flags, FrameType type) {\r\n this.flags = flags;\r\n this.type = type;\r\n }\r\n\r\n public boolean isRequired() {\r\n return (flags & PORT_REQUIRED) != 0;\r\n }\r\n\r\n public String toString(String ioMode, String name) {\r\n String ioName = ioMode + \" \" + name;\r\n String modeName = isRequired() ? \"required\" : \"optional\";\r\n return modeName + \" \" + ioName + \": \" + type.toString();\r\n }\r\n }\r\n\r\n /** Indicates that the port must be connected in the graph. */\r\n public static final int PORT_REQUIRED = 0x02;\r\n /** Indicates that the port may be connected in the graph . */\r\n public static final int PORT_OPTIONAL = 0x01;\r\n\r\n /**\r\n * Creates a new empty Signature.\r\n */\r\n public Signature() {\r\n }\r\n\r\n /**\r\n * Adds an input port to the Signature.\r\n *\r\n * @param name the name of the input port. Must be unique among input port names.\r\n * @param flags a combination of port flags.\r\n * @param type the type of the input frame.\r\n * @return this Signature instance.\r\n */\r\n public Signature addInputPort(String name, int flags, FrameType type) {\r\n addInputPort(name, new PortInfo(flags, type));\r\n return this;\r\n }\r\n\r\n /**\r\n * Adds an output port to the Signature.\r\n *\r\n * @param name the name of the output port. 
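// Editor's sketch (not part of the Signature.java source above): a
// hypothetical Filter.getSignature() override built only from the API this
// class defines. FrameType.any() is the same generic type that PortInfo
// defaults to when no type is given.
@Override
public Signature getSignature() {
    return new Signature()
            .addInputPort("input", Signature.PORT_REQUIRED, FrameType.any())
            .addOutputPort("output", Signature.PORT_OPTIONAL, FrameType.any())
            .disallowOtherPorts();
}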
Must be unique among output port names.\r\n * @param flags a combination of port flags.\r\n * @param type the type of the output frame.\r\n * @return this Signature instance.\r\n */\r\n public Signature addOutputPort(String name, int flags, FrameType type) {\r\n addOutputPort(name, new PortInfo(flags, type));\r\n return this;\r\n }\r\n\r\n /**\r\n * Disallows the user from adding any other input ports.\r\n * Adding any input port not explicitly specified in this Signature will cause an error.\r\n * @return this Signature instance.\r\n */\r\n public Signature disallowOtherInputs() {\r\n mAllowOtherInputs = false;\r\n return this;\r\n }\r\n\r\n /**\r\n * Disallows the user from adding any other output ports.\r\n * Adding any output port not explicitly specified in this Signature will cause an error.\r\n * @return this Signature instance.\r\n */\r\n public Signature disallowOtherOutputs() {\r\n mAllowOtherOutputs = false;\r\n return this;\r\n }\r\n\r\n /**\r\n * Disallows the user from adding any other ports.\r\n * Adding any input or output port not explicitly specified in this Signature will cause an\r\n * error.\r\n * @return this Signature instance.\r\n */\r\n public Signature disallowOtherPorts() {\r\n mAllowOtherInputs = false;\r\n mAllowOtherOutputs = false;\r\n return this;\r\n }\r\n\r\n @Override\r\n public String toString() {\r\n StringBuffer stringBuffer = new StringBuffer();\r\n for (Entry<String, PortInfo> entry : mInputPorts.entrySet()) {\r\n stringBuffer.append(entry.getValue().toString(\"input\", entry.getKey()) + \"\\n\");\r\n }\r\n for (Entry<String, PortInfo> entry : mOutputPorts.entrySet()) {\r\n stringBuffer.append(entry.getValue().toString(\"output\", entry.getKey()) + \"\\n\");\r\n }\r\n if (!mAllowOtherInputs) {\r\n stringBuffer.append(\"disallow other inputs\\n\");\r\n }\r\n if (!mAllowOtherOutputs) {\r\n stringBuffer.append(\"disallow other outputs\\n\");\r\n }\r\n return stringBuffer.toString();\r\n }\r\n\r\n PortInfo getInputPortInfo(String name) {\r\n PortInfo result = mInputPorts != null ? mInputPorts.get(name) : null;\r\n return result != null ? result : new PortInfo();\r\n }\r\n\r\n PortInfo getOutputPortInfo(String name) {\r\n PortInfo result = mOutputPorts != null ? mOutputPorts.get(name) : null;\r\n return result != null ? 
result : new PortInfo();\r\n }\r\n\r\n void checkInputPortsConform(Filter filter) {\r\n Set<String> filterInputs = new HashSet<String>();\r\n filterInputs.addAll(filter.getConnectedInputPortMap().keySet());\r\n if (mInputPorts != null) {\r\n for (Entry<String, PortInfo> entry : mInputPorts.entrySet()) {\r\n String portName = entry.getKey();\r\n PortInfo portInfo = entry.getValue();\r\n InputPort inputPort = filter.getConnectedInputPort(portName);\r\n if (inputPort == null && portInfo.isRequired()) {\r\n throw new RuntimeException(\"Filter \" + filter + \" does not have required \"\r\n + \"input port '\" + portName + \"'!\");\r\n }\r\n filterInputs.remove(portName);\r\n }\r\n }\r\n if (!mAllowOtherInputs && !filterInputs.isEmpty()) {\r\n throw new RuntimeException(\"Filter \" + filter + \" has invalid input ports: \"\r\n + filterInputs + \"!\");\r\n }\r\n }\r\n\r\n void checkOutputPortsConform(Filter filter) {\r\n Set<String> filterOutputs = new HashSet<String>();\r\n filterOutputs.addAll(filter.getConnectedOutputPortMap().keySet());\r\n if (mOutputPorts != null) {\r\n for (Entry<String, PortInfo> entry : mOutputPorts.entrySet()) {\r\n String portName = entry.getKey();\r\n PortInfo portInfo = entry.getValue();\r\n OutputPort outputPort = filter.getConnectedOutputPort(portName);\r\n if (outputPort == null && portInfo.isRequired()) {\r\n throw new RuntimeException(\"Filter \" + filter + \" does not have required \"\r\n + \"output port '\" + portName + \"'!\");\r\n }\r\n filterOutputs.remove(portName);\r\n }\r\n }\r\n if (!mAllowOtherOutputs && !filterOutputs.isEmpty()) {\r\n throw new RuntimeException(\"Filter \" + filter + \" has invalid output ports: \"\r\n + filterOutputs + \"!\");\r\n }\r\n }\r\n\r\n HashMap<String, PortInfo> getInputPorts() {\r\n return mInputPorts;\r\n }\r\n\r\n HashMap<String, PortInfo> getOutputPorts() {\r\n return mOutputPorts;\r\n }\r\n\r\n private void addInputPort(String name, PortInfo portInfo) {\r\n if (mInputPorts == null) {\r\n mInputPorts = new HashMap<String, PortInfo>();\r\n }\r\n if (mInputPorts.containsKey(name)) {\r\n throw new RuntimeException(\"Attempting to add duplicate input port '\" + name + \"'!\");\r\n }\r\n mInputPorts.put(name, portInfo);\r\n }\r\n\r\n private void addOutputPort(String name, PortInfo portInfo) {\r\n if (mOutputPorts == null) {\r\n mOutputPorts = new HashMap<String, PortInfo>();\r\n }\r\n if (mOutputPorts.containsKey(name)) {\r\n throw new RuntimeException(\"Attempting to add duplicate output port '\" + name + \"'!\");\r\n }\r\n mOutputPorts.put(name, portInfo);\r\n }\r\n}\r\n\r\n" }, { "alpha_fraction": 0.6995654702186584, "alphanum_fraction": 0.7107386589050293, "avg_line_length": 35.034481048583984, "blob_id": "9097a35a4c4b09858cd7f3e6dc2a285ad8baa902", "content_id": "54a043935d61300e4554e4a0aeaf9ccda669923b", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 3222, "license_type": "permissive", "max_line_length": 89, "num_lines": 87, "path": "/packages/SystemUI/tests/src/com/android/systemui/glwallpaper/ImageWallpaperRendererTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2020 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * 
Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.systemui.glwallpaper;\r\n\r\nimport static com.google.common.truth.Truth.assertThat;\r\n\r\nimport static org.mockito.Mockito.doReturn;\r\nimport static org.mockito.Mockito.spy;\r\n\r\nimport android.app.WallpaperManager;\r\nimport android.app.WallpaperManager.ColorManagementProxy;\r\nimport android.graphics.Bitmap;\r\nimport android.graphics.ColorSpace;\r\nimport android.testing.AndroidTestingRunner;\r\nimport android.testing.TestableLooper;\r\n\r\nimport androidx.test.filters.SmallTest;\r\n\r\nimport com.android.systemui.SysuiTestCase;\r\n\r\nimport org.junit.Before;\r\nimport org.junit.Test;\r\nimport org.junit.runner.RunWith;\r\n\r\nimport java.io.IOException;\r\nimport java.util.HashSet;\r\nimport java.util.Set;\r\n\r\n@SmallTest\r\n@RunWith(AndroidTestingRunner.class)\r\[email protected](setAsMainLooper = true)\r\npublic class ImageWallpaperRendererTest extends SysuiTestCase {\r\n\r\n private WallpaperManager mWpmSpy;\r\n\r\n @Before\r\n public void setUp() throws Exception {\r\n final WallpaperManager wpm = mContext.getSystemService(WallpaperManager.class);\r\n mWpmSpy = spy(wpm);\r\n mContext.addMockSystemService(WallpaperManager.class, mWpmSpy);\r\n }\r\n\r\n @Test\r\n public void testWcgContent() throws IOException {\r\n final Bitmap srgbBitmap = Bitmap.createBitmap(100, 100, Bitmap.Config.ARGB_8888);\r\n final Bitmap p3Bitmap = Bitmap.createBitmap(100, 100, Bitmap.Config.ARGB_8888,\r\n false /* hasAlpha */, ColorSpace.get(ColorSpace.Named.DISPLAY_P3));\r\n\r\n final ColorManagementProxy proxy = new ColorManagementProxy(mContext);\r\n final ColorManagementProxy cmProxySpy = spy(proxy);\r\n final Set<ColorSpace> supportedWideGamuts = new HashSet<>();\r\n supportedWideGamuts.add(ColorSpace.get(ColorSpace.Named.DISPLAY_P3));\r\n\r\n try {\r\n doReturn(true).when(mWpmSpy).shouldEnableWideColorGamut();\r\n doReturn(cmProxySpy).when(mWpmSpy).getColorManagementProxy();\r\n doReturn(supportedWideGamuts).when(cmProxySpy).getSupportedColorSpaces();\r\n\r\n mWpmSpy.setBitmap(p3Bitmap);\r\n ImageWallpaperRenderer rendererP3 = new ImageWallpaperRenderer(mContext);\r\n rendererP3.reportSurfaceSize();\r\n assertThat(rendererP3.isWcgContent()).isTrue();\r\n\r\n mWpmSpy.setBitmap(srgbBitmap);\r\n ImageWallpaperRenderer renderer = new ImageWallpaperRenderer(mContext);\r\n assertThat(renderer.isWcgContent()).isFalse();\r\n } finally {\r\n srgbBitmap.recycle();\r\n p3Bitmap.recycle();\r\n }\r\n }\r\n\r\n}\r\n" }, { "alpha_fraction": 0.6412068605422974, "alphanum_fraction": 0.6534384489059448, "avg_line_length": 35.927833557128906, "blob_id": "1dfb01e22d192b71dde8108b95363073d6d299f8", "content_id": "66952e2a7f943bafdc4626954ac8e1ad25b15b9a", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 3679, "license_type": "permissive", "max_line_length": 93, "num_lines": 97, "path": "/services/tests/servicestests/src/com/android/server/NativeDaemonConnectorTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2011 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, 
Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.server;\r\n\r\nimport static com.android.server.NativeDaemonConnector.appendEscaped;\r\nimport static com.android.server.NativeDaemonConnector.makeCommand;\r\n\r\nimport android.test.AndroidTestCase;\r\nimport android.test.suitebuilder.annotation.MediumTest;\r\n\r\nimport com.android.server.NativeDaemonConnector.SensitiveArg;\r\n\r\n/**\r\n * Tests for {@link NativeDaemonConnector}.\r\n */\r\n@MediumTest\r\npublic class NativeDaemonConnectorTest extends AndroidTestCase {\r\n private static final String TAG = \"NativeDaemonConnectorTest\";\r\n\r\n public void testArgumentNormal() throws Exception {\r\n final StringBuilder builder = new StringBuilder();\r\n\r\n builder.setLength(0);\r\n appendEscaped(builder, \"\");\r\n assertEquals(\"\", builder.toString());\r\n\r\n builder.setLength(0);\r\n appendEscaped(builder, \"foo\");\r\n assertEquals(\"foo\", builder.toString());\r\n\r\n builder.setLength(0);\r\n appendEscaped(builder, \"foo\\\"bar\");\r\n assertEquals(\"foo\\\\\\\"bar\", builder.toString());\r\n\r\n builder.setLength(0);\r\n appendEscaped(builder, \"foo\\\\bar\\\\\\\"baz\");\r\n assertEquals(\"foo\\\\\\\\bar\\\\\\\\\\\\\\\"baz\", builder.toString());\r\n }\r\n\r\n public void testArgumentWithSpaces() throws Exception {\r\n final StringBuilder builder = new StringBuilder();\r\n\r\n builder.setLength(0);\r\n appendEscaped(builder, \"foo bar\");\r\n assertEquals(\"\\\"foo bar\\\"\", builder.toString());\r\n\r\n builder.setLength(0);\r\n appendEscaped(builder, \"foo\\\"bar\\\\baz foo\");\r\n assertEquals(\"\\\"foo\\\\\\\"bar\\\\\\\\baz foo\\\"\", builder.toString());\r\n }\r\n\r\n public void testArgumentWithUtf() throws Exception {\r\n final StringBuilder builder = new StringBuilder();\r\n\r\n builder.setLength(0);\r\n appendEscaped(builder, \"caf\\u00E9 c\\u00F6ffee\");\r\n assertEquals(\"\\\"caf\\u00E9 c\\u00F6ffee\\\"\", builder.toString());\r\n }\r\n\r\n public void testSensitiveArgs() throws Exception {\r\n final StringBuilder rawBuilder = new StringBuilder();\r\n final StringBuilder logBuilder = new StringBuilder();\r\n\r\n rawBuilder.setLength(0);\r\n logBuilder.setLength(0);\r\n makeCommand(rawBuilder, logBuilder, 1, \"foo\", \"bar\", \"baz\");\r\n assertEquals(\"1 foo bar baz\\0\", rawBuilder.toString());\r\n assertEquals(\"1 foo bar baz\", logBuilder.toString());\r\n\r\n rawBuilder.setLength(0);\r\n logBuilder.setLength(0);\r\n makeCommand(rawBuilder, logBuilder, 1, \"foo\", new SensitiveArg(\"bar\"), \"baz\");\r\n assertEquals(\"1 foo bar baz\\0\", rawBuilder.toString());\r\n assertEquals(\"1 foo [scrubbed] baz\", logBuilder.toString());\r\n\r\n rawBuilder.setLength(0);\r\n logBuilder.setLength(0);\r\n makeCommand(rawBuilder, logBuilder, 1, \"foo\", new SensitiveArg(\"foo bar\"), \"baz baz\",\r\n new SensitiveArg(\"wat\"));\r\n assertEquals(\"1 foo \\\"foo bar\\\" \\\"baz baz\\\" wat\\0\", rawBuilder.toString());\r\n assertEquals(\"1 foo [scrubbed] \\\"baz baz\\\" [scrubbed]\", logBuilder.toString());\r\n 
}\r\n}\r\n" }, { "alpha_fraction": 0.5828291773796082, "alphanum_fraction": 0.585133969783783, "avg_line_length": 31.375, "blob_id": "9f07fa4029fdc8186efd62633d305d25b7f312d9", "content_id": "b54e2e163888c4275ad2878ec05c0bbcaff3f12f", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 3471, "license_type": "permissive", "max_line_length": 99, "num_lines": 104, "path": "/packages/StatementService/src/com/android/statementservice/retriever/AssetJsonWriter.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2015 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.statementservice.retriever;\r\n\r\nimport android.util.JsonWriter;\r\n\r\nimport java.io.IOException;\r\nimport java.io.StringWriter;\r\nimport java.util.List;\r\nimport java.util.Locale;\r\n\r\n/**\r\n * Creates a Json string where the order of the fields can be specified.\r\n */\r\n/* package private */ final class AssetJsonWriter {\r\n\r\n private StringWriter mStringWriter = new StringWriter();\r\n private JsonWriter mWriter;\r\n private boolean mClosed = false;\r\n\r\n public AssetJsonWriter() {\r\n mWriter = new JsonWriter(mStringWriter);\r\n try {\r\n mWriter.beginObject();\r\n } catch (IOException e) {\r\n throw new AssertionError(\"Unreachable exception.\");\r\n }\r\n }\r\n\r\n /**\r\n * Appends a field to the output, putting both the key and value in lowercase. Null values are\r\n * not written.\r\n */\r\n public void writeFieldLower(String key, String value) {\r\n if (mClosed) {\r\n throw new IllegalArgumentException(\r\n \"Cannot write to an object that has already been closed.\");\r\n }\r\n\r\n if (value != null) {\r\n try {\r\n mWriter.name(key.toLowerCase(Locale.US));\r\n mWriter.value(value.toLowerCase(Locale.US));\r\n } catch (IOException e) {\r\n throw new AssertionError(\"Unreachable exception.\");\r\n }\r\n }\r\n }\r\n\r\n /**\r\n * Appends an array to the output, putting both the key and values in lowercase. If {@code\r\n * values} is null, this field will not be written. Individual values in the list must not be\r\n * null.\r\n */\r\n public void writeArrayUpper(String key, List<String> values) {\r\n if (mClosed) {\r\n throw new IllegalArgumentException(\r\n \"Cannot write to an object that has already been closed.\");\r\n }\r\n\r\n if (values != null) {\r\n try {\r\n mWriter.name(key.toLowerCase(Locale.US));\r\n mWriter.beginArray();\r\n for (String value : values) {\r\n mWriter.value(value.toUpperCase(Locale.US));\r\n }\r\n mWriter.endArray();\r\n } catch (IOException e) {\r\n throw new AssertionError(\"Unreachable exception.\");\r\n }\r\n }\r\n }\r\n\r\n /**\r\n * Returns the string representation of the constructed json. 
After calling this method, {@link\r\n * #writeFieldLower} can no longer be called.\r\n */\r\n public String closeAndGetString() {\r\n if (!mClosed) {\r\n try {\r\n mWriter.endObject();\r\n } catch (IOException e) {\r\n throw new AssertionError(\"Unreachable exception.\");\r\n }\r\n mClosed = true;\r\n }\r\n return mStringWriter.toString();\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.7188019752502441, "alphanum_fraction": 0.7262895107269287, "avg_line_length": 33.411766052246094, "blob_id": "2c0bfac6da49a6b6a19b5f7e72279d632f3f38f7", "content_id": "3200a13c6ca38a05a4d954eb22e891ca713a358c", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 1202, "license_type": "permissive", "max_line_length": 74, "num_lines": 34, "path": "/tests/ProtoInputStreamTests/Android.mk", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "# Copyright (C) 2019 The Android Open Source Project\r\n#\r\n# Licensed under the Apache License, Version 2.0 (the \"License\");\r\n# you may not use this file except in compliance with the License.\r\n# You may obtain a copy of the License at\r\n#\r\n# http://www.apache.org/licenses/LICENSE-2.0\r\n#\r\n# Unless required by applicable law or agreed to in writing, software\r\n# distributed under the License is distributed on an \"AS IS\" BASIS,\r\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n# See the License for the specific language governing permissions and\r\n# limitations under the License.\r\n\r\nLOCAL_PATH := $(call my-dir)\r\ninclude $(CLEAR_VARS)\r\n\r\nLOCAL_PACKAGE_NAME := ProtoInputStreamTests\r\nLOCAL_PROTOC_OPTIMIZE_TYPE := nano\r\nLOCAL_MODULE_TAGS := tests optional\r\nLOCAL_SRC_FILES := \\\r\n $(call all-java-files-under, src) \\\r\n $(call all-proto-files-under, src)\r\nLOCAL_PRIVATE_PLATFORM_APIS := true\r\nLOCAL_CERTIFICATE := platform\r\nLOCAL_COMPATIBILITY_SUITE := device-tests\r\n\r\nLOCAL_JAVA_LIBRARIES := android.test.runner\r\nLOCAL_STATIC_JAVA_LIBRARIES := \\\r\n androidx.test.rules \\\r\n frameworks-base-testutils \\\r\n mockito-target-minus-junit4\r\n\r\ninclude $(BUILD_PACKAGE)" }, { "alpha_fraction": 0.6849702000617981, "alphanum_fraction": 0.7091308236122131, "avg_line_length": 31.5473690032959, "blob_id": "ec2fcb0a0161448966399c41ed1426c089bf0e3a", "content_id": "ac148ee48ffeae07bad4b304f16e42a87d0a743f", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3187, "license_type": "permissive", "max_line_length": 75, "num_lines": 95, "path": "/libs/androidfw/tests/StringPiece_test.cpp", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2015 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#include \"androidfw/StringPiece.h\"\r\n\r\n#include <algorithm>\r\n#include 
<string>\r\n#include <vector>\r\n\r\n#include \"TestHelpers.h\"\r\n\r\nnamespace android {\r\n\r\nTEST(StringPieceTest, CompareNonNullTerminatedPiece) {\r\n StringPiece a(\"hello world\", 5);\r\n StringPiece b(\"hello moon\", 5);\r\n EXPECT_EQ(a, b);\r\n\r\n StringPiece16 a16(u\"hello world\", 5);\r\n StringPiece16 b16(u\"hello moon\", 5);\r\n EXPECT_EQ(a16, b16);\r\n}\r\n\r\nTEST(StringPieceTest, PiecesHaveCorrectSortOrder) {\r\n std::string testing(\"testing\");\r\n std::string banana(\"banana\");\r\n std::string car(\"car\");\r\n\r\n EXPECT_TRUE(StringPiece(testing) > banana);\r\n EXPECT_TRUE(StringPiece(testing) > car);\r\n EXPECT_TRUE(StringPiece(banana) < testing);\r\n EXPECT_TRUE(StringPiece(banana) < car);\r\n EXPECT_TRUE(StringPiece(car) < testing);\r\n EXPECT_TRUE(StringPiece(car) > banana);\r\n}\r\n\r\nTEST(StringPieceTest, PiecesHaveCorrectSortOrderUtf8) {\r\n std::string testing(\"testing\");\r\n std::string banana(\"banana\");\r\n std::string car(\"car\");\r\n\r\n EXPECT_TRUE(StringPiece(testing) > banana);\r\n EXPECT_TRUE(StringPiece(testing) > car);\r\n EXPECT_TRUE(StringPiece(banana) < testing);\r\n EXPECT_TRUE(StringPiece(banana) < car);\r\n EXPECT_TRUE(StringPiece(car) < testing);\r\n EXPECT_TRUE(StringPiece(car) > banana);\r\n}\r\n\r\nTEST(StringPieceTest, ContainsOtherStringPiece) {\r\n StringPiece text(\"I am a leaf on the wind.\");\r\n StringPiece start_needle(\"I am\");\r\n StringPiece end_needle(\"wind.\");\r\n StringPiece middle_needle(\"leaf\");\r\n StringPiece empty_needle(\"\");\r\n StringPiece missing_needle(\"soar\");\r\n StringPiece long_needle(\"This string is longer than the text.\");\r\n\r\n EXPECT_TRUE(text.contains(start_needle));\r\n EXPECT_TRUE(text.contains(end_needle));\r\n EXPECT_TRUE(text.contains(middle_needle));\r\n EXPECT_TRUE(text.contains(empty_needle));\r\n EXPECT_FALSE(text.contains(missing_needle));\r\n EXPECT_FALSE(text.contains(long_needle));\r\n\r\n StringPiece16 text16(u\"I am a leaf on the wind.\");\r\n StringPiece16 start_needle16(u\"I am\");\r\n StringPiece16 end_needle16(u\"wind.\");\r\n StringPiece16 middle_needle16(u\"leaf\");\r\n StringPiece16 empty_needle16(u\"\");\r\n StringPiece16 missing_needle16(u\"soar\");\r\n StringPiece16 long_needle16(u\"This string is longer than the text.\");\r\n\r\n EXPECT_TRUE(text16.contains(start_needle16));\r\n EXPECT_TRUE(text16.contains(end_needle16));\r\n EXPECT_TRUE(text16.contains(middle_needle16));\r\n EXPECT_TRUE(text16.contains(empty_needle16));\r\n EXPECT_FALSE(text16.contains(missing_needle16));\r\n EXPECT_FALSE(text16.contains(long_needle16));\r\n}\r\n\r\n} // namespace android\r\n" }, { "alpha_fraction": 0.6654605865478516, "alphanum_fraction": 0.6666666865348816, "avg_line_length": 35.26404571533203, "blob_id": "75d4ff45dfc705efa30cb44a54365ac3a73af6a2", "content_id": "fee4e0c416bc044485048b6ce0e39a9021fb49f7", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 6633, "license_type": "permissive", "max_line_length": 100, "num_lines": 178, "path": "/services/core/java/com/android/server/locksettings/recoverablekeystore/storage/RecoverySnapshotStorage.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2017 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n 
*\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.server.locksettings.recoverablekeystore.storage;\r\n\r\nimport android.annotation.Nullable;\r\nimport android.os.Environment;\r\nimport android.security.keystore.recovery.KeyChainSnapshot;\r\nimport android.util.Log;\r\nimport android.util.SparseArray;\r\n\r\nimport com.android.internal.annotations.GuardedBy;\r\nimport com.android.internal.annotations.VisibleForTesting;\r\nimport com.android.server.locksettings.recoverablekeystore.serialization\r\n .KeyChainSnapshotDeserializer;\r\nimport com.android.server.locksettings.recoverablekeystore.serialization\r\n .KeyChainSnapshotParserException;\r\nimport com.android.server.locksettings.recoverablekeystore.serialization.KeyChainSnapshotSerializer;\r\n\r\nimport java.io.File;\r\nimport java.io.FileInputStream;\r\nimport java.io.FileOutputStream;\r\nimport java.io.IOException;\r\nimport java.security.cert.CertificateEncodingException;\r\nimport java.util.Locale;\r\n\r\n/**\r\n * Storage for recovery snapshots. Stores snapshots in memory, backed by disk storage.\r\n *\r\n * <p>Recovery snapshots are generated after a successful screen unlock. They are only generated if\r\n * the recoverable keystore has been mutated since the previous snapshot. This class stores only the\r\n * latest snapshot for each recovery agent.\r\n *\r\n * <p>This class is thread-safe. It is used both on the service thread and the\r\n * {@link com.android.server.locksettings.recoverablekeystore.KeySyncTask} thread.\r\n */\r\npublic class RecoverySnapshotStorage {\r\n\r\n private static final String TAG = \"RecoverySnapshotStorage\";\r\n\r\n private static final String ROOT_PATH = \"system\";\r\n private static final String STORAGE_PATH = \"recoverablekeystore/snapshots/\";\r\n\r\n @GuardedBy(\"this\")\r\n private final SparseArray<KeyChainSnapshot> mSnapshotByUid = new SparseArray<>();\r\n\r\n private final File rootDirectory;\r\n\r\n /**\r\n * A new instance, storing snapshots in /data/system/recoverablekeystore/snapshots.\r\n *\r\n * <p>NOTE: calling this multiple times DOES NOT return the same instance, so will NOT be backed\r\n * by the same in-memory store.\r\n */\r\n public static RecoverySnapshotStorage newInstance() {\r\n return new RecoverySnapshotStorage(\r\n new File(Environment.getDataDirectory(), ROOT_PATH));\r\n }\r\n\r\n @VisibleForTesting\r\n public RecoverySnapshotStorage(File rootDirectory) {\r\n this.rootDirectory = rootDirectory;\r\n }\r\n\r\n /**\r\n * Sets the latest {@code snapshot} for the recovery agent {@code uid}.\r\n */\r\n public synchronized void put(int uid, KeyChainSnapshot snapshot) {\r\n mSnapshotByUid.put(uid, snapshot);\r\n\r\n try {\r\n writeToDisk(uid, snapshot);\r\n } catch (IOException | CertificateEncodingException e) {\r\n Log.e(TAG,\r\n String.format(Locale.US, \"Error persisting snapshot for %d to disk\", uid),\r\n e);\r\n }\r\n }\r\n\r\n /**\r\n * Returns the latest snapshot for the recovery agent {@code uid}, or null if none exists.\r\n */\r\n @Nullable\r\n public synchronized KeyChainSnapshot get(int uid) {\r\n KeyChainSnapshot snapshot = mSnapshotByUid.get(uid);\r\n if (snapshot != null) {\r\n return 
snapshot;\r\n }\r\n\r\n try {\r\n return readFromDisk(uid);\r\n } catch (IOException | KeyChainSnapshotParserException e) {\r\n Log.e(TAG, String.format(Locale.US, \"Error reading snapshot for %d from disk\", uid), e);\r\n return null;\r\n }\r\n }\r\n\r\n /**\r\n * Removes any (if any) snapshot associated with recovery agent {@code uid}.\r\n */\r\n public synchronized void remove(int uid) {\r\n mSnapshotByUid.remove(uid);\r\n getSnapshotFile(uid).delete();\r\n }\r\n\r\n /**\r\n * Writes the snapshot for recovery agent {@code uid} to disk.\r\n *\r\n * @throws IOException if an IO error occurs writing to disk.\r\n */\r\n private void writeToDisk(int uid, KeyChainSnapshot snapshot)\r\n throws IOException, CertificateEncodingException {\r\n File snapshotFile = getSnapshotFile(uid);\r\n\r\n try (\r\n FileOutputStream fileOutputStream = new FileOutputStream(snapshotFile)\r\n ) {\r\n KeyChainSnapshotSerializer.serialize(snapshot, fileOutputStream);\r\n } catch (IOException | CertificateEncodingException e) {\r\n // If we fail to write the latest snapshot, we should delete any older snapshot that\r\n // happens to be around. Otherwise snapshot syncs might end up going 'back in time'.\r\n snapshotFile.delete();\r\n throw e;\r\n }\r\n }\r\n\r\n /**\r\n * Reads the last snapshot for recovery agent {@code uid} from disk.\r\n *\r\n * @return The snapshot, or null if none existed.\r\n * @throws IOException if an IO error occurs reading from disk.\r\n */\r\n @Nullable\r\n private KeyChainSnapshot readFromDisk(int uid)\r\n throws IOException, KeyChainSnapshotParserException {\r\n File snapshotFile = getSnapshotFile(uid);\r\n\r\n try (\r\n FileInputStream fileInputStream = new FileInputStream(snapshotFile)\r\n ) {\r\n return KeyChainSnapshotDeserializer.deserialize(fileInputStream);\r\n } catch (IOException | KeyChainSnapshotParserException e) {\r\n // If we fail to read the latest snapshot, we should delete it in case it is in some way\r\n // corrupted. 
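// Editor's sketch of the typical lifecycle of a snapshot entry, using only the
// methods defined in this class; agentUid and snapshot come from the caller:
//
//     RecoverySnapshotStorage storage = RecoverySnapshotStorage.newInstance();
//     storage.put(agentUid, snapshot);                  // cache in memory, persist to disk
//     KeyChainSnapshot latest = storage.get(agentUid);  // memory first, then disk fallback
//     storage.remove(agentUid);                         // drops both the cached and on-disk copy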
We can regenerate snapshots anyway.\r\n snapshotFile.delete();\r\n throw e;\r\n }\r\n }\r\n\r\n private File getSnapshotFile(int uid) {\r\n File folder = getStorageFolder();\r\n String fileName = getSnapshotFileName(uid);\r\n return new File(folder, fileName);\r\n }\r\n\r\n private String getSnapshotFileName(int uid) {\r\n return String.format(Locale.US, \"%d.xml\", uid);\r\n }\r\n\r\n private File getStorageFolder() {\r\n File folder = new File(rootDirectory, STORAGE_PATH);\r\n folder.mkdirs();\r\n return folder;\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6200269460678101, "alphanum_fraction": 0.6400876641273499, "avg_line_length": 26.382774353027344, "blob_id": "3b1093f8d564c283b63675b7cec0f156c55a2580", "content_id": "3164302f0745f2df02a33b9045f9214833727515", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5932, "license_type": "permissive", "max_line_length": 80, "num_lines": 209, "path": "/tools/aapt2/compile/Image.h", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2016 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#ifndef AAPT_COMPILE_IMAGE_H\r\n#define AAPT_COMPILE_IMAGE_H\r\n\r\n#include <cstdint>\r\n#include <memory>\r\n#include <string>\r\n#include <vector>\r\n\r\n#include \"android-base/macros.h\"\r\n\r\nnamespace aapt {\r\n\r\n/**\r\n * An in-memory image, loaded from disk, with pixels in RGBA_8888 format.\r\n */\r\nclass Image {\r\n public:\r\n explicit Image() = default;\r\n\r\n /**\r\n * A `height` sized array of pointers, where each element points to a\r\n * `width` sized row of RGBA_8888 pixels.\r\n */\r\n std::unique_ptr<uint8_t* []> rows;\r\n\r\n /**\r\n * The width of the image in RGBA_8888 pixels. This is int32_t because of\r\n * 9-patch data\r\n * format limitations.\r\n */\r\n int32_t width = 0;\r\n\r\n /**\r\n * The height of the image in RGBA_8888 pixels. This is int32_t because of\r\n * 9-patch data\r\n * format limitations.\r\n */\r\n int32_t height = 0;\r\n\r\n /**\r\n * Buffer to the raw image data stored sequentially.\r\n * Use `rows` to access the data on a row-by-row basis.\r\n */\r\n std::unique_ptr<uint8_t[]> data;\r\n\r\n private:\r\n DISALLOW_COPY_AND_ASSIGN(Image);\r\n};\r\n\r\n/**\r\n * A range of pixel values, starting at 'start' and ending before 'end'\r\n * exclusive. Or rather [a, b).\r\n */\r\nstruct Range {\r\n int32_t start = 0;\r\n int32_t end = 0;\r\n\r\n explicit Range() = default;\r\n inline explicit Range(int32_t s, int32_t e) : start(s), end(e) {}\r\n};\r\n\r\ninline bool operator==(const Range& left, const Range& right) {\r\n return left.start == right.start && left.end == right.end;\r\n}\r\n\r\n/**\r\n * Inset lengths from all edges of a rectangle. 
`left` and `top` are measured\r\n * from the left and top\r\n * edges, while `right` and `bottom` are measured from the right and bottom\r\n * edges, respectively.\r\n */\r\nstruct Bounds {\r\n int32_t left = 0;\r\n int32_t top = 0;\r\n int32_t right = 0;\r\n int32_t bottom = 0;\r\n\r\n explicit Bounds() = default;\r\n inline explicit Bounds(int32_t l, int32_t t, int32_t r, int32_t b)\r\n : left(l), top(t), right(r), bottom(b) {}\r\n\r\n bool nonZero() const;\r\n};\r\n\r\ninline bool Bounds::nonZero() const {\r\n return left != 0 || top != 0 || right != 0 || bottom != 0;\r\n}\r\n\r\ninline bool operator==(const Bounds& left, const Bounds& right) {\r\n return left.left == right.left && left.top == right.top &&\r\n left.right == right.right && left.bottom == right.bottom;\r\n}\r\n\r\n/**\r\n * Contains 9-patch data from a source image. All measurements exclude the 1px\r\n * border of the\r\n * source 9-patch image.\r\n */\r\nclass NinePatch {\r\n public:\r\n static std::unique_ptr<NinePatch> Create(uint8_t** rows, const int32_t width,\r\n const int32_t height,\r\n std::string* err_out);\r\n\r\n /**\r\n * Packs the RGBA_8888 data pointed to by pixel into a uint32_t\r\n * with format 0xAARRGGBB (the way 9-patch expects it).\r\n */\r\n static uint32_t PackRGBA(const uint8_t* pixel);\r\n\r\n /**\r\n * 9-patch content padding/insets. All positions are relative to the 9-patch\r\n * NOT including the 1px thick source border.\r\n */\r\n Bounds padding;\r\n\r\n /**\r\n * Optical layout bounds/insets. This overrides the padding for\r\n * layout purposes. All positions are relative to the 9-patch\r\n * NOT including the 1px thick source border.\r\n * See\r\n * https://developer.android.com/about/versions/android-4.3.html#OpticalBounds\r\n */\r\n Bounds layout_bounds;\r\n\r\n /**\r\n * Outline of the image, calculated based on opacity.\r\n */\r\n Bounds outline;\r\n\r\n /**\r\n * The computed radius of the outline. 
If non-zero, the outline is a\r\n * rounded-rect.\r\n */\r\n float outline_radius = 0.0f;\r\n\r\n /**\r\n * The largest alpha value within the outline.\r\n */\r\n uint32_t outline_alpha = 0x000000ffu;\r\n\r\n /**\r\n * Horizontal regions of the image that are stretchable.\r\n * All positions are relative to the 9-patch\r\n * NOT including the 1px thick source border.\r\n */\r\n std::vector<Range> horizontal_stretch_regions;\r\n\r\n /**\r\n * Vertical regions of the image that are stretchable.\r\n * All positions are relative to the 9-patch\r\n * NOT including the 1px thick source border.\r\n */\r\n std::vector<Range> vertical_stretch_regions;\r\n\r\n /**\r\n * The colors within each region, fixed or stretchable.\r\n * For w*h regions, the color of region (x,y) is addressable\r\n * via index y*w + x.\r\n */\r\n std::vector<uint32_t> region_colors;\r\n\r\n /**\r\n * Returns serialized data containing the original basic 9-patch meta data.\r\n * Optical layout bounds and round rect outline data must be serialized\r\n * separately using SerializeOpticalLayoutBounds() and\r\n * SerializeRoundedRectOutline().\r\n */\r\n std::unique_ptr<uint8_t[]> SerializeBase(size_t* out_len) const;\r\n\r\n /**\r\n * Serializes the layout bounds.\r\n */\r\n std::unique_ptr<uint8_t[]> SerializeLayoutBounds(size_t* out_len) const;\r\n\r\n /**\r\n * Serializes the rounded-rect outline.\r\n */\r\n std::unique_ptr<uint8_t[]> SerializeRoundedRectOutline(size_t* out_len) const;\r\n\r\n private:\r\n explicit NinePatch() = default;\r\n\r\n DISALLOW_COPY_AND_ASSIGN(NinePatch);\r\n};\r\n\r\n::std::ostream& operator<<(::std::ostream& out, const Range& range);\r\n::std::ostream& operator<<(::std::ostream& out, const Bounds& bounds);\r\n::std::ostream& operator<<(::std::ostream& out, const NinePatch& nine_patch);\r\n\r\n} // namespace aapt\r\n\r\n#endif /* AAPT_COMPILE_IMAGE_H */\r\n" }, { "alpha_fraction": 0.6473429799079895, "alphanum_fraction": 0.659765362739563, "avg_line_length": 30.93181800842285, "blob_id": "a66f237f6870a0273afa9ed2eb5b62af4cc698ee", "content_id": "e4c257dac47251d69f7dd450a006e18696d2963b", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1449, "license_type": "permissive", "max_line_length": 78, "num_lines": 44, "path": "/libs/hwui/renderthread/Frame.cpp", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2016 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#include \"Frame.h\"\r\n#include <SkRect.h>\r\n\r\nnamespace android {\r\nnamespace uirenderer {\r\nnamespace renderthread {\r\n\r\nvoid Frame::map(const SkRect& in, int32_t* out) const {\r\n /* The rectangles are specified relative to the bottom-left of the surface\r\n * and the x and y components of each rectangle specify the bottom-left\r\n * position of that rectangle.\r\n *\r\n * HWUI does everything with 0,0 being 
top-left, so we need to map\r\n * the rect\r\n */\r\n    SkIRect idirty;\r\n    in.roundOut(&idirty);\r\n    int32_t y = mHeight - (idirty.y() + idirty.height());\r\n    // layout: {x, y, width, height}\r\n    out[0] = idirty.x();\r\n    out[1] = y;\r\n    out[2] = idirty.width();\r\n    out[3] = idirty.height();\r\n}\r\n\r\n} /* namespace renderthread */\r\n} /* namespace uirenderer */\r\n} /* namespace android */\r\n" }, { "alpha_fraction": 0.6975425481796265, "alphanum_fraction": 0.6975425481796265, "avg_line_length": 38.69230651855469, "blob_id": "0cab05a0af65a07d4d75611de6e8f457fbcd695c", "content_id": "a632e031993d7facd6bbdfcd032b7cafaf9634b2", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": true, "language": "Markdown", "length_bytes": 529, "license_type": "permissive", "max_line_length": 76, "num_lines": 13, "path": "/tests/ApkVerityTest/testdata/README.md", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "This test only runs on a rooted / debuggable device.\r\n\r\nThe test tries to install subsets of base.{apk,dm}, split.{apk,dm} and their\r\ncorresponding .fsv_sig files (generated by build rule). If installed, the\r\ntest also tries to tamper with the file at an absolute disk offset to verify\r\nif fs-verity is effective.\r\n\r\nHow to generate dex metadata (.dm)\r\n==================================\r\n\r\n    adb shell profman --generate-test-profile=/data/local/tmp/primary.prof\r\n    adb pull /data/local/tmp/primary.prof\r\n    zip foo.dm primary.prof\r\n" }, { "alpha_fraction": 0.653094470500946, "alphanum_fraction": 0.6596091389656067, "avg_line_length": 28.700000762939453, "blob_id": "ca1d094e5e0b3b07dc7035a700229aef50cbc0a4", "content_id": "2bd8b0aabc586ae25d3bbc0acc133ca04b418faf", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1228, "license_type": "permissive", "max_line_length": 75, "num_lines": 40, "path": "/telecomm/java/android/telecom/Response.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2014 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage android.telecom;\r\n\r\n/**\r\n * @hide\r\n */\r\npublic interface Response<IN, OUT> {\r\n\r\n    /**\r\n     * Provide a set of results.\r\n     *\r\n     * @param request The original request.\r\n     * @param result The results.\r\n     */\r\n    void onResult(IN request, OUT... 
result);\r\n\r\n /**\r\n * Indicates the inability to provide results.\r\n *\r\n * @param request The original request.\r\n * @param code An integer code indicating the reason for failure.\r\n * @param msg A message explaining the reason for failure.\r\n */\r\n void onError(IN request, int code, String msg);\r\n}\r\n" }, { "alpha_fraction": 0.5552288293838501, "alphanum_fraction": 0.5620290637016296, "avg_line_length": 39.219696044921875, "blob_id": "f2f7118c81b8a8976b539a4636084eeef4fd06c6", "content_id": "b6a17cdd8ebca17331c6011d6fa43e3e55b58ece", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5441, "license_type": "permissive", "max_line_length": 97, "num_lines": 132, "path": "/tools/aapt2/tools/consumers/duplicates.py", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "\"\"\"\r\nLooks for duplicate resource definitions and removes all but the last one.\r\n\"\"\"\r\n\r\nimport os.path\r\nimport xml.parsers.expat\r\n\r\nclass DuplicateRemover:\r\n def matches(self, file_path):\r\n dirname, basename = os.path.split(file_path)\r\n dirname = os.path.split(dirname)[1]\r\n return dirname.startswith(\"values\") and basename.endswith(\".xml\")\r\n\r\n def consume(self, xml_path, input):\r\n parser = xml.parsers.expat.ParserCreate(\"utf-8\")\r\n parser.returns_unicode = True\r\n tracker = ResourceDefinitionLocator(parser)\r\n parser.StartElementHandler = tracker.start_element\r\n parser.EndElementHandler = tracker.end_element\r\n parser.Parse(input)\r\n\r\n # Treat the input as UTF-8 or else column numbers will be wrong.\r\n input_lines = input.decode('utf-8').splitlines(True)\r\n\r\n # Extract the duplicate resource definitions, ignoring the last definition\r\n # which will take precedence and be left intact.\r\n duplicates = []\r\n for res_name, entries in tracker.resource_definitions.iteritems():\r\n if len(entries) > 1:\r\n duplicates += entries[:-1]\r\n\r\n # Sort the duplicates so that they are in order. 
That way we only do one pass.\r\n duplicates = sorted(duplicates, key=lambda x: x.start)\r\n\r\n last_line_no = 0\r\n last_col_no = 0\r\n output_lines = []\r\n current_line = \"\"\r\n for definition in duplicates:\r\n print \"{0}: removing duplicate resource '{1}'\".format(xml_path, definition.name)\r\n\r\n if last_line_no < definition.start[0]:\r\n # The next definition is on a new line, so write what we have\r\n # to the output.\r\n new_line = current_line + input_lines[last_line_no][last_col_no:]\r\n if not new_line.isspace():\r\n output_lines.append(new_line)\r\n current_line = \"\"\r\n last_col_no = 0\r\n last_line_no += 1\r\n\r\n # Copy all the lines up until this one.\r\n for line_to_copy in xrange(last_line_no, definition.start[0]):\r\n output_lines.append(input_lines[line_to_copy])\r\n\r\n # Add to the existing line we're building, by including the prefix of this line\r\n # and skipping the lines and characters until the end of this duplicate\r\n # definition.\r\n last_line_no = definition.start[0]\r\n current_line += input_lines[last_line_no][last_col_no:definition.start[1]]\r\n last_line_no = definition.end[0]\r\n last_col_no = definition.end[1]\r\n\r\n new_line = current_line + input_lines[last_line_no][last_col_no:]\r\n if not new_line.isspace():\r\n output_lines.append(new_line)\r\n current_line = \"\"\r\n last_line_no += 1\r\n last_col_no = 0\r\n\r\n for line_to_copy in xrange(last_line_no, len(input_lines)):\r\n output_lines.append(input_lines[line_to_copy])\r\n\r\n if len(duplicates) > 0:\r\n print \"deduped {0}\".format(xml_path)\r\n return \"\".join(output_lines).encode(\"utf-8\")\r\n return input\r\n\r\nclass Duplicate:\r\n \"\"\"A small struct to maintain the positions of a Duplicate resource definition.\"\"\"\r\n def __init__(self, name, product, depth, start, end):\r\n self.name = name\r\n self.product = product\r\n self.depth = depth\r\n self.start = start\r\n self.end = end\r\n\r\nclass ResourceDefinitionLocator:\r\n \"\"\"Callback class for xml.parsers.expat which records resource definitions and their\r\n locations.\r\n \"\"\"\r\n def __init__(self, parser):\r\n self.resource_definitions = {}\r\n self._parser = parser\r\n self._depth = 0\r\n self._current_resource = None\r\n\r\n def start_element(self, tag_name, attrs):\r\n self._depth += 1\r\n if self._depth == 2 and tag_name not in [\"public\", \"java-symbol\", \"eat-comment\", \"skip\"]:\r\n resource_name = None\r\n product = \"\"\r\n try:\r\n product = attrs[\"product\"]\r\n except KeyError:\r\n pass\r\n\r\n if tag_name == \"item\":\r\n resource_name = \"{0}/{1}\".format(attrs[\"type\"], attrs[\"name\"])\r\n else:\r\n resource_name = \"{0}/{1}\".format(tag_name, attrs[\"name\"])\r\n self._current_resource = Duplicate(\r\n resource_name,\r\n product,\r\n self._depth,\r\n (self._parser.CurrentLineNumber - 1, self._parser.CurrentColumnNumber),\r\n None)\r\n\r\n def end_element(self, tag_name):\r\n if self._current_resource and self._depth == self._current_resource.depth:\r\n # Record the end position of the element, which is the length of the name\r\n # plus the </> symbols (len(\"</>\") == 3).\r\n self._current_resource.end = (self._parser.CurrentLineNumber - 1,\r\n self._parser.CurrentColumnNumber + 3 + len(tag_name))\r\n key_name = \"{0}:{1}\".format(self._current_resource.name,\r\n self._current_resource.product)\r\n try:\r\n self.resource_definitions[key_name] += [self._current_resource]\r\n except KeyError:\r\n self.resource_definitions[key_name] = [self._current_resource]\r\n self._current_resource = 
None\r\n self._depth -= 1\r\n" }, { "alpha_fraction": 0.6767809987068176, "alphanum_fraction": 0.6928319931030273, "avg_line_length": 37.91228103637695, "blob_id": "354252c2eb777f1f05f9fe3a9782a3f0d691e750", "content_id": "2123d1ec62e07c632313e7dc97b5db98c5ebd652", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 4548, "license_type": "permissive", "max_line_length": 99, "num_lines": 114, "path": "/tests/net/common/java/android/net/NattKeepalivePacketDataTest.kt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2020 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage android.net\r\n\r\nimport android.net.InvalidPacketException.ERROR_INVALID_IP_ADDRESS\r\nimport android.net.InvalidPacketException.ERROR_INVALID_PORT\r\nimport android.net.NattSocketKeepalive.NATT_PORT\r\nimport android.os.Build\r\nimport androidx.test.filters.SmallTest\r\nimport androidx.test.runner.AndroidJUnit4\r\nimport com.android.testutils.assertEqualBothWays\r\nimport com.android.testutils.assertFieldCountEquals\r\nimport com.android.testutils.assertParcelSane\r\nimport com.android.testutils.DevSdkIgnoreRule\r\nimport com.android.testutils.DevSdkIgnoreRule.IgnoreUpTo\r\nimport com.android.testutils.parcelingRoundTrip\r\nimport java.net.InetAddress\r\nimport org.junit.Assert.assertEquals\r\nimport org.junit.Assert.assertNotEquals\r\nimport org.junit.Assert.fail\r\nimport org.junit.Rule\r\nimport org.junit.Test\r\nimport org.junit.runner.RunWith\r\n\r\n@RunWith(AndroidJUnit4::class)\r\n@SmallTest\r\nclass NattKeepalivePacketDataTest {\r\n @Rule @JvmField\r\n val ignoreRule: DevSdkIgnoreRule = DevSdkIgnoreRule()\r\n\r\n /* Refer to the definition in {@code NattKeepalivePacketData} */\r\n private val IPV4_HEADER_LENGTH = 20\r\n private val UDP_HEADER_LENGTH = 8\r\n\r\n private val TEST_PORT = 4243\r\n private val TEST_PORT2 = 4244\r\n private val TEST_SRC_ADDRV4 = \"198.168.0.2\".address()\r\n private val TEST_DST_ADDRV4 = \"198.168.0.1\".address()\r\n private val TEST_ADDRV6 = \"2001:db8::1\".address()\r\n\r\n private fun String.address() = InetAddresses.parseNumericAddress(this)\r\n private fun nattKeepalivePacket(\r\n srcAddress: InetAddress? = TEST_SRC_ADDRV4,\r\n srcPort: Int = TEST_PORT,\r\n dstAddress: InetAddress? 
= TEST_DST_ADDRV4,\r\n dstPort: Int = NATT_PORT\r\n ) = NattKeepalivePacketData.nattKeepalivePacket(srcAddress, srcPort, dstAddress, dstPort)\r\n\r\n @Test @IgnoreUpTo(Build.VERSION_CODES.Q)\r\n fun testConstructor() {\r\n try {\r\n nattKeepalivePacket(dstPort = TEST_PORT)\r\n fail(\"Dst port is not NATT port should cause exception\")\r\n } catch (e: InvalidPacketException) {\r\n assertEquals(e.error, ERROR_INVALID_PORT)\r\n }\r\n\r\n try {\r\n nattKeepalivePacket(srcAddress = TEST_ADDRV6)\r\n fail(\"A v6 srcAddress should cause exception\")\r\n } catch (e: InvalidPacketException) {\r\n assertEquals(e.error, ERROR_INVALID_IP_ADDRESS)\r\n }\r\n\r\n try {\r\n nattKeepalivePacket(dstAddress = TEST_ADDRV6)\r\n fail(\"A v6 dstAddress should cause exception\")\r\n } catch (e: InvalidPacketException) {\r\n assertEquals(e.error, ERROR_INVALID_IP_ADDRESS)\r\n }\r\n\r\n try {\r\n parcelingRoundTrip(\r\n NattKeepalivePacketData(TEST_SRC_ADDRV4, TEST_PORT, TEST_DST_ADDRV4, TEST_PORT,\r\n byteArrayOf(12, 31, 22, 44)))\r\n fail(\"Invalid data should cause exception\")\r\n } catch (e: IllegalArgumentException) { }\r\n }\r\n\r\n @Test @IgnoreUpTo(Build.VERSION_CODES.Q)\r\n fun testParcel() {\r\n assertParcelSane(nattKeepalivePacket(), 0)\r\n }\r\n\r\n @Test @IgnoreUpTo(Build.VERSION_CODES.Q)\r\n fun testEquals() {\r\n assertEqualBothWays(nattKeepalivePacket(), nattKeepalivePacket())\r\n assertNotEquals(nattKeepalivePacket(dstAddress = TEST_SRC_ADDRV4), nattKeepalivePacket())\r\n assertNotEquals(nattKeepalivePacket(srcAddress = TEST_DST_ADDRV4), nattKeepalivePacket())\r\n // Test src port only because dst port have to be NATT_PORT\r\n assertNotEquals(nattKeepalivePacket(srcPort = TEST_PORT2), nattKeepalivePacket())\r\n // Make sure the parceling test is updated if fields are added in the base class.\r\n assertFieldCountEquals(5, KeepalivePacketData::class.java)\r\n }\r\n\r\n @Test @IgnoreUpTo(Build.VERSION_CODES.Q)\r\n fun testHashCode() {\r\n assertEquals(nattKeepalivePacket().hashCode(), nattKeepalivePacket().hashCode())\r\n }\r\n}" }, { "alpha_fraction": 0.6866346597671509, "alphanum_fraction": 0.6928510665893555, "avg_line_length": 35.6489372253418, "blob_id": "32ebc08bc419efeec85fbc2d2c57ac9b611d4e8f", "content_id": "46a8a2af0d7e48cc497c42e83643370766599d24", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3539, "license_type": "permissive", "max_line_length": 100, "num_lines": 94, "path": "/cmds/statsd/src/state/StateTracker.h", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2019, The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n#pragma once\r\n\r\n#include <utils/RefBase.h>\r\n#include \"HashableDimensionKey.h\"\r\n#include \"logd/LogEvent.h\"\r\n\r\n#include \"state/StateListener.h\"\r\n\r\n#include <unordered_map>\r\n\r\nnamespace android {\r\nnamespace os {\r\nnamespace 
statsd {\r\n\r\nclass StateTracker : public virtual RefBase {\r\npublic:\r\n StateTracker(const int32_t atomId);\r\n\r\n virtual ~StateTracker(){};\r\n\r\n // Updates state map and notifies all listeners if a state change occurs.\r\n // Checks if a state change has occurred by getting the state value from\r\n // the log event and comparing the old and new states.\r\n void onLogEvent(const LogEvent& event);\r\n\r\n // Adds new listeners to set of StateListeners. If a listener is already\r\n // registered, it is ignored.\r\n void registerListener(wp<StateListener> listener);\r\n\r\n void unregisterListener(wp<StateListener> listener);\r\n\r\n // The output is a FieldValue object that has mStateField as the field and\r\n // the original state value (found using the given query key) as the value.\r\n //\r\n // If the key isn't mapped to a state or the key size doesn't match the\r\n // number of primary fields, the output value is set to kStateUnknown.\r\n bool getStateValue(const HashableDimensionKey& queryKey, FieldValue* output) const;\r\n\r\n inline int getListenersCount() const {\r\n return mListeners.size();\r\n }\r\n\r\n const static int kStateUnknown = -1;\r\n\r\nprivate:\r\n struct StateValueInfo {\r\n int32_t state = kStateUnknown; // state value\r\n int count = 0; // nested count (only used for binary states)\r\n };\r\n\r\n Field mField;\r\n\r\n // Maps primary key to state value info\r\n std::unordered_map<HashableDimensionKey, StateValueInfo> mStateMap;\r\n\r\n // Set of all StateListeners (objects listening for state changes)\r\n std::set<wp<StateListener>> mListeners;\r\n\r\n // Reset all state values in map to the given state.\r\n void handleReset(const int64_t eventTimeNs, const FieldValue& newState);\r\n\r\n // Clears the state value mapped to the given primary key by setting it to kStateUnknown.\r\n void clearStateForPrimaryKey(const int64_t eventTimeNs, const HashableDimensionKey& primaryKey);\r\n\r\n // Update the StateMap based on the received state value.\r\n void updateStateForPrimaryKey(const int64_t eventTimeNs, const HashableDimensionKey& primaryKey,\r\n const FieldValue& newState, const bool nested,\r\n StateValueInfo* stateValueInfo);\r\n\r\n // Notify registered state listeners of state change.\r\n void notifyListeners(const int64_t eventTimeNs, const HashableDimensionKey& primaryKey,\r\n const FieldValue& oldState, const FieldValue& newState);\r\n};\r\n\r\nbool getStateFieldValueFromLogEvent(const LogEvent& event, FieldValue* output);\r\n\r\n} // namespace statsd\r\n} // namespace os\r\n} // namespace android\r\n" }, { "alpha_fraction": 0.6346351504325867, "alphanum_fraction": 0.6441418528556824, "avg_line_length": 34.37383270263672, "blob_id": "a8a193940d7f14241293c5cc5e6526bfe1ae2709", "content_id": "1ff6c046b7952394380bd76baf759ba597c4d35c", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 3892, "license_type": "permissive", "max_line_length": 93, "num_lines": 107, "path": "/wifi/java/android/net/wifi/p2p/nsd/WifiP2pDnsSdServiceRequest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2012 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable 
law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage android.net.wifi.p2p.nsd;\r\n\r\nimport android.net.wifi.p2p.WifiP2pManager;\r\n\r\n/**\r\n * A class for creating a Bonjour service discovery request for use with\r\n * {@link WifiP2pManager#addServiceRequest} and {@link WifiP2pManager#removeServiceRequest}\r\n *\r\n * {@see WifiP2pManager}\r\n * {@see WifiP2pServiceRequest}\r\n * {@see WifiP2pUpnpServiceRequest}\r\n */\r\npublic class WifiP2pDnsSdServiceRequest extends WifiP2pServiceRequest {\r\n\r\n /**\r\n * This constructor is only used in newInstance().\r\n *\r\n * @param query The part of service specific query.\r\n * @hide\r\n */\r\n private WifiP2pDnsSdServiceRequest(String query) {\r\n super(WifiP2pServiceInfo.SERVICE_TYPE_BONJOUR, query);\r\n }\r\n\r\n /**\r\n * This constructor is only used in newInstance().\r\n * @hide\r\n */\r\n private WifiP2pDnsSdServiceRequest() {\r\n super(WifiP2pServiceInfo.SERVICE_TYPE_BONJOUR, null);\r\n }\r\n\r\n private WifiP2pDnsSdServiceRequest(String dnsQuery, int dnsType, int version) {\r\n super(WifiP2pServiceInfo.SERVICE_TYPE_BONJOUR, WifiP2pDnsSdServiceInfo.createRequest(\r\n dnsQuery,\r\n dnsType,\r\n version));\r\n }\r\n\r\n /**\r\n * Create a service discovery request to search all Bonjour services.\r\n *\r\n * @return service request for Bonjour.\r\n */\r\n public static WifiP2pDnsSdServiceRequest newInstance() {\r\n return new WifiP2pDnsSdServiceRequest();\r\n }\r\n\r\n /**\r\n * Create a service discovery to search for Bonjour services with the specified\r\n * service type.\r\n *\r\n * @param serviceType service type. Cannot be null <br>\r\n * \"_afpovertcp._tcp.\"(Apple File Sharing over TCP)<br>\r\n * \"_ipp._tcp\" (IP Printing over TCP)<br>\r\n * \"_http._tcp\" (http service)\r\n * @return service request for DnsSd.\r\n */\r\n public static WifiP2pDnsSdServiceRequest newInstance(String serviceType) {\r\n if (serviceType == null) {\r\n throw new IllegalArgumentException(\"service type cannot be null\");\r\n }\r\n return new WifiP2pDnsSdServiceRequest(serviceType + \".local.\",\r\n WifiP2pDnsSdServiceInfo.DNS_TYPE_PTR,\r\n WifiP2pDnsSdServiceInfo.VERSION_1);\r\n }\r\n\r\n /**\r\n * Create a service discovery request to get the TXT data from the specified\r\n * Bonjour service.\r\n *\r\n * @param instanceName instance name. Cannot be null. <br>\r\n * \"MyPrinter\"\r\n * @param serviceType service type. Cannot be null. 
<br>\r\n * e.g) <br>\r\n * \"_afpovertcp._tcp\"(Apple File Sharing over TCP)<br>\r\n * \"_ipp._tcp\" (IP Printing over TCP)<br>\r\n * @return service request for Bonjour.\r\n */\r\n public static WifiP2pDnsSdServiceRequest newInstance(String instanceName,\r\n String serviceType) {\r\n if (instanceName == null || serviceType == null) {\r\n throw new IllegalArgumentException(\r\n \"instance name or service type cannot be null\");\r\n }\r\n String fullDomainName = instanceName + \".\" + serviceType + \".local.\";\r\n return new WifiP2pDnsSdServiceRequest(fullDomainName,\r\n WifiP2pDnsSdServiceInfo.DNS_TYPE_TXT,\r\n WifiP2pDnsSdServiceInfo.VERSION_1);\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.5829834938049316, "alphanum_fraction": 0.5930590629577637, "avg_line_length": 27.286884307861328, "blob_id": "eb9a3961392417c3f3427fb0da841f5becbeea23", "content_id": "69d42fc3ee8f15ba94050bd66529512afb41cda0", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 3573, "license_type": "permissive", "max_line_length": 75, "num_lines": 122, "path": "/core/tests/benchmarks/src/android/os/ParcelArrayBenchmark.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2012 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage android.os;\r\n\r\nimport com.google.caliper.AfterExperiment;\r\nimport com.google.caliper.BeforeExperiment;\r\nimport com.google.caliper.Param;\r\n\r\npublic class ParcelArrayBenchmark {\r\n\r\n @Param({ \"1\", \"10\", \"100\", \"1000\" })\r\n private int mSize;\r\n\r\n private Parcel mWriteParcel;\r\n\r\n private byte[] mByteArray;\r\n private int[] mIntArray;\r\n private long[] mLongArray;\r\n\r\n private Parcel mByteParcel;\r\n private Parcel mIntParcel;\r\n private Parcel mLongParcel;\r\n\r\n @BeforeExperiment\r\n protected void setUp() {\r\n mWriteParcel = Parcel.obtain();\r\n\r\n mByteArray = new byte[mSize];\r\n mIntArray = new int[mSize];\r\n mLongArray = new long[mSize];\r\n\r\n mByteParcel = Parcel.obtain();\r\n mByteParcel.writeByteArray(mByteArray);\r\n mIntParcel = Parcel.obtain();\r\n mIntParcel.writeIntArray(mIntArray);\r\n mLongParcel = Parcel.obtain();\r\n mLongParcel.writeLongArray(mLongArray);\r\n }\r\n\r\n @AfterExperiment\r\n protected void tearDown() {\r\n mWriteParcel.recycle();\r\n mWriteParcel = null;\r\n }\r\n\r\n public void timeWriteByteArray(int reps) {\r\n for (int i = 0; i < reps; i++) {\r\n mWriteParcel.setDataPosition(0);\r\n mWriteParcel.writeByteArray(mByteArray);\r\n }\r\n }\r\n\r\n public void timeCreateByteArray(int reps) {\r\n for (int i = 0; i < reps; i++) {\r\n mByteParcel.setDataPosition(0);\r\n mByteParcel.createByteArray();\r\n }\r\n }\r\n\r\n public void timeReadByteArray(int reps) {\r\n for (int i = 0; i < reps; i++) {\r\n mByteParcel.setDataPosition(0);\r\n mByteParcel.readByteArray(mByteArray);\r\n }\r\n }\r\n\r\n 
public void timeWriteIntArray(int reps) {\r\n        for (int i = 0; i < reps; i++) {\r\n            mWriteParcel.setDataPosition(0);\r\n            mWriteParcel.writeIntArray(mIntArray);\r\n        }\r\n    }\r\n\r\n    public void timeCreateIntArray(int reps) {\r\n        for (int i = 0; i < reps; i++) {\r\n            mIntParcel.setDataPosition(0);\r\n            mIntParcel.createIntArray();\r\n        }\r\n    }\r\n\r\n    public void timeReadIntArray(int reps) {\r\n        for (int i = 0; i < reps; i++) {\r\n            mIntParcel.setDataPosition(0);\r\n            mIntParcel.readIntArray(mIntArray);\r\n        }\r\n    }\r\n\r\n    public void timeWriteLongArray(int reps) {\r\n        for (int i = 0; i < reps; i++) {\r\n            mWriteParcel.setDataPosition(0);\r\n            mWriteParcel.writeLongArray(mLongArray);\r\n        }\r\n    }\r\n\r\n    public void timeCreateLongArray(int reps) {\r\n        for (int i = 0; i < reps; i++) {\r\n            mLongParcel.setDataPosition(0);\r\n            mLongParcel.createLongArray();\r\n        }\r\n    }\r\n\r\n    public void timeReadLongArray(int reps) {\r\n        for (int i = 0; i < reps; i++) {\r\n            mLongParcel.setDataPosition(0);\r\n            mLongParcel.readLongArray(mLongArray);\r\n        }\r\n    }\r\n}\r\n" }, { "alpha_fraction": 0.6852508783340454, "alphanum_fraction": 0.6893056035041809, "avg_line_length": 30.883333206176758, "blob_id": "828d67a81ac65ebcfac52d7e76b478bc6d2c0df3", "content_id": "76059284b3cfd7f94bfc2e904614dd13f6aa19db", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1973, "license_type": "permissive", "max_line_length": 91, "num_lines": 60, "path": "/packages/PrintSpooler/src/com/android/printspooler/util/BitmapSerializeUtils.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2014 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.printspooler.util;\r\n\r\nimport android.graphics.Bitmap;\r\nimport android.os.ParcelFileDescriptor;\r\n\r\n/**\r\n * Helper for serialization of bitmaps in the very specific\r\n * use case of having the same bitmap on both ends and just\r\n * marshaling the pixels from one side to the other.\r\n */\r\npublic final class BitmapSerializeUtils {\r\n\r\n    static {\r\n        System.loadLibrary(\"printspooler_jni\");\r\n    }\r\n\r\n    private BitmapSerializeUtils() {\r\n        /* do nothing */\r\n    }\r\n\r\n    /**\r\n     * Reads a bitmap's pixels from a file descriptor.\r\n     *\r\n     * @param bitmap A bitmap whose pixels to populate.\r\n     * @param source The source file descriptor.\r\n     */\r\n    public static void readBitmapPixels(Bitmap bitmap, ParcelFileDescriptor source) {\r\n        nativeReadBitmapPixels(bitmap, source.getFd());\r\n    }\r\n\r\n    /**\r\n     * Writes a bitmap's pixels to a file descriptor.\r\n     *\r\n     * @param bitmap The bitmap.\r\n     * @param destination The destination file descriptor.\r\n     */\r\n    public static void writeBitmapPixels(Bitmap bitmap, ParcelFileDescriptor destination) {\r\n        nativeWriteBitmapPixels(bitmap, destination.getFd());\r\n    }\r\n\r\n    private static native void 
nativeReadBitmapPixels(Bitmap bitmap, int fd);\r\n\r\n private static native void nativeWriteBitmapPixels(Bitmap bitmap, int fd);\r\n}\r\n" }, { "alpha_fraction": 0.6525096297264099, "alphanum_fraction": 0.6660231947898865, "avg_line_length": 31.636363983154297, "blob_id": "31ce7270f09c60b225f937dd846c26e86d371b2a", "content_id": "785bcaf6e387dd409705730f8f4023c8bdba9cd0", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2590, "license_type": "permissive", "max_line_length": 99, "num_lines": 77, "path": "/tests/HwAccelerationTest/src/com/android/test/hwui/MipMapActivity.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2010 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.test.hwui;\r\n\r\nimport android.app.Activity;\r\nimport android.content.Context;\r\nimport android.graphics.Bitmap;\r\nimport android.graphics.BitmapFactory;\r\nimport android.graphics.Canvas;\r\nimport android.graphics.Paint;\r\nimport android.graphics.PorterDuff;\r\nimport android.graphics.PorterDuffXfermode;\r\nimport android.os.Bundle;\r\nimport android.util.Log;\r\nimport android.view.Gravity;\r\nimport android.view.View;\r\nimport android.view.animation.Animation;\r\nimport android.view.animation.ScaleAnimation;\r\nimport android.widget.FrameLayout;\r\n\r\n@SuppressWarnings({\"UnusedDeclaration\"})\r\npublic class MipMapActivity extends Activity {\r\n @Override\r\n protected void onCreate(Bundle savedInstanceState) {\r\n super.onCreate(savedInstanceState);\r\n final BitmapsView view = new BitmapsView(this);\r\n setContentView(view);\r\n }\r\n\r\n static class BitmapsView extends View {\r\n private Paint mBitmapPaint;\r\n private final Bitmap mBitmap1;\r\n private final Bitmap mBitmap2;\r\n\r\n BitmapsView(Context c) {\r\n super(c);\r\n\r\n mBitmap1 = BitmapFactory.decodeResource(c.getResources(), R.drawable.very_large_photo);\r\n mBitmap2 = BitmapFactory.decodeResource(c.getResources(), R.drawable.very_large_photo);\r\n\r\n mBitmap1.setHasMipMap(true);\r\n\r\n mBitmapPaint = new Paint();\r\n mBitmapPaint.setFilterBitmap(true);\r\n }\r\n\r\n @Override\r\n protected void onDraw(Canvas canvas) {\r\n super.onDraw(canvas);\r\n\r\n canvas.save();\r\n canvas.scale(0.3f, 0.3f);\r\n canvas.drawBitmap(mBitmap1, 0, 0, mBitmapPaint);\r\n canvas.restore();\r\n\r\n canvas.save();\r\n canvas.translate(mBitmap1.getWidth() * 0.3f + 96.0f, 0.0f);\r\n canvas.scale(0.3f, 0.3f);\r\n canvas.drawBitmap(mBitmap2, 0, 0, mBitmapPaint);\r\n canvas.restore();\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6296647787094116, "alphanum_fraction": 0.633776068687439, "avg_line_length": 33.52809143066406, "blob_id": "b8dd96250103cf56572fa31518b9ee49cbe79fda", "content_id": "aacbb906deaec4368e5383c91da52222c44c6703", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], 
"is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 6324, "license_type": "permissive", "max_line_length": 98, "num_lines": 178, "path": "/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/TrackDecoder.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2012 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage androidx.media.filterfw.decoder;\r\n\r\nimport android.annotation.TargetApi;\r\nimport android.media.MediaCodec;\r\nimport android.media.MediaCodec.BufferInfo;\r\nimport android.media.MediaExtractor;\r\nimport android.media.MediaFormat;\r\nimport android.util.Log;\r\n\r\nimport java.nio.ByteBuffer;\r\n\r\n@TargetApi(16)\r\nabstract class TrackDecoder {\r\n\r\n interface Listener {\r\n void onDecodedOutputAvailable(TrackDecoder decoder);\r\n\r\n void onEndOfStream(TrackDecoder decoder);\r\n }\r\n\r\n private static final String LOG_TAG = \"TrackDecoder\";\r\n\r\n private static final long TIMEOUT_US = 50; // Timeout for en-queueing and de-queueing buffers.\r\n\r\n private static final int NO_INPUT_BUFFER = -1;\r\n\r\n private final int mTrackIndex;\r\n private final MediaFormat mMediaFormat;\r\n private final Listener mListener;\r\n\r\n private MediaCodec mMediaCodec;\r\n private MediaFormat mOutputFormat;\r\n\r\n private ByteBuffer[] mCodecInputBuffers;\r\n private ByteBuffer[] mCodecOutputBuffers;\r\n\r\n private boolean mShouldEnqueueEndOfStream;\r\n\r\n /**\r\n * @return a configured {@link MediaCodec}.\r\n */\r\n protected abstract MediaCodec initMediaCodec(MediaFormat format);\r\n\r\n /**\r\n * Called when decoded output is available. 
The implementer is responsible for releasing the\r\n * assigned buffer.\r\n *\r\n * @return {@code true} if any further decoding should be attempted at the moment.\r\n */\r\n protected abstract boolean onDataAvailable(\r\n MediaCodec codec, ByteBuffer[] buffers, int bufferIndex, BufferInfo info);\r\n\r\n protected TrackDecoder(int trackIndex, MediaFormat mediaFormat, Listener listener) {\r\n mTrackIndex = trackIndex;\r\n\r\n if (mediaFormat == null) {\r\n throw new NullPointerException(\"mediaFormat cannot be null\");\r\n }\r\n mMediaFormat = mediaFormat;\r\n\r\n if (listener == null) {\r\n throw new NullPointerException(\"listener cannot be null\");\r\n }\r\n mListener = listener;\r\n }\r\n\r\n public void init() {\r\n mMediaCodec = initMediaCodec(mMediaFormat);\r\n mMediaCodec.start();\r\n mCodecInputBuffers = mMediaCodec.getInputBuffers();\r\n mCodecOutputBuffers = mMediaCodec.getOutputBuffers();\r\n }\r\n\r\n public void signalEndOfInput() {\r\n mShouldEnqueueEndOfStream = true;\r\n tryEnqueueEndOfStream();\r\n }\r\n\r\n public void release() {\r\n if (mMediaCodec != null) {\r\n mMediaCodec.stop();\r\n mMediaCodec.release();\r\n }\r\n }\r\n\r\n protected MediaCodec getMediaCodec() {\r\n return mMediaCodec;\r\n }\r\n\r\n protected void notifyListener() {\r\n mListener.onDecodedOutputAvailable(this);\r\n }\r\n\r\n public boolean feedInput(MediaExtractor mediaExtractor) {\r\n long presentationTimeUs = 0;\r\n\r\n int inputBufferIndex = mMediaCodec.dequeueInputBuffer(TIMEOUT_US);\r\n if (inputBufferIndex != NO_INPUT_BUFFER) {\r\n ByteBuffer destinationBuffer = mCodecInputBuffers[inputBufferIndex];\r\n int sampleSize = mediaExtractor.readSampleData(destinationBuffer, 0);\r\n // We don't expect to get a sample without any data, so this should never happen.\r\n if (sampleSize < 0) {\r\n Log.w(LOG_TAG, \"Media extractor had sample but no data.\");\r\n\r\n // Signal the end of the track immediately anyway, using the buffer.\r\n mMediaCodec.queueInputBuffer(\r\n inputBufferIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);\r\n return false;\r\n }\r\n\r\n presentationTimeUs = mediaExtractor.getSampleTime();\r\n mMediaCodec.queueInputBuffer(\r\n inputBufferIndex,\r\n 0,\r\n sampleSize,\r\n presentationTimeUs,\r\n 0);\r\n\r\n return mediaExtractor.advance()\r\n && mediaExtractor.getSampleTrackIndex() == mTrackIndex;\r\n }\r\n return false;\r\n }\r\n\r\n private void tryEnqueueEndOfStream() {\r\n int inputBufferIndex = mMediaCodec.dequeueInputBuffer(TIMEOUT_US);\r\n // We will always eventually have an input buffer, because we keep trying until the last\r\n // decoded frame is output.\r\n // The EoS does not need to be signaled if the application stops decoding.\r\n if (inputBufferIndex != NO_INPUT_BUFFER) {\r\n mMediaCodec.queueInputBuffer(\r\n inputBufferIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);\r\n mShouldEnqueueEndOfStream = false;\r\n }\r\n }\r\n\r\n public boolean drainOutputBuffer() {\r\n BufferInfo outputInfo = new BufferInfo();\r\n int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(outputInfo, TIMEOUT_US);\r\n\r\n if ((outputInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {\r\n mListener.onEndOfStream(this);\r\n return false;\r\n }\r\n if (mShouldEnqueueEndOfStream) {\r\n tryEnqueueEndOfStream();\r\n }\r\n if (outputBufferIndex >= 0) {\r\n return onDataAvailable(\r\n mMediaCodec, mCodecOutputBuffers, outputBufferIndex, outputInfo);\r\n } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {\r\n mCodecOutputBuffers = 
mMediaCodec.getOutputBuffers();\r\n return true;\r\n } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {\r\n mOutputFormat = mMediaCodec.getOutputFormat();\r\n Log.d(LOG_TAG, \"Output format has changed to \" + mOutputFormat);\r\n return true;\r\n }\r\n return false;\r\n }\r\n\r\n}\r\n" }, { "alpha_fraction": 0.6563335061073303, "alphanum_fraction": 0.6606093049049377, "avg_line_length": 29.183332443237305, "blob_id": "018ab1f6ae6f5c6c3f4fdf0d5a074a3b4dd9cc85", "content_id": "91ac4feb17284d1a1a44a4f6e4b2baafd48bfc92", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1871, "license_type": "permissive", "max_line_length": 78, "num_lines": 60, "path": "/packages/CarrierDefaultApp/tests/unit/src/com/android/carrierdefaultapp/TestContext.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2016 Google Inc.\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\npackage com.android.carrierdefaultapp;\r\n\r\nimport android.content.Context;\r\nimport android.content.ContextWrapper;\r\nimport android.util.Log;\r\n\r\nimport java.util.HashMap;\r\n\r\npublic class TestContext extends ContextWrapper {\r\n\r\n private final String TAG = this.getClass().getSimpleName();\r\n\r\n private HashMap<String, Object> mInjectedSystemServices = new HashMap<>();\r\n\r\n public TestContext(Context base) {\r\n super(base);\r\n }\r\n\r\n public <S> void injectSystemService(Class<S> cls, S service) {\r\n final String name = getSystemServiceName(cls);\r\n mInjectedSystemServices.put(name, service);\r\n }\r\n\r\n @Override\r\n public Context getApplicationContext() {\r\n return this;\r\n }\r\n\r\n @Override\r\n public Object getSystemService(String name) {\r\n if (mInjectedSystemServices.containsKey(name)) {\r\n Log.d(TAG, \"return mocked system service for \" + name);\r\n return mInjectedSystemServices.get(name);\r\n }\r\n Log.d(TAG, \"return real system service for \" + name);\r\n return super.getSystemService(name);\r\n }\r\n\r\n public static void waitForMs(long ms) {\r\n try {\r\n Thread.sleep(ms);\r\n } catch (InterruptedException e) {\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.7735849022865295, "alphanum_fraction": 0.7735849022865295, "avg_line_length": 40.400001525878906, "blob_id": "f216513e0bf8e3fee189ec9033679daa840fc526", "content_id": "293472385225d5cd88595e956879d352a9a3cd2b", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 212, "license_type": "permissive", "max_line_length": 70, "num_lines": 5, "path": "/startop/apps/ColorChanging/README.md", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "This directory contains a simple Android app that is meant to help in \r\nsyncing a trace along with a video in Perfetto.\r\n\r\nThis app changes the colors of the screen 
that has traces to go along\r\nwith the colors.\r\n" }, { "alpha_fraction": 0.670019268989563, "alphanum_fraction": 0.6751551628112793, "avg_line_length": 39.91031265258789, "blob_id": "ffa28a09f121d9e410472f96303e5b2c221eb599", "content_id": "8805464fb70e9a552afd30d2c320553807c0bc03", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 9346, "license_type": "permissive", "max_line_length": 99, "num_lines": 223, "path": "/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/CameraUtilsTypeReferenceTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2014 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.mediaframeworktest.unit;\r\n\r\nimport static android.hardware.camera2.utils.TypeReference.*;\r\n\r\nimport android.hardware.camera2.utils.TypeReference;\r\n\r\nimport android.test.suitebuilder.annotation.SmallTest;\r\nimport android.util.Log;\r\n\r\nimport java.lang.reflect.Type;\r\nimport java.util.List;\r\n\r\npublic class CameraUtilsTypeReferenceTest extends junit.framework.TestCase {\r\n private static final String TAG = CameraUtilsTypeReferenceTest.class.getSimpleName();\r\n private static final boolean VERBOSE = false;\r\n\r\n private class RegularClass {}\r\n private class SubClass extends RegularClass {}\r\n\r\n private class GenericClass<T> {}\r\n private class SubGenericClass<T> extends GenericClass<T> {}\r\n\r\n private class SpecificClass extends GenericClass<Integer> {}\r\n\r\n private interface RegularInterface {}\r\n private interface GenericInterface<T> {}\r\n private interface GenericInterface2<T> {}\r\n\r\n private class ImplementsRegularInterface implements RegularInterface {}\r\n private class ImplementsGenericInterface<T> implements GenericInterface<T> {}\r\n private class Implements2GenericInterface<T>\r\n implements GenericInterface<Integer>, GenericInterface2<T> {}\r\n\r\n private class GenericOuterClass<T> {\r\n class GenericInnerClass {\r\n @SuppressWarnings(\"unused\")\r\n T field;\r\n }\r\n }\r\n\r\n private static void assertContainsTypeVariable(Type type) {\r\n assertTrue(type.toString() + \" was expected to have a type variable, but it didn't\",\r\n containsTypeVariable(type));\r\n }\r\n\r\n private static void assertLacksTypeVariable(Type type) {\r\n assertFalse(type.toString() + \" was expected to *not* have a type variable, but it did\",\r\n containsTypeVariable(type));\r\n }\r\n\r\n /*\r\n * Only test classes and interfaces. Other types are not tested (e.g. 
fields, methods, etc).\r\n */\r\n\r\n @SmallTest\r\n public void testLacksTypeVariables() {\r\n assertLacksTypeVariable(RegularClass.class);\r\n assertLacksTypeVariable(SubClass.class);\r\n assertLacksTypeVariable(SpecificClass.class);\r\n\r\n assertLacksTypeVariable(RegularInterface.class);\r\n assertLacksTypeVariable(ImplementsRegularInterface.class);\r\n }\r\n\r\n @SmallTest\r\n public void testContainsTypeVariables() {\r\n assertContainsTypeVariable(GenericClass.class);\r\n assertContainsTypeVariable(SubGenericClass.class);\r\n\r\n assertContainsTypeVariable(GenericInterface.class);\r\n assertContainsTypeVariable(ImplementsGenericInterface.class);\r\n assertContainsTypeVariable(Implements2GenericInterface.class);\r\n\r\n assertContainsTypeVariable(GenericOuterClass.class);\r\n assertContainsTypeVariable(GenericOuterClass.GenericInnerClass.class);\r\n }\r\n\r\n /**\r\n * This should always throw an IllegalArgumentException since the\r\n * type reference to {@code T} will contain a type variable (also {@code T}).\r\n *\r\n * @throws IllegalArgumentException unconditionally\r\n */\r\n private static <T> TypeReference<T> createTypeRefWithTypeVar() {\r\n return new TypeReference<T>() {{ }};\r\n }\r\n\r\n @SmallTest\r\n public void testTypeReferences() {\r\n TypeReference<Integer> typeRefInt = new TypeReference<Integer>() {{ }};\r\n TypeReference<Integer> typeRefInt2 = new TypeReference<Integer>() {{ }};\r\n\r\n assertEquals(typeRefInt, typeRefInt2);\r\n assertEquals(\"The type ref's captured type should be the Integer class\",\r\n Integer.class, typeRefInt.getType());\r\n\r\n TypeReference<Float> typeRefFloat = new TypeReference<Float>() {{ }};\r\n assertFalse(\"Integer/Float type references must not be equal\",\r\n typeRefInt.equals(typeRefFloat));\r\n assertEquals(\"The type ref's captured type should be the Float class\",\r\n Float.class, typeRefFloat.getType());\r\n\r\n try {\r\n TypeReference<Integer> typeRefTypeVar = createTypeRefWithTypeVar();\r\n fail(\"Expected a type reference with type variables to fail\");\r\n // Unreachable. Make the warning about an unused variable go away.\r\n assertFalse(typeRefTypeVar.equals(typeRefInt));\r\n } catch (IllegalArgumentException e) {\r\n // OK. 
Expected behavior\r\n }\r\n }\r\n\r\n // Compare the raw type against rawClass\r\n private static <T> void assertRawTypeEquals(TypeReference<T> typeRef, Class<?> rawClass) {\r\n assertEquals(\"Expected the raw type from \" + typeRef + \" to match the class \" + rawClass,\r\n rawClass, typeRef.getRawType());\r\n }\r\n\r\n // Compare the normal type against the klass\r\n private static <T> void assertTypeReferenceEquals(TypeReference<T> typeRef, Class<?> klass) {\r\n assertEquals(\"Expected the type from \" + typeRef + \" to match the class \" + klass,\r\n klass, typeRef.getType());\r\n }\r\n\r\n @SmallTest\r\n public void testRawTypes() {\r\n TypeReference<Integer> intToken = new TypeReference<Integer>() {{ }};\r\n assertRawTypeEquals(intToken, Integer.class);\r\n\r\n TypeReference<List<Integer>> listToken = new TypeReference<List<Integer>>() {{ }};\r\n assertRawTypeEquals(listToken, List.class);\r\n\r\n TypeReference<List<List<Integer>>> listListToken =\r\n new TypeReference<List<List<Integer>>>() {{ }};\r\n assertRawTypeEquals(listListToken, List.class);\r\n\r\n TypeReference<int[]> intArrayToken = new TypeReference<int[]>() {{ }};\r\n assertRawTypeEquals(intArrayToken, int[].class);\r\n\r\n TypeReference<Integer[]> integerArrayToken = new TypeReference<Integer[]>() {{ }};\r\n assertRawTypeEquals(integerArrayToken, Integer[].class);\r\n\r\n TypeReference<List<Integer>[]> listArrayToken = new TypeReference<List<Integer>[]>() {{ }};\r\n assertRawTypeEquals(listArrayToken, List[].class);\r\n }\r\n\r\n private class IntTokenOne extends TypeReference<Integer> {}\r\n private class IntTokenTwo extends TypeReference<Integer> {}\r\n\r\n private class IntArrayToken1 extends TypeReference<Integer[]> {}\r\n private class IntArrayToken2 extends TypeReference<Integer[]> {}\r\n\r\n private class IntListToken1 extends TypeReference<List<Integer>> {}\r\n private class IntListToken2 extends TypeReference<List<Integer>> {}\r\n\r\n private class IntListArrayToken1 extends TypeReference<List<Integer>[]> {}\r\n private class IntListArrayToken2 extends TypeReference<List<Integer>[]> {}\r\n\r\n\r\n // FIXME: Equality will fail: b/14590652\r\n @SmallTest\r\n public void testEquals() {\r\n // Not an array. component type should be null.\r\n TypeReference<Integer> intToken = new TypeReference<Integer>() {{ }};\r\n assertEquals(intToken, intToken);\r\n assertEquals(intToken, new TypeReference<Integer>() {{ }});\r\n\r\n assertEquals(intToken, new IntTokenOne());\r\n assertEquals(intToken, new IntTokenTwo());\r\n assertEquals(new IntTokenOne(), new IntTokenTwo());\r\n\r\n assertEquals(new IntArrayToken1(), new IntArrayToken2());\r\n assertEquals(new IntListToken1(), new IntListToken2());\r\n assertEquals(new IntListArrayToken1(), new IntListArrayToken2());\r\n }\r\n\r\n @SmallTest\r\n public void testComponentType() {\r\n // Not an array. component type should be null.\r\n TypeReference<Integer> intToken = new TypeReference<Integer>() {{ }};\r\n assertNull(intToken.getComponentType());\r\n\r\n TypeReference<List<Integer>> listToken = new TypeReference<List<Integer>>() {{ }};\r\n assertNull(listToken.getComponentType());\r\n\r\n TypeReference<List<List<Integer>>> listListToken =\r\n new TypeReference<List<List<Integer>>>() {{ }};\r\n assertNull(listListToken.getComponentType());\r\n\r\n // Check arrays. 
Component types should be what we expect.\r\n TypeReference<int[]> intArrayToken = new TypeReference<int[]>() {{ }};\r\n assertTypeReferenceEquals(intArrayToken.getComponentType(), int.class);\r\n\r\n TypeReference<Integer[]> integerArrayToken = new TypeReference<Integer[]>() {{ }};\r\n assertTypeReferenceEquals(integerArrayToken.getComponentType(), Integer.class);\r\n\r\n assertEquals(new IntArrayToken1().getComponentType(),\r\n new IntArrayToken2().getComponentType());\r\n\r\n assertEquals(new IntListArrayToken1().getComponentType(),\r\n new IntListArrayToken2().getComponentType());\r\n\r\n // FIXME: Equality will fail: b/14590652\r\n TypeReference<List<Integer>[]> listArrayToken = new TypeReference<List<Integer>[]>() {{ }};\r\n assertEquals(listToken, listArrayToken.getComponentType());\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6499121189117432, "alphanum_fraction": 0.6748681664466858, "avg_line_length": 33.12345504760742, "blob_id": "e3991ead4fffa3079d1a1726706adf66a0c75287", "content_id": "14e57eb5c7867bdac9ca1535f5f76bd68c23b28c", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2845, "license_type": "permissive", "max_line_length": 96, "num_lines": 81, "path": "/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/power/MediaPlayerPowerTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2010 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\r\n * use this file except in compliance with the License. You may obtain a copy of\r\n * the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\r\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\r\n * License for the specific language governing permissions and limitations under\r\n * the License.\r\n */\r\n\r\npackage com.android.mediaframeworktest.power;\r\n\r\nimport com.android.mediaframeworktest.MediaFrameworkTest;\r\nimport com.android.mediaframeworktest.MediaNames;\r\nimport android.media.MediaPlayer;\r\nimport android.os.Environment;\r\nimport android.test.ActivityInstrumentationTestCase2;\r\nimport android.util.Log;\r\n\r\nimport java.io.File;\r\n\r\n/**\r\n * JUnit / Instrumentation test case for the power measurement of the media player\r\n */\r\npublic class MediaPlayerPowerTest extends ActivityInstrumentationTestCase2<MediaFrameworkTest> {\r\n private String TAG = \"MediaPlayerPowerTest\";\r\n private String MP3_POWERTEST =\r\n Environment.getExternalStorageDirectory().toString() + \"/power_sample_mp3.mp3\";\r\n private String MP3_STREAM = \"http://75.17.48.204:10088/power_media/power_sample_mp3.mp3\";\r\n private String OGG_STREAM = \"http://75.17.48.204:10088/power_media/power_sample_ogg.mp3\";\r\n private String AAC_STREAM = \"http://75.17.48.204:10088/power_media/power_sample_aac.mp3\";\r\n\r\n public MediaPlayerPowerTest() {\r\n super(\"com.android.mediaframeworktest\", MediaFrameworkTest.class);\r\n }\r\n\r\n protected void setUp() throws Exception {\r\n getActivity();\r\n super.setUp();\r\n\r\n }\r\n\r\n public void audioPlayback(String filePath) {\r\n try {\r\n MediaPlayer mp = new MediaPlayer();\r\n mp.setDataSource(filePath);\r\n mp.prepare();\r\n mp.start();\r\n Thread.sleep(200000);\r\n mp.stop();\r\n mp.release();\r\n } catch (Exception e) {\r\n Log.v(TAG, e.toString());\r\n assertTrue(\"MP3 Playback\", false);\r\n }\r\n }\r\n\r\n // A very simple test case which starts the audio player.\r\n // Power measurement will be done in another application.\r\n public void testPowerLocalMP3Playback() throws Exception {\r\n audioPlayback(MP3_POWERTEST);\r\n }\r\n\r\n public void testPowerStreamMP3Playback() throws Exception {\r\n audioPlayback(MP3_STREAM);\r\n }\r\n\r\n public void testPowerStreamOGGPlayback() throws Exception {\r\n audioPlayback(OGG_STREAM);\r\n }\r\n\r\n public void testPowerStreamAACPlayback() throws Exception {\r\n audioPlayback(AAC_STREAM);\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6809523701667786, "alphanum_fraction": 0.686904788017273, "avg_line_length": 26, "blob_id": "a0924a285a0052cbbc724459162e660fed490409", "content_id": "e5f5177c9f4df27be6948a57bd5905e2f586cd4e", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1680, "license_type": "permissive", "max_line_length": 75, "num_lines": 60, "path": "/tools/aapt2/compile/Pseudolocalizer.h", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2015 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#ifndef AAPT_COMPILE_PSEUDOLOCALIZE_H\r\n#define 
AAPT_COMPILE_PSEUDOLOCALIZE_H\r\n\r\n#include <memory>\r\n\r\n#include \"android-base/macros.h\"\r\n#include \"androidfw/StringPiece.h\"\r\n\r\n#include \"ResourceValues.h\"\r\n#include \"StringPool.h\"\r\n\r\nnamespace aapt {\r\n\r\nclass PseudoMethodImpl {\r\n public:\r\n virtual ~PseudoMethodImpl() {}\r\n virtual std::string Start() { return {}; }\r\n virtual std::string End() { return {}; }\r\n virtual std::string Text(const android::StringPiece& text) = 0;\r\n virtual std::string Placeholder(const android::StringPiece& text) = 0;\r\n};\r\n\r\nclass Pseudolocalizer {\r\n public:\r\n enum class Method {\r\n kNone,\r\n kAccent,\r\n kBidi,\r\n };\r\n\r\n explicit Pseudolocalizer(Method method);\r\n void SetMethod(Method method);\r\n std::string Start() { return impl_->Start(); }\r\n std::string End() { return impl_->End(); }\r\n std::string Text(const android::StringPiece& text);\r\n\r\n private:\r\n std::unique_ptr<PseudoMethodImpl> impl_;\r\n size_t last_depth_;\r\n};\r\n\r\n} // namespace aapt\r\n\r\n#endif /* AAPT_COMPILE_PSEUDOLOCALIZE_H */\r\n" }, { "alpha_fraction": 0.6955249309539795, "alphanum_fraction": 0.6972243189811707, "avg_line_length": 41.9502067565918, "blob_id": "2f68958d96066b559dcdedfd37ef91a83c48eea6", "content_id": "f253c3173cef93dcf6bfc75f133aa447eb40b80e", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 10592, "license_type": "permissive", "max_line_length": 99, "num_lines": 241, "path": "/packages/SystemUI/tests/src/com/android/systemui/media/MediaDataManagerTest.kt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "package com.android.systemui.media\r\n\r\nimport android.app.Notification.MediaStyle\r\nimport android.app.PendingIntent\r\nimport android.media.MediaDescription\r\nimport android.media.MediaMetadata\r\nimport android.media.session.MediaController\r\nimport android.media.session.MediaSession\r\nimport android.service.notification.StatusBarNotification\r\nimport android.testing.AndroidTestingRunner\r\nimport android.testing.TestableLooper.RunWithLooper\r\nimport androidx.test.filters.SmallTest\r\nimport com.android.systemui.SysuiTestCase\r\nimport com.android.systemui.broadcast.BroadcastDispatcher\r\nimport com.android.systemui.dump.DumpManager\r\nimport com.android.systemui.statusbar.SbnBuilder\r\nimport com.android.systemui.util.concurrency.FakeExecutor\r\nimport com.android.systemui.util.mockito.eq\r\nimport com.android.systemui.util.time.FakeSystemClock\r\nimport com.google.common.truth.Truth.assertThat\r\nimport org.junit.After\r\nimport org.junit.Before\r\nimport org.junit.Rule\r\nimport org.junit.Test\r\nimport org.junit.runner.RunWith\r\nimport org.mockito.Mock\r\nimport org.mockito.Mockito\r\nimport org.mockito.Mockito.mock\r\nimport org.mockito.Mockito.verify\r\nimport org.mockito.junit.MockitoJUnit\r\nimport org.mockito.Mockito.`when` as whenever\r\n\r\nprivate const val KEY = \"KEY\"\r\nprivate const val KEY_2 = \"KEY_2\"\r\nprivate const val PACKAGE_NAME = \"com.android.systemui\"\r\nprivate const val APP_NAME = \"SystemUI\"\r\nprivate const val SESSION_ARTIST = \"artist\"\r\nprivate const val SESSION_TITLE = \"title\"\r\nprivate const val USER_ID = 0\r\n\r\nprivate fun <T> anyObject(): T {\r\n return Mockito.anyObject<T>()\r\n}\r\n\r\n@SmallTest\r\n@RunWithLooper(setAsMainLooper = true)\r\n@RunWith(AndroidTestingRunner::class)\r\nclass MediaDataManagerTest : SysuiTestCase() {\r\n\r\n @Mock lateinit var 
mediaControllerFactory: MediaControllerFactory\r\n @Mock lateinit var controller: MediaController\r\n lateinit var session: MediaSession\r\n lateinit var metadataBuilder: MediaMetadata.Builder\r\n lateinit var backgroundExecutor: FakeExecutor\r\n lateinit var foregroundExecutor: FakeExecutor\r\n @Mock lateinit var dumpManager: DumpManager\r\n @Mock lateinit var broadcastDispatcher: BroadcastDispatcher\r\n @Mock lateinit var mediaTimeoutListener: MediaTimeoutListener\r\n @Mock lateinit var mediaResumeListener: MediaResumeListener\r\n @Mock lateinit var pendingIntent: PendingIntent\r\n @JvmField @Rule val mockito = MockitoJUnit.rule()\r\n lateinit var mediaDataManager: MediaDataManager\r\n lateinit var mediaNotification: StatusBarNotification\r\n\r\n @Before\r\n fun setup() {\r\n foregroundExecutor = FakeExecutor(FakeSystemClock())\r\n backgroundExecutor = FakeExecutor(FakeSystemClock())\r\n mediaDataManager = MediaDataManager(context, backgroundExecutor, foregroundExecutor,\r\n mediaControllerFactory, broadcastDispatcher, dumpManager,\r\n mediaTimeoutListener, mediaResumeListener, useMediaResumption = true,\r\n useQsMediaPlayer = true)\r\n session = MediaSession(context, \"MediaDataManagerTestSession\")\r\n mediaNotification = SbnBuilder().run {\r\n setPkg(PACKAGE_NAME)\r\n modifyNotification(context).also {\r\n it.setSmallIcon(android.R.drawable.ic_media_pause)\r\n it.setStyle(MediaStyle().apply { setMediaSession(session.sessionToken) })\r\n }\r\n build()\r\n }\r\n metadataBuilder = MediaMetadata.Builder().apply {\r\n putString(MediaMetadata.METADATA_KEY_ARTIST, SESSION_ARTIST)\r\n putString(MediaMetadata.METADATA_KEY_TITLE, SESSION_TITLE)\r\n }\r\n whenever(mediaControllerFactory.create(eq(session.sessionToken))).thenReturn(controller)\r\n }\r\n\r\n @After\r\n fun tearDown() {\r\n session.release()\r\n mediaDataManager.destroy()\r\n }\r\n\r\n @Test\r\n fun testSetTimedOut_deactivatesMedia() {\r\n val data = MediaData(userId = USER_ID, initialized = true, backgroundColor = 0, app = null,\r\n appIcon = null, artist = null, song = null, artwork = null, actions = emptyList(),\r\n actionsToShowInCompact = emptyList(), packageName = \"INVALID\", token = null,\r\n clickIntent = null, device = null, active = true, resumeAction = null)\r\n mediaDataManager.onNotificationAdded(KEY, mediaNotification)\r\n mediaDataManager.onMediaDataLoaded(KEY, oldKey = null, data = data)\r\n\r\n mediaDataManager.setTimedOut(KEY, timedOut = true)\r\n assertThat(data.active).isFalse()\r\n }\r\n\r\n @Test\r\n fun testLoadsMetadataOnBackground() {\r\n mediaDataManager.onNotificationAdded(KEY, mediaNotification)\r\n assertThat(backgroundExecutor.numPending()).isEqualTo(1)\r\n }\r\n\r\n @Test\r\n fun testOnMetaDataLoaded_callsListener() {\r\n val listener = mock(MediaDataManager.Listener::class.java)\r\n mediaDataManager.addListener(listener)\r\n mediaDataManager.onNotificationAdded(KEY, mediaNotification)\r\n mediaDataManager.onMediaDataLoaded(KEY, oldKey = null, data = mock(MediaData::class.java))\r\n verify(listener).onMediaDataLoaded(eq(KEY), eq(null), anyObject())\r\n }\r\n\r\n @Test\r\n fun testOnMetaDataLoaded_conservesActiveFlag() {\r\n val listener = TestListener()\r\n whenever(mediaControllerFactory.create(anyObject())).thenReturn(controller)\r\n whenever(controller.metadata).thenReturn(metadataBuilder.build())\r\n mediaDataManager.addListener(listener)\r\n mediaDataManager.onNotificationAdded(KEY, mediaNotification)\r\n assertThat(backgroundExecutor.runAllReady()).isEqualTo(1)\r\n 
assertThat(foregroundExecutor.runAllReady()).isEqualTo(1)\r\n assertThat(listener.data!!.active).isTrue()\r\n }\r\n\r\n @Test\r\n fun testOnNotificationRemoved_callsListener() {\r\n val listener = mock(MediaDataManager.Listener::class.java)\r\n mediaDataManager.addListener(listener)\r\n mediaDataManager.onNotificationAdded(KEY, mediaNotification)\r\n mediaDataManager.onMediaDataLoaded(KEY, oldKey = null, data = mock(MediaData::class.java))\r\n mediaDataManager.onNotificationRemoved(KEY)\r\n\r\n verify(listener).onMediaDataRemoved(eq(KEY))\r\n }\r\n\r\n @Test\r\n fun testOnNotificationRemoved_withResumption() {\r\n // GIVEN that the manager has a notification with a resume action\r\n val listener = TestListener()\r\n mediaDataManager.addListener(listener)\r\n whenever(controller.metadata).thenReturn(metadataBuilder.build())\r\n mediaDataManager.onNotificationAdded(KEY, mediaNotification)\r\n assertThat(backgroundExecutor.runAllReady()).isEqualTo(1)\r\n assertThat(foregroundExecutor.runAllReady()).isEqualTo(1)\r\n val data = listener.data!!\r\n assertThat(data.resumption).isFalse()\r\n mediaDataManager.onMediaDataLoaded(KEY, null, data.copy(resumeAction = Runnable {}))\r\n // WHEN the notification is removed\r\n mediaDataManager.onNotificationRemoved(KEY)\r\n // THEN the media data indicates that it is for resumption\r\n assertThat(listener.data!!.resumption).isTrue()\r\n // AND the new key is the package name\r\n assertThat(listener.key!!).isEqualTo(PACKAGE_NAME)\r\n assertThat(listener.oldKey!!).isEqualTo(KEY)\r\n assertThat(listener.removedKey).isNull()\r\n }\r\n\r\n @Test\r\n fun testOnNotificationRemoved_twoWithResumption() {\r\n // GIVEN that the manager has two notifications with resume actions\r\n val listener = TestListener()\r\n mediaDataManager.addListener(listener)\r\n whenever(controller.metadata).thenReturn(metadataBuilder.build())\r\n mediaDataManager.onNotificationAdded(KEY, mediaNotification)\r\n mediaDataManager.onNotificationAdded(KEY_2, mediaNotification)\r\n assertThat(backgroundExecutor.runAllReady()).isEqualTo(2)\r\n assertThat(foregroundExecutor.runAllReady()).isEqualTo(2)\r\n val data = listener.data!!\r\n assertThat(data.resumption).isFalse()\r\n val resumableData = data.copy(resumeAction = Runnable {})\r\n mediaDataManager.onMediaDataLoaded(KEY, null, resumableData)\r\n mediaDataManager.onMediaDataLoaded(KEY_2, null, resumableData)\r\n // WHEN the first is removed\r\n mediaDataManager.onNotificationRemoved(KEY)\r\n // THEN the data is for resumption and the key is migrated to the package name\r\n assertThat(listener.data!!.resumption).isTrue()\r\n assertThat(listener.key!!).isEqualTo(PACKAGE_NAME)\r\n assertThat(listener.oldKey!!).isEqualTo(KEY)\r\n assertThat(listener.removedKey).isNull()\r\n // WHEN the second is removed\r\n mediaDataManager.onNotificationRemoved(KEY_2)\r\n // THEN the data is for resumption and the second key is removed\r\n assertThat(listener.data!!.resumption).isTrue()\r\n assertThat(listener.key!!).isEqualTo(PACKAGE_NAME)\r\n assertThat(listener.oldKey!!).isEqualTo(PACKAGE_NAME)\r\n assertThat(listener.removedKey!!).isEqualTo(KEY_2)\r\n }\r\n\r\n @Test\r\n fun testAddResumptionControls() {\r\n val listener = TestListener()\r\n mediaDataManager.addListener(listener)\r\n // WHEN resumption controls are added\r\n val desc = MediaDescription.Builder().run {\r\n setTitle(SESSION_TITLE)\r\n build()\r\n }\r\n mediaDataManager.addResumptionControls(USER_ID, desc, Runnable {}, session.sessionToken,\r\n APP_NAME, pendingIntent, PACKAGE_NAME)\r\n 
assertThat(backgroundExecutor.runAllReady()).isEqualTo(1)\r\n assertThat(foregroundExecutor.runAllReady()).isEqualTo(1)\r\n // THEN the media data indicates that it is for resumption\r\n val data = listener.data!!\r\n assertThat(data.resumption).isTrue()\r\n assertThat(data.song).isEqualTo(SESSION_TITLE)\r\n assertThat(data.app).isEqualTo(APP_NAME)\r\n assertThat(data.actions).hasSize(1)\r\n }\r\n\r\n /**\r\n * Simple implementation of [MediaDataManager.Listener] for the test.\r\n *\r\n * Giving up on trying to get a mock Listener and ArgumentCaptor to work.\r\n */\r\n private class TestListener : MediaDataManager.Listener {\r\n var data: MediaData? = null\r\n var key: String? = null\r\n var oldKey: String? = null\r\n var removedKey: String? = null\r\n\r\n override fun onMediaDataLoaded(key: String, oldKey: String?, data: MediaData) {\r\n this.key = key\r\n this.oldKey = oldKey\r\n this.data = data\r\n }\r\n\r\n override fun onMediaDataRemoved(key: String) {\r\n removedKey = key\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.5295358896255493, "alphanum_fraction": 0.5675105452537537, "avg_line_length": 25.764705657958984, "blob_id": "33723efc1b46845476d5e09981d61fda88f959ce", "content_id": "3ab15898ce1924fc1237bb092edd35e793784125", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 474, "license_type": "permissive", "max_line_length": 77, "num_lines": 17, "path": "/libs/androidfw/tests/data/sparse/gen_strings.sh", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "#!/bin/bash\r\n\r\nOUTPUT_default=res/values/strings.xml\r\nOUTPUT_v26=res/values-v26/strings.xml\r\n\r\necho \"<resources>\" > $OUTPUT_default\r\necho \"<resources>\" > $OUTPUT_v26\r\nfor i in {0..999}\r\ndo\r\n echo \" <string name=\\\"foo_$i\\\">$i</string>\" >> $OUTPUT_default\r\n if [ \"$(($i % 3))\" -eq \"0\" ]\r\n then\r\n echo \" <string name=\\\"foo_$i\\\">$(($i * 10))</string>\" >> $OUTPUT_v26\r\n fi\r\ndone\r\necho \"</resources>\" >> $OUTPUT_default\r\necho \"</resources>\" >> $OUTPUT_v26\r\n\r\n" }, { "alpha_fraction": 0.7348111867904663, "alphanum_fraction": 0.7413793206214905, "avg_line_length": 36.0625, "blob_id": "41bc3f3b337e695e6e2543091809d695ef20b1d2", "content_id": "ff7d6adab2188729fc89a37e2c801c623930bba7", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1218, "license_type": "permissive", "max_line_length": 92, "num_lines": 32, "path": "/packages/SystemUI/plugin_core/src/com/android/systemui/plugins/annotations/DependsOn.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2017 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file\r\n * except in compliance with the License. You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software distributed under the\r\n * License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\r\n * KIND, either express or implied. 
See the License for the specific language governing\r\n * permissions and limitations under the License.\r\n */\r\n\r\npackage com.android.systemui.plugins.annotations;\r\n\r\nimport java.lang.annotation.Repeatable;\r\nimport java.lang.annotation.Retention;\r\nimport java.lang.annotation.RetentionPolicy;\r\n\r\n/**\r\n * Used to indicate that an interface in the plugin library needs another\r\n * interface to function properly. When this is added, it will be enforced\r\n * that all plugins that @Requires the annotated interface also @Requires\r\n * the specified class as well.\r\n */\r\n@Retention(RetentionPolicy.RUNTIME)\r\n@Repeatable(value = Dependencies.class)\r\npublic @interface DependsOn {\r\n Class<?> target();\r\n\r\n}\r\n" }, { "alpha_fraction": 0.7047343850135803, "alphanum_fraction": 0.7105168104171753, "avg_line_length": 36.43055725097656, "blob_id": "b163eafbf0d5859913d08530c118dd1a708083d3", "content_id": "78af5286fa6faa1eb4faa4bac731eacfccd0005e", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 2767, "license_type": "permissive", "max_line_length": 79, "num_lines": 72, "path": "/tests/net/common/java/android/net/NetworkAgentConfigTest.kt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2019 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage android.net\r\n\r\nimport android.os.Build\r\nimport androidx.test.filters.SmallTest\r\nimport androidx.test.runner.AndroidJUnit4\r\nimport com.android.testutils.DevSdkIgnoreRule\r\nimport com.android.testutils.DevSdkIgnoreRule.IgnoreUpTo\r\nimport com.android.testutils.assertParcelSane\r\nimport org.junit.Assert.assertEquals\r\nimport org.junit.Assert.assertFalse\r\nimport org.junit.Assert.assertTrue\r\nimport org.junit.Rule\r\nimport org.junit.Test\r\nimport org.junit.runner.RunWith\r\n\r\n@RunWith(AndroidJUnit4::class)\r\n@SmallTest\r\nclass NetworkAgentConfigTest {\r\n @Rule @JvmField\r\n val ignoreRule = DevSdkIgnoreRule()\r\n\r\n @Test @IgnoreUpTo(Build.VERSION_CODES.Q)\r\n fun testParcelNetworkAgentConfig() {\r\n val config = NetworkAgentConfig.Builder().apply {\r\n setExplicitlySelected(true)\r\n setLegacyType(ConnectivityManager.TYPE_ETHERNET)\r\n setSubscriberId(\"MySubId\")\r\n setPartialConnectivityAcceptable(false)\r\n setUnvalidatedConnectivityAcceptable(true)\r\n }.build()\r\n assertParcelSane(config, 10)\r\n }\r\n\r\n @Test @IgnoreUpTo(Build.VERSION_CODES.Q)\r\n fun testBuilder() {\r\n val config = NetworkAgentConfig.Builder().apply {\r\n setExplicitlySelected(true)\r\n setLegacyType(ConnectivityManager.TYPE_ETHERNET)\r\n setSubscriberId(\"MySubId\")\r\n setPartialConnectivityAcceptable(false)\r\n setUnvalidatedConnectivityAcceptable(true)\r\n setLegacyTypeName(\"TEST_NETWORK\")\r\n disableNat64Detection()\r\n disableProvisioningNotification()\r\n }.build()\r\n\r\n 
assertTrue(config.isExplicitlySelected())\r\n assertEquals(ConnectivityManager.TYPE_ETHERNET, config.getLegacyType())\r\n assertEquals(\"MySubId\", config.getSubscriberId())\r\n assertFalse(config.isPartialConnectivityAcceptable())\r\n assertTrue(config.isUnvalidatedConnectivityAcceptable())\r\n assertEquals(\"TEST_NETWORK\", config.getLegacyTypeName())\r\n assertFalse(config.isNat64DetectionEnabled())\r\n assertFalse(config.isProvisioningNotificationEnabled())\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.7447552680969238, "alphanum_fraction": 0.7447552680969238, "avg_line_length": 38.85714340209961, "blob_id": "58321b3da0139a5e8494156a1e990ec6221ff7a0", "content_id": "e65d8541658cd443380663ce42a93feae83c7a9c", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "HTML", "length_bytes": 286, "license_type": "permissive", "max_line_length": 95, "num_lines": 7, "path": "/core/java/android/text/format/package.html", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "<HTML>\r\n<BODY>\r\nThis package contains alternative classes for some text formatting classes\r\ndefined in {@link java.util} and {@link java.text}. It also contains additional text formatting\r\nclasses for situations not covered by {@link java.util} or {@link java.text}.\r\n</BODY>\r\n</HTML>\r\n" }, { "alpha_fraction": 0.7123711109161377, "alphanum_fraction": 0.7164948582649231, "avg_line_length": 33.96296310424805, "blob_id": "1d5df1a6e15ff0182ebbe265f47d77cf41fabe4f", "content_id": "7f00bbaa839a766c6e1ff727f74603f2ced84701", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1940, "license_type": "permissive", "max_line_length": 89, "num_lines": 54, "path": "/media/tests/CameraBrowser/src/com/android/camerabrowser/DeviceDisconnectedReceiver.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2010 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.camerabrowser;\r\n\r\nimport android.app.Activity;\r\nimport android.content.BroadcastReceiver;\r\nimport android.content.Context;\r\nimport android.content.Intent;\r\nimport android.content.IntentFilter;\r\nimport android.hardware.usb.UsbDevice;\r\nimport android.hardware.usb.UsbManager;\r\nimport android.util.Log;\r\n\r\npublic class DeviceDisconnectedReceiver extends BroadcastReceiver {\r\n\r\n private static final String TAG = \"DeviceDisconnectedReceiver\";\r\n\r\n private final Activity mActivity;\r\n private final String mDeviceName;\r\n\r\n public DeviceDisconnectedReceiver(Activity activity, String deviceName) {\r\n mActivity = activity;\r\n mDeviceName = deviceName;\r\n\r\n IntentFilter filter = new IntentFilter(UsbManager.ACTION_USB_DEVICE_DETACHED);\r\n activity.registerReceiver(this, filter);\r\n }\r\n\r\n @Override\r\n public 
void onReceive(Context context, Intent intent) {\r\n UsbDevice device = (UsbDevice)intent.getParcelableExtra(UsbManager.EXTRA_DEVICE);\r\n String deviceName = device.getDeviceName();\r\n Log.d(TAG, \"ACTION_USB_DEVICE_DETACHED \" + deviceName);\r\n\r\n // close our activity if the device it is displaying is disconnected\r\n if (deviceName.equals(mDeviceName)) {\r\n mActivity.finish();\r\n }\r\n }\r\n}" }, { "alpha_fraction": 0.7091790437698364, "alphanum_fraction": 0.7155407667160034, "avg_line_length": 34.67777633666992, "blob_id": "5dda96c089c2442f7b40d04be7c55956bb7511d2", "content_id": "ff2f0910a1eed316b07be54117fde00bbe16deef", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 3301, "license_type": "permissive", "max_line_length": 98, "num_lines": 90, "path": "/tests/net/common/java/android/net/util/SocketUtilsTest.kt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2019 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage android.net.util\r\n\r\nimport android.os.Build\r\nimport android.system.NetlinkSocketAddress\r\nimport android.system.Os\r\nimport android.system.OsConstants.AF_INET\r\nimport android.system.OsConstants.ETH_P_ALL\r\nimport android.system.OsConstants.IPPROTO_UDP\r\nimport android.system.OsConstants.RTMGRP_NEIGH\r\nimport android.system.OsConstants.SOCK_DGRAM\r\nimport android.system.PacketSocketAddress\r\nimport androidx.test.filters.SmallTest\r\nimport androidx.test.runner.AndroidJUnit4\r\nimport com.android.testutils.DevSdkIgnoreRule\r\nimport com.android.testutils.DevSdkIgnoreRule.IgnoreUpTo\r\nimport org.junit.Assert.assertEquals\r\nimport org.junit.Assert.assertFalse\r\nimport org.junit.Assert.assertTrue\r\nimport org.junit.Assert.fail\r\nimport org.junit.Rule\r\nimport org.junit.Test\r\nimport org.junit.runner.RunWith\r\n\r\nprivate const val TEST_INDEX = 123\r\nprivate const val TEST_PORT = 555\r\nprivate const val FF_BYTE = 0xff.toByte()\r\n\r\n@RunWith(AndroidJUnit4::class)\r\n@SmallTest\r\nclass SocketUtilsTest {\r\n @Rule @JvmField\r\n val ignoreRule = DevSdkIgnoreRule()\r\n\r\n @Test\r\n fun testMakeNetlinkSocketAddress() {\r\n val nlAddress = SocketUtils.makeNetlinkSocketAddress(TEST_PORT, RTMGRP_NEIGH)\r\n if (nlAddress is NetlinkSocketAddress) {\r\n assertEquals(TEST_PORT, nlAddress.getPortId())\r\n assertEquals(RTMGRP_NEIGH, nlAddress.getGroupsMask())\r\n } else {\r\n fail(\"Not NetlinkSocketAddress object\")\r\n }\r\n }\r\n\r\n @Test\r\n fun testMakePacketSocketAddress_Q() {\r\n val pkAddress = SocketUtils.makePacketSocketAddress(ETH_P_ALL, TEST_INDEX)\r\n assertTrue(\"Not PacketSocketAddress object\", pkAddress is PacketSocketAddress)\r\n\r\n val pkAddress2 = SocketUtils.makePacketSocketAddress(TEST_INDEX, ByteArray(6) { FF_BYTE })\r\n assertTrue(\"Not PacketSocketAddress object\", pkAddress2 is 
PacketSocketAddress)\r\n }\r\n\r\n @Test @IgnoreUpTo(Build.VERSION_CODES.Q)\r\n fun testMakePacketSocketAddress() {\r\n val pkAddress = SocketUtils.makePacketSocketAddress(\r\n ETH_P_ALL, TEST_INDEX, ByteArray(6) { FF_BYTE })\r\n assertTrue(\"Not PacketSocketAddress object\", pkAddress is PacketSocketAddress)\r\n }\r\n\r\n @Test\r\n fun testCloseSocket() {\r\n // Expect no exception when closing a null socket.\r\n SocketUtils.closeSocket(null)\r\n\r\n val fd = Os.socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP)\r\n assertTrue(fd.valid())\r\n SocketUtils.closeSocket(fd)\r\n assertFalse(fd.valid())\r\n // The socket should still be invalid even after closing it again.\r\n SocketUtils.closeSocket(fd)\r\n assertFalse(fd.valid())\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6790770888328552, "alphanum_fraction": 0.6796460151672363, "avg_line_length": 33.3125, "blob_id": "6bc60a5ee22a44b2daaf739f13e221af46063533", "content_id": "8bf409bd09099068be0fc1e545c71b58a10bc167", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 15820, "license_type": "permissive", "max_line_length": 93, "num_lines": 448, "path": "/packages/SystemUI/tests/src/com/android/systemui/doze/DozeMachineTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2016 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in 
compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.systemui.doze;\r\n\r\nimport static com.android.systemui.doze.DozeMachine.State.DOZE;\r\nimport static com.android.systemui.doze.DozeMachine.State.DOZE_AOD;\r\nimport static com.android.systemui.doze.DozeMachine.State.DOZE_AOD_DOCKED;\r\nimport static com.android.systemui.doze.DozeMachine.State.DOZE_PULSE_DONE;\r\nimport static com.android.systemui.doze.DozeMachine.State.DOZE_PULSING;\r\nimport static com.android.systemui.doze.DozeMachine.State.DOZE_REQUEST_PULSE;\r\nimport static com.android.systemui.doze.DozeMachine.State.FINISH;\r\nimport static com.android.systemui.doze.DozeMachine.State.INITIALIZED;\r\nimport static com.android.systemui.doze.DozeMachine.State.UNINITIALIZED;\r\n\r\nimport static org.junit.Assert.assertEquals;\r\nimport static org.junit.Assert.assertFalse;\r\nimport static org.junit.Assert.assertTrue;\r\nimport static org.mockito.ArgumentMatchers.any;\r\nimport static org.mockito.ArgumentMatchers.anyInt;\r\nimport static org.mockito.ArgumentMatchers.eq;\r\nimport static org.mockito.Mockito.doAnswer;\r\nimport static org.mockito.Mockito.mock;\r\nimport static org.mockito.Mockito.never;\r\nimport static org.mockito.Mockito.reset;\r\nimport static org.mockito.Mockito.verify;\r\nimport static org.mockito.Mockito.when;\r\n\r\nimport android.hardware.display.AmbientDisplayConfiguration;\r\nimport android.testing.AndroidTestingRunner;\r\nimport android.testing.UiThreadTest;\r\n\r\nimport androidx.test.filters.SmallTest;\r\n\r\nimport com.android.systemui.SysuiTestCase;\r\nimport com.android.systemui.dock.DockManager;\r\nimport com.android.systemui.keyguard.WakefulnessLifecycle;\r\nimport com.android.systemui.statusbar.policy.BatteryController;\r\nimport com.android.systemui.util.wakelock.WakeLockFake;\r\n\r\nimport org.junit.Before;\r\nimport org.junit.Test;\r\nimport org.junit.runner.RunWith;\r\nimport org.mockito.Mock;\r\nimport org.mockito.MockitoAnnotations;\r\n\r\n@SmallTest\r\n@RunWith(AndroidTestingRunner.class)\r\n@UiThreadTest\r\npublic class DozeMachineTest extends SysuiTestCase {\r\n\r\n DozeMachine mMachine;\r\n\r\n @Mock\r\n private WakefulnessLifecycle mWakefulnessLifecycle;\r\n @Mock\r\n private DozeLog mDozeLog;\r\n @Mock private DockManager mDockManager;\r\n @Mock\r\n private DozeHost mHost;\r\n private DozeServiceFake mServiceFake;\r\n private WakeLockFake mWakeLockFake;\r\n private AmbientDisplayConfiguration mConfigMock;\r\n private DozeMachine.Part mPartMock;\r\n\r\n @Before\r\n public void setUp() {\r\n MockitoAnnotations.initMocks(this);\r\n mServiceFake = new DozeServiceFake();\r\n mWakeLockFake = new WakeLockFake();\r\n mConfigMock = mock(AmbientDisplayConfiguration.class);\r\n mPartMock = mock(DozeMachine.Part.class);\r\n when(mDockManager.isDocked()).thenReturn(false);\r\n when(mDockManager.isHidden()).thenReturn(false);\r\n\r\n mMachine = new DozeMachine(mServiceFake, mConfigMock, mWakeLockFake,\r\n mWakefulnessLifecycle, mock(BatteryController.class), mDozeLog, mDockManager,\r\n mHost);\r\n mMachine.setParts(new DozeMachine.Part[]{mPartMock});\r\n 
}\r\n\r\n @Test\r\n public void testInitialize_initializesParts() {\r\n mMachine.requestState(INITIALIZED);\r\n\r\n verify(mPartMock).transitionTo(UNINITIALIZED, INITIALIZED);\r\n }\r\n\r\n @Test\r\n public void testInitialize_goesToDoze() {\r\n when(mConfigMock.alwaysOnEnabled(anyInt())).thenReturn(false);\r\n\r\n mMachine.requestState(INITIALIZED);\r\n\r\n verify(mPartMock).transitionTo(INITIALIZED, DOZE);\r\n assertEquals(DOZE, mMachine.getState());\r\n }\r\n\r\n @Test\r\n public void testInitialize_goesToAod() {\r\n when(mConfigMock.alwaysOnEnabled(anyInt())).thenReturn(true);\r\n\r\n mMachine.requestState(INITIALIZED);\r\n\r\n verify(mPartMock).transitionTo(INITIALIZED, DOZE_AOD);\r\n assertEquals(DOZE_AOD, mMachine.getState());\r\n }\r\n\r\n @Test\r\n public void testInitialize_afterDocked_goesToDockedAod() {\r\n when(mDockManager.isDocked()).thenReturn(true);\r\n\r\n mMachine.requestState(INITIALIZED);\r\n\r\n verify(mPartMock).transitionTo(INITIALIZED, DOZE_AOD_DOCKED);\r\n assertEquals(DOZE_AOD_DOCKED, mMachine.getState());\r\n }\r\n\r\n @Test\r\n public void testInitialize_afterDockPaused_goesToDoze() {\r\n when(mConfigMock.alwaysOnEnabled(anyInt())).thenReturn(true);\r\n when(mDockManager.isDocked()).thenReturn(true);\r\n when(mDockManager.isHidden()).thenReturn(true);\r\n\r\n mMachine.requestState(INITIALIZED);\r\n\r\n verify(mPartMock).transitionTo(INITIALIZED, DOZE);\r\n assertEquals(DOZE, mMachine.getState());\r\n }\r\n\r\n @Test\r\n public void testInitialize_dozeSuppressed_alwaysOnDisabled_goesToDoze() {\r\n when(mHost.isDozeSuppressed()).thenReturn(true);\r\n when(mConfigMock.alwaysOnEnabled(anyInt())).thenReturn(false);\r\n\r\n mMachine.requestState(INITIALIZED);\r\n\r\n verify(mPartMock).transitionTo(INITIALIZED, DOZE);\r\n assertEquals(DOZE, mMachine.getState());\r\n }\r\n\r\n @Test\r\n public void testInitialize_dozeSuppressed_alwaysOnEnabled_goesToDoze() {\r\n when(mHost.isDozeSuppressed()).thenReturn(true);\r\n when(mConfigMock.alwaysOnEnabled(anyInt())).thenReturn(true);\r\n\r\n mMachine.requestState(INITIALIZED);\r\n\r\n verify(mPartMock).transitionTo(INITIALIZED, DOZE);\r\n assertEquals(DOZE, mMachine.getState());\r\n }\r\n\r\n @Test\r\n public void testInitialize_dozeSuppressed_afterDocked_goesToDoze() {\r\n when(mHost.isDozeSuppressed()).thenReturn(true);\r\n when(mDockManager.isDocked()).thenReturn(true);\r\n\r\n mMachine.requestState(INITIALIZED);\r\n\r\n verify(mPartMock).transitionTo(INITIALIZED, DOZE);\r\n assertEquals(DOZE, mMachine.getState());\r\n }\r\n\r\n @Test\r\n public void testInitialize_dozeSuppressed_alwaysOnDisabled_afterDockPaused_goesToDoze() {\r\n when(mHost.isDozeSuppressed()).thenReturn(true);\r\n when(mConfigMock.alwaysOnEnabled(anyInt())).thenReturn(false);\r\n when(mDockManager.isDocked()).thenReturn(true);\r\n when(mDockManager.isHidden()).thenReturn(true);\r\n\r\n mMachine.requestState(INITIALIZED);\r\n\r\n verify(mPartMock).transitionTo(INITIALIZED, DOZE);\r\n assertEquals(DOZE, mMachine.getState());\r\n }\r\n\r\n @Test\r\n public void testInitialize_dozeSuppressed_alwaysOnEnabled_afterDockPaused_goesToDoze() {\r\n when(mHost.isDozeSuppressed()).thenReturn(true);\r\n when(mConfigMock.alwaysOnEnabled(anyInt())).thenReturn(true);\r\n when(mDockManager.isDocked()).thenReturn(true);\r\n when(mDockManager.isHidden()).thenReturn(true);\r\n\r\n mMachine.requestState(INITIALIZED);\r\n\r\n verify(mPartMock).transitionTo(INITIALIZED, DOZE);\r\n assertEquals(DOZE, mMachine.getState());\r\n }\r\n\r\n @Test\r\n public void 
testPulseDone_goesToDoze() {\r\n when(mConfigMock.alwaysOnEnabled(anyInt())).thenReturn(false);\r\n mMachine.requestState(INITIALIZED);\r\n mMachine.requestPulse(DozeLog.PULSE_REASON_NOTIFICATION);\r\n mMachine.requestState(DOZE_PULSING);\r\n\r\n mMachine.requestState(DOZE_PULSE_DONE);\r\n\r\n verify(mPartMock).transitionTo(DOZE_PULSE_DONE, DOZE);\r\n assertEquals(DOZE, mMachine.getState());\r\n }\r\n\r\n @Test\r\n public void testPulseDone_goesToAoD() {\r\n when(mConfigMock.alwaysOnEnabled(anyInt())).thenReturn(true);\r\n mMachine.requestState(INITIALIZED);\r\n mMachine.requestPulse(DozeLog.PULSE_REASON_NOTIFICATION);\r\n mMachine.requestState(DOZE_PULSING);\r\n\r\n mMachine.requestState(DOZE_PULSE_DONE);\r\n\r\n verify(mPartMock).transitionTo(DOZE_PULSE_DONE, DOZE_AOD);\r\n assertEquals(DOZE_AOD, mMachine.getState());\r\n }\r\n\r\n @Test\r\n public void testPulseDone_dozeSuppressed_goesToSuppressed() {\r\n when(mHost.isDozeSuppressed()).thenReturn(true);\r\n when(mConfigMock.alwaysOnEnabled(anyInt())).thenReturn(true);\r\n mMachine.requestState(INITIALIZED);\r\n mMachine.requestPulse(DozeLog.PULSE_REASON_NOTIFICATION);\r\n mMachine.requestState(DOZE_PULSING);\r\n\r\n mMachine.requestState(DOZE_PULSE_DONE);\r\n\r\n verify(mPartMock).transitionTo(DOZE_PULSE_DONE, DOZE);\r\n assertEquals(DOZE, mMachine.getState());\r\n }\r\n\r\n @Test\r\n public void testPulseDone_afterDocked_goesToDockedAoD() {\r\n when(mDockManager.isDocked()).thenReturn(true);\r\n mMachine.requestState(INITIALIZED);\r\n mMachine.requestPulse(DozeLog.PULSE_REASON_NOTIFICATION);\r\n mMachine.requestState(DOZE_PULSING);\r\n\r\n mMachine.requestState(DOZE_PULSE_DONE);\r\n\r\n verify(mPartMock).transitionTo(DOZE_PULSE_DONE, DOZE_AOD_DOCKED);\r\n assertEquals(DOZE_AOD_DOCKED, mMachine.getState());\r\n }\r\n\r\n @Test\r\n public void testPulseDone_whileDockedAoD_staysDockedAod() {\r\n when(mDockManager.isDocked()).thenReturn(true);\r\n mMachine.requestState(INITIALIZED);\r\n mMachine.requestState(DOZE_AOD_DOCKED);\r\n\r\n mMachine.requestState(DOZE_PULSE_DONE);\r\n\r\n verify(mPartMock, never()).transitionTo(DOZE_AOD_DOCKED, DOZE_PULSE_DONE);\r\n }\r\n\r\n @Test\r\n public void testPulseDone_dozeSuppressed_afterDocked_goesToDoze() {\r\n when(mHost.isDozeSuppressed()).thenReturn(true);\r\n when(mDockManager.isDocked()).thenReturn(true);\r\n mMachine.requestState(INITIALIZED);\r\n mMachine.requestPulse(DozeLog.PULSE_REASON_NOTIFICATION);\r\n mMachine.requestState(DOZE_PULSING);\r\n\r\n mMachine.requestState(DOZE_PULSE_DONE);\r\n\r\n verify(mPartMock).transitionTo(DOZE_PULSE_DONE, DOZE);\r\n assertEquals(DOZE, mMachine.getState());\r\n }\r\n\r\n @Test\r\n public void testPulseDone_afterDockPaused_goesToDoze() {\r\n when(mConfigMock.alwaysOnEnabled(anyInt())).thenReturn(true);\r\n when(mDockManager.isDocked()).thenReturn(true);\r\n when(mDockManager.isHidden()).thenReturn(true);\r\n mMachine.requestState(INITIALIZED);\r\n mMachine.requestPulse(DozeLog.PULSE_REASON_NOTIFICATION);\r\n mMachine.requestState(DOZE_PULSING);\r\n\r\n mMachine.requestState(DOZE_PULSE_DONE);\r\n\r\n verify(mPartMock).transitionTo(DOZE_PULSE_DONE, DOZE);\r\n assertEquals(DOZE, mMachine.getState());\r\n }\r\n\r\n @Test\r\n public void testPulseDone_dozeSuppressed_afterDockPaused_goesToDoze() {\r\n when(mHost.isDozeSuppressed()).thenReturn(true);\r\n when(mConfigMock.alwaysOnEnabled(anyInt())).thenReturn(true);\r\n when(mDockManager.isDocked()).thenReturn(true);\r\n when(mDockManager.isHidden()).thenReturn(true);\r\n mMachine.requestState(INITIALIZED);\r\n 
mMachine.requestPulse(DozeLog.PULSE_REASON_NOTIFICATION);\r\n mMachine.requestState(DOZE_PULSING);\r\n\r\n mMachine.requestState(DOZE_PULSE_DONE);\r\n\r\n verify(mPartMock).transitionTo(DOZE_PULSE_DONE, DOZE);\r\n assertEquals(DOZE, mMachine.getState());\r\n }\r\n\r\n @Test\r\n public void testFinished_staysFinished() {\r\n mMachine.requestState(INITIALIZED);\r\n mMachine.requestState(FINISH);\r\n reset(mPartMock);\r\n\r\n mMachine.requestState(DOZE);\r\n\r\n verify(mPartMock, never()).transitionTo(any(), any());\r\n assertEquals(FINISH, mMachine.getState());\r\n }\r\n\r\n @Test\r\n public void testFinish_finishesService() {\r\n mMachine.requestState(INITIALIZED);\r\n\r\n mMachine.requestState(FINISH);\r\n\r\n assertTrue(mServiceFake.finished);\r\n }\r\n\r\n @Test\r\n public void testWakeLock_heldInTransition() {\r\n doAnswer((inv) -> {\r\n assertTrue(mWakeLockFake.isHeld());\r\n return null;\r\n }).when(mPartMock).transitionTo(any(), any());\r\n\r\n mMachine.requestState(INITIALIZED);\r\n }\r\n\r\n @Test\r\n public void testWakeLock_heldInPulseStates() {\r\n mMachine.requestState(INITIALIZED);\r\n\r\n mMachine.requestPulse(DozeLog.PULSE_REASON_NOTIFICATION);\r\n assertTrue(mWakeLockFake.isHeld());\r\n\r\n mMachine.requestState(DOZE_PULSING);\r\n assertTrue(mWakeLockFake.isHeld());\r\n }\r\n\r\n @Test\r\n public void testWakeLock_notHeldInDozeStates() {\r\n mMachine.requestState(INITIALIZED);\r\n\r\n mMachine.requestState(DOZE);\r\n assertFalse(mWakeLockFake.isHeld());\r\n\r\n mMachine.requestState(DOZE_AOD);\r\n assertFalse(mWakeLockFake.isHeld());\r\n }\r\n\r\n @Test\r\n public void testWakeLock_releasedAfterPulse() {\r\n mMachine.requestState(INITIALIZED);\r\n\r\n mMachine.requestState(DOZE);\r\n mMachine.requestPulse(DozeLog.PULSE_REASON_NOTIFICATION);\r\n mMachine.requestState(DOZE_PULSING);\r\n mMachine.requestState(DOZE_PULSE_DONE);\r\n\r\n assertFalse(mWakeLockFake.isHeld());\r\n }\r\n\r\n @Test\r\n public void testPulseDuringPulse_doesntCrash() {\r\n mMachine.requestState(INITIALIZED);\r\n\r\n mMachine.requestState(DOZE);\r\n mMachine.requestPulse(DozeLog.PULSE_REASON_NOTIFICATION);\r\n mMachine.requestState(DOZE_PULSING);\r\n mMachine.requestPulse(DozeLog.PULSE_REASON_NOTIFICATION);\r\n mMachine.requestState(DOZE_PULSE_DONE);\r\n }\r\n\r\n @Test\r\n public void testSuppressingPulse_doesntCrash() {\r\n mMachine.requestState(INITIALIZED);\r\n\r\n mMachine.requestState(DOZE);\r\n mMachine.requestPulse(DozeLog.PULSE_REASON_NOTIFICATION);\r\n mMachine.requestState(DOZE_PULSE_DONE);\r\n }\r\n\r\n @Test\r\n public void testTransitions_canRequestTransitions() {\r\n mMachine.requestState(INITIALIZED);\r\n mMachine.requestState(DOZE);\r\n doAnswer(inv -> {\r\n mMachine.requestState(DOZE_PULSING);\r\n return null;\r\n }).when(mPartMock).transitionTo(any(), eq(DOZE_REQUEST_PULSE));\r\n\r\n mMachine.requestPulse(DozeLog.PULSE_REASON_NOTIFICATION);\r\n\r\n assertEquals(DOZE_PULSING, mMachine.getState());\r\n }\r\n\r\n @Test\r\n public void testPulseReason_getMatchesRequest() {\r\n mMachine.requestState(INITIALIZED);\r\n mMachine.requestState(DOZE);\r\n mMachine.requestPulse(DozeLog.REASON_SENSOR_DOUBLE_TAP);\r\n\r\n assertEquals(DozeLog.REASON_SENSOR_DOUBLE_TAP, mMachine.getPulseReason());\r\n }\r\n\r\n @Test\r\n public void testPulseReason_getFromTransition() {\r\n mMachine.requestState(INITIALIZED);\r\n mMachine.requestState(DOZE);\r\n doAnswer(inv -> {\r\n DozeMachine.State newState = inv.getArgument(1);\r\n if (newState == DOZE_REQUEST_PULSE\r\n || newState == DOZE_PULSING\r\n || newState 
== DOZE_PULSE_DONE) {\r\n assertEquals(DozeLog.PULSE_REASON_NOTIFICATION, mMachine.getPulseReason());\r\n } else {\r\n assertTrue(\"unexpected state \" + newState,\r\n newState == DOZE || newState == DOZE_AOD);\r\n }\r\n return null;\r\n }).when(mPartMock).transitionTo(any(), any());\r\n\r\n mMachine.requestPulse(DozeLog.PULSE_REASON_NOTIFICATION);\r\n mMachine.requestState(DOZE_PULSING);\r\n mMachine.requestState(DOZE_PULSE_DONE);\r\n }\r\n\r\n @Test\r\n public void testWakeUp_wakesUp() {\r\n mMachine.wakeUp();\r\n\r\n assertTrue(mServiceFake.requestedWakeup);\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.7167187929153442, "alphanum_fraction": 0.7193256616592407, "avg_line_length": 36.37333297729492, "blob_id": "3193c880f99bace24839241e41f85fa0b5967065", "content_id": "6d25c6480d3ffd3a6ba09067a8d51878ed3b1aea", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": true, "language": "Kotlin", "length_bytes": 5754, "license_type": "permissive", "max_line_length": 88, "num_lines": 150, "path": "/packages/SystemUI/tests/src/com/android/systemui/qs/external/CustomTileTest.kt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2019 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License\r\n */\r\n\r\npackage com.android.systemui.qs.external\r\n\r\nimport android.content.ComponentName\r\nimport android.content.Context\r\nimport android.content.pm.ApplicationInfo\r\nimport android.content.pm.PackageManager\r\nimport android.content.pm.ServiceInfo\r\nimport android.graphics.drawable.Drawable\r\nimport android.graphics.drawable.Icon\r\nimport android.service.quicksettings.IQSTileService\r\nimport android.service.quicksettings.Tile\r\nimport android.test.suitebuilder.annotation.SmallTest\r\nimport android.view.IWindowManager\r\nimport androidx.test.runner.AndroidJUnit4\r\nimport com.android.systemui.SysuiTestCase\r\nimport com.android.systemui.plugins.qs.QSTile\r\nimport com.android.systemui.qs.QSHost\r\nimport junit.framework.Assert.assertFalse\r\nimport junit.framework.Assert.assertTrue\r\nimport org.junit.Assert.assertEquals\r\nimport org.junit.Before\r\nimport org.junit.Test\r\nimport org.junit.runner.RunWith\r\nimport org.mockito.ArgumentMatchers.anyInt\r\nimport org.mockito.ArgumentMatchers.anyString\r\nimport org.mockito.Mock\r\nimport org.mockito.Mockito.`when`\r\nimport org.mockito.Mockito.any\r\nimport org.mockito.Mockito.mock\r\nimport org.mockito.MockitoAnnotations\r\n\r\n@SmallTest\r\n@RunWith(AndroidJUnit4::class)\r\nclass CustomTileTest : SysuiTestCase() {\r\n\r\n companion object {\r\n const val packageName = \"test_package\"\r\n const val className = \"test_class\"\r\n val componentName = ComponentName(packageName, className)\r\n val TILE_SPEC = CustomTile.toSpec(componentName)\r\n }\r\n\r\n @Mock private lateinit var mTileHost: QSHost\r\n @Mock private lateinit var mTileService: IQSTileService\r\n @Mock private lateinit var 
mTileServices: TileServices\r\n @Mock private lateinit var mTileServiceManager: TileServiceManager\r\n @Mock private lateinit var mWindowService: IWindowManager\r\n @Mock private lateinit var mPackageManager: PackageManager\r\n @Mock private lateinit var mApplicationInfo: ApplicationInfo\r\n @Mock private lateinit var mServiceInfo: ServiceInfo\r\n\r\n private lateinit var customTile: CustomTile\r\n\r\n @Before\r\n fun setUp() {\r\n MockitoAnnotations.initMocks(this)\r\n\r\n mContext.addMockSystemService(\"window\", mWindowService)\r\n mContext.setMockPackageManager(mPackageManager)\r\n `when`(mTileHost.tileServices).thenReturn(mTileServices)\r\n `when`(mTileHost.context).thenReturn(mContext)\r\n `when`(mTileServices.getTileWrapper(any(CustomTile::class.java)))\r\n .thenReturn(mTileServiceManager)\r\n `when`(mTileServiceManager.tileService).thenReturn(mTileService)\r\n `when`(mPackageManager.getApplicationInfo(anyString(), anyInt()))\r\n .thenReturn(mApplicationInfo)\r\n\r\n `when`(mPackageManager.getServiceInfo(any(ComponentName::class.java), anyInt()))\r\n .thenReturn(mServiceInfo)\r\n mServiceInfo.applicationInfo = mApplicationInfo\r\n\r\n customTile = CustomTile.create(mTileHost, TILE_SPEC, mContext)\r\n }\r\n\r\n @Test\r\n fun testCorrectUser() {\r\n assertEquals(0, customTile.user)\r\n\r\n val userContext = mock(Context::class.java)\r\n `when`(userContext.packageManager).thenReturn(mPackageManager)\r\n `when`(userContext.userId).thenReturn(10)\r\n\r\n val tile = CustomTile.create(mTileHost, TILE_SPEC, userContext)\r\n\r\n assertEquals(10, tile.user)\r\n }\r\n\r\n @Test\r\n fun testToggleableTileHasBooleanState() {\r\n `when`(mTileServiceManager.isToggleableTile).thenReturn(true)\r\n customTile = CustomTile.create(mTileHost, TILE_SPEC, mContext)\r\n\r\n assertTrue(customTile.state is QSTile.BooleanState)\r\n assertTrue(customTile.newTileState() is QSTile.BooleanState)\r\n }\r\n\r\n @Test\r\n fun testRegularTileHasNotBooleanState() {\r\n assertFalse(customTile.state is QSTile.BooleanState)\r\n assertFalse(customTile.newTileState() is QSTile.BooleanState)\r\n }\r\n\r\n @Test\r\n fun testValueUpdatedInBooleanTile() {\r\n `when`(mTileServiceManager.isToggleableTile).thenReturn(true)\r\n customTile = CustomTile.create(mTileHost, TILE_SPEC, mContext)\r\n customTile.qsTile.icon = mock(Icon::class.java)\r\n `when`(customTile.qsTile.icon.loadDrawable(any(Context::class.java)))\r\n .thenReturn(mock(Drawable::class.java))\r\n\r\n val state = customTile.newTileState()\r\n assertTrue(state is QSTile.BooleanState)\r\n\r\n customTile.qsTile.state = Tile.STATE_INACTIVE\r\n customTile.handleUpdateState(state, null)\r\n assertFalse((state as QSTile.BooleanState).value)\r\n\r\n customTile.qsTile.state = Tile.STATE_ACTIVE\r\n customTile.handleUpdateState(state, null)\r\n assertTrue(state.value)\r\n\r\n customTile.qsTile.state = Tile.STATE_UNAVAILABLE\r\n customTile.handleUpdateState(state, null)\r\n assertFalse(state.value)\r\n }\r\n\r\n @Test\r\n fun testNoCrashOnNullDrawable() {\r\n customTile.qsTile.icon = mock(Icon::class.java)\r\n `when`(customTile.qsTile.icon.loadDrawable(any(Context::class.java)))\r\n .thenReturn(null)\r\n customTile.handleUpdateState(customTile.newTileState(), null)\r\n }\r\n}" }, { "alpha_fraction": 0.7292287349700928, "alphanum_fraction": 0.7302533388137817, "avg_line_length": 47.24770736694336, "blob_id": "3202e2d24b9a9d4746fc88a8e64e2ecb4e498da9", "content_id": "4d4aae1f262d35158f2c8a524503266362cb4009", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], 
"is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 10736, "license_type": "permissive", "max_line_length": 99, "num_lines": 218, "path": "/packages/SystemUI/tests/src/com/android/systemui/statusbar/phone/StatusBarNotificationPresenterTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2018 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file\r\n * except in compliance with the License. You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software distributed under the\r\n * License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\r\n * KIND, either express or implied. See the License for the specific language governing\r\n * permissions and limitations under the License.\r\n */\r\n\r\npackage com.android.systemui.statusbar.phone;\r\n\r\nimport static android.view.Display.DEFAULT_DISPLAY;\r\n\r\nimport static org.junit.Assert.assertTrue;\r\nimport static org.mockito.Mockito.mock;\r\nimport static org.mockito.Mockito.verify;\r\nimport static org.mockito.Mockito.when;\r\n\r\nimport android.app.Notification;\r\nimport android.app.StatusBarManager;\r\nimport android.content.Context;\r\nimport android.metrics.LogMaker;\r\nimport android.support.test.metricshelper.MetricsAsserts;\r\nimport android.testing.AndroidTestingRunner;\r\nimport android.testing.TestableLooper;\r\nimport android.testing.TestableLooper.RunWithLooper;\r\nimport android.view.ViewGroup;\r\n\r\nimport androidx.test.filters.SmallTest;\r\n\r\nimport com.android.internal.logging.MetricsLogger;\r\nimport com.android.internal.logging.nano.MetricsProto.MetricsEvent;\r\nimport com.android.internal.logging.testing.FakeMetricsLogger;\r\nimport com.android.systemui.ForegroundServiceNotificationListener;\r\nimport com.android.systemui.InitController;\r\nimport com.android.systemui.SysuiTestCase;\r\nimport com.android.systemui.plugins.statusbar.StatusBarStateController;\r\nimport com.android.systemui.statusbar.CommandQueue;\r\nimport com.android.systemui.statusbar.KeyguardIndicationController;\r\nimport com.android.systemui.statusbar.NotificationLockscreenUserManager;\r\nimport com.android.systemui.statusbar.NotificationMediaManager;\r\nimport com.android.systemui.statusbar.NotificationRemoteInputManager;\r\nimport com.android.systemui.statusbar.NotificationViewHierarchyManager;\r\nimport com.android.systemui.statusbar.RemoteInputController;\r\nimport com.android.systemui.statusbar.SysuiStatusBarStateController;\r\nimport com.android.systemui.statusbar.notification.ActivityLaunchAnimator;\r\nimport com.android.systemui.statusbar.notification.DynamicPrivacyController;\r\nimport com.android.systemui.statusbar.notification.NotificationEntryManager;\r\nimport com.android.systemui.statusbar.notification.VisualStabilityManager;\r\nimport com.android.systemui.statusbar.notification.collection.NotificationEntry;\r\nimport com.android.systemui.statusbar.notification.collection.NotificationEntryBuilder;\r\nimport com.android.systemui.statusbar.notification.interruption.NotificationInterruptStateProvider;\r\nimport com.android.systemui.statusbar.notification.interruption.NotificationInterruptSuppressor;\r\nimport com.android.systemui.statusbar.notification.row.ActivatableNotificationView;\r\nimport 
com.android.systemui.statusbar.notification.row.NotificationGutsManager;\r\nimport com.android.systemui.statusbar.notification.stack.NotificationListContainer;\r\nimport com.android.systemui.statusbar.policy.KeyguardStateController;\r\n\r\nimport org.junit.Before;\r\nimport org.junit.Test;\r\nimport org.junit.runner.RunWith;\r\nimport org.mockito.ArgumentCaptor;\r\n\r\nimport java.util.ArrayList;\r\n\r\n@SmallTest\r\n@RunWith(AndroidTestingRunner.class)\r\n@RunWithLooper()\r\npublic class StatusBarNotificationPresenterTest extends SysuiTestCase {\r\n\r\n\r\n private StatusBarNotificationPresenter mStatusBarNotificationPresenter;\r\n private NotificationInterruptStateProvider mNotificationInterruptStateProvider =\r\n mock(NotificationInterruptStateProvider.class);\r\n private NotificationInterruptSuppressor mInterruptSuppressor;\r\n private CommandQueue mCommandQueue;\r\n private FakeMetricsLogger mMetricsLogger;\r\n private ShadeController mShadeController = mock(ShadeController.class);\r\n private StatusBar mStatusBar = mock(StatusBar.class);\r\n private InitController mInitController = new InitController();\r\n\r\n @Before\r\n public void setup() {\r\n NotificationRemoteInputManager notificationRemoteInputManager =\r\n mock(NotificationRemoteInputManager.class);\r\n when(notificationRemoteInputManager.getController())\r\n .thenReturn(mock(RemoteInputController.class));\r\n mMetricsLogger = new FakeMetricsLogger();\r\n mDependency.injectTestDependency(MetricsLogger.class, mMetricsLogger);\r\n mCommandQueue = new CommandQueue(mContext);\r\n mDependency.injectTestDependency(StatusBarStateController.class,\r\n mock(SysuiStatusBarStateController.class));\r\n mDependency.injectTestDependency(ShadeController.class, mShadeController);\r\n mDependency.injectTestDependency(NotificationRemoteInputManager.class,\r\n notificationRemoteInputManager);\r\n mDependency.injectMockDependency(NotificationViewHierarchyManager.class);\r\n mDependency.injectMockDependency(NotificationRemoteInputManager.Callback.class);\r\n mDependency.injectMockDependency(NotificationLockscreenUserManager.class);\r\n mDependency.injectMockDependency(NotificationMediaManager.class);\r\n mDependency.injectMockDependency(VisualStabilityManager.class);\r\n mDependency.injectMockDependency(NotificationGutsManager.class);\r\n mDependency.injectMockDependency(NotificationShadeWindowController.class);\r\n mDependency.injectMockDependency(ForegroundServiceNotificationListener.class);\r\n NotificationEntryManager entryManager =\r\n mDependency.injectMockDependency(NotificationEntryManager.class);\r\n when(entryManager.getActiveNotificationsForCurrentUser()).thenReturn(new ArrayList<>());\r\n\r\n NotificationShadeWindowView notificationShadeWindowView =\r\n mock(NotificationShadeWindowView.class);\r\n when(notificationShadeWindowView.getResources()).thenReturn(mContext.getResources());\r\n\r\n mStatusBarNotificationPresenter = new StatusBarNotificationPresenter(mContext,\r\n mock(NotificationPanelViewController.class), mock(HeadsUpManagerPhone.class),\r\n notificationShadeWindowView, mock(NotificationListContainerViewGroup.class),\r\n mock(DozeScrimController.class), mock(ScrimController.class),\r\n mock(ActivityLaunchAnimator.class), mock(DynamicPrivacyController.class),\r\n mock(KeyguardStateController.class),\r\n mock(KeyguardIndicationController.class), mStatusBar,\r\n mock(ShadeControllerImpl.class), mCommandQueue, mInitController,\r\n mNotificationInterruptStateProvider);\r\n mInitController.executePostInitTasks();\r\n 
ArgumentCaptor<NotificationInterruptSuppressor> suppressorCaptor =\r\n ArgumentCaptor.forClass(NotificationInterruptSuppressor.class);\r\n verify(mNotificationInterruptStateProvider).addSuppressor(suppressorCaptor.capture());\r\n mInterruptSuppressor = suppressorCaptor.getValue();\r\n }\r\n\r\n @Test\r\n public void testSuppressHeadsUp_disabledStatusBar() {\r\n Notification n = new Notification.Builder(getContext(), \"a\").build();\r\n NotificationEntry entry = new NotificationEntryBuilder()\r\n .setPkg(\"a\")\r\n .setOpPkg(\"a\")\r\n .setTag(\"a\")\r\n .setNotification(n)\r\n .build();\r\n mCommandQueue.disable(DEFAULT_DISPLAY, StatusBarManager.DISABLE_EXPAND, 0,\r\n false /* animate */);\r\n TestableLooper.get(this).processAllMessages();\r\n\r\n assertTrue(\"The panel should suppress heads up while disabled\",\r\n mInterruptSuppressor.suppressAwakeHeadsUp(entry));\r\n }\r\n\r\n @Test\r\n public void testSuppressHeadsUp_disabledNotificationShade() {\r\n Notification n = new Notification.Builder(getContext(), \"a\").build();\r\n NotificationEntry entry = new NotificationEntryBuilder()\r\n .setPkg(\"a\")\r\n .setOpPkg(\"a\")\r\n .setTag(\"a\")\r\n .setNotification(n)\r\n .build();\r\n mCommandQueue.disable(DEFAULT_DISPLAY, 0, StatusBarManager.DISABLE2_NOTIFICATION_SHADE,\r\n false /* animate */);\r\n TestableLooper.get(this).processAllMessages();\r\n\r\n assertTrue(\"The panel should suppress interruptions while notification shade \"\r\n + \"disabled\",\r\n mInterruptSuppressor.suppressAwakeHeadsUp(entry));\r\n }\r\n\r\n @Test\r\n public void testSuppressInterruptions_vrMode() {\r\n Notification n = new Notification.Builder(getContext(), \"a\").build();\r\n NotificationEntry entry = new NotificationEntryBuilder()\r\n .setPkg(\"a\")\r\n .setOpPkg(\"a\")\r\n .setTag(\"a\")\r\n .setNotification(n)\r\n .build();\r\n mStatusBarNotificationPresenter.mVrMode = true;\r\n\r\n assertTrue(\"Vr mode should suppress interruptions\",\r\n mInterruptSuppressor.suppressAwakeInterruptions(entry));\r\n }\r\n\r\n @Test\r\n public void testSuppressInterruptions_statusBarAlertsDisabled() {\r\n Notification n = new Notification.Builder(getContext(), \"a\").build();\r\n NotificationEntry entry = new NotificationEntryBuilder()\r\n .setPkg(\"a\")\r\n .setOpPkg(\"a\")\r\n .setTag(\"a\")\r\n .setNotification(n)\r\n .build();\r\n when(mStatusBar.areNotificationAlertsDisabled()).thenReturn(true);\r\n\r\n assertTrue(\"StatusBar alerts disabled shouldn't allow interruptions\",\r\n mInterruptSuppressor.suppressInterruptions(entry));\r\n }\r\n\r\n @Test\r\n public void onActivatedMetrics() {\r\n ActivatableNotificationView view = mock(ActivatableNotificationView.class);\r\n mStatusBarNotificationPresenter.onActivated(view);\r\n\r\n MetricsAsserts.assertHasLog(\"missing lockscreen note tap log\",\r\n mMetricsLogger.getLogs(),\r\n new LogMaker(MetricsEvent.ACTION_LS_NOTE)\r\n .setType(MetricsEvent.TYPE_ACTION));\r\n }\r\n\r\n // We need this because mockito doesn't know how to construct a mock that extends ViewGroup\r\n // and implements NotificationListContainer without it because of classloader issues.\r\n private abstract static class NotificationListContainerViewGroup extends ViewGroup\r\n implements NotificationListContainer {\r\n\r\n public NotificationListContainerViewGroup(Context context) {\r\n super(context);\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.7321428656578064, "alphanum_fraction": 0.7425876259803772, "avg_line_length": 36.0512809753418, "blob_id": "d87c7887ca6da21b4871560cc51bebbe3d143eb8", 
"content_id": "23f90b0f3cb9f09aa0111a4447b73093c155476a", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 2968, "license_type": "permissive", "max_line_length": 74, "num_lines": 78, "path": "/core/tests/overlaytests/host/test-apps/UpdateOverlay/Android.mk", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "# Copyright (C) 2018 The Android Open Source Project\r\n#\r\n# Licensed under the Apache License, Version 2.0 (the \"License\");\r\n# you may not use this file except in compliance with the License.\r\n# You may obtain a copy of the License at\r\n#\r\n# http://www.apache.org/licenses/LICENSE-2.0\r\n#\r\n# Unless required by applicable law or agreed to in writing, software\r\n# distributed under the License is distributed on an \"AS IS\" BASIS,\r\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n# See the License for the specific language governing permissions and\r\n# limitations under the License.\r\n\r\nLOCAL_PATH := $(call my-dir)\r\n\r\ninclude $(CLEAR_VARS)\r\nLOCAL_MODULE_TAGS := tests\r\nLOCAL_SRC_FILES := $(call all-java-files-under,src)\r\nLOCAL_PACKAGE_NAME := OverlayHostTests_UpdateOverlay\r\nLOCAL_SDK_VERSION := current\r\nLOCAL_COMPATIBILITY_SUITE := device-tests\r\nLOCAL_STATIC_JAVA_LIBRARIES := androidx.test.rules\r\nLOCAL_USE_AAPT2 := true\r\nLOCAL_AAPT_FLAGS := --no-resource-removal\r\ninclude $(BUILD_PACKAGE)\r\n\r\nmy_package_prefix := com.android.server.om.hosttest.framework_overlay\r\n\r\ninclude $(CLEAR_VARS)\r\nLOCAL_MODULE_TAGS := tests\r\nLOCAL_PACKAGE_NAME := OverlayHostTests_FrameworkOverlayV1\r\nLOCAL_SDK_VERSION := current\r\nLOCAL_COMPATIBILITY_SUITE := device-tests\r\nLOCAL_CERTIFICATE := platform\r\nLOCAL_AAPT_FLAGS := --custom-package $(my_package_prefix)_v1\r\nLOCAL_AAPT_FLAGS += --version-code 1 --version-name v1\r\nLOCAL_RESOURCE_DIR := $(LOCAL_PATH)/framework/v1/res\r\nLOCAL_MANIFEST_FILE := framework/AndroidManifest.xml\r\ninclude $(BUILD_PACKAGE)\r\n\r\ninclude $(CLEAR_VARS)\r\nLOCAL_MODULE_TAGS := tests\r\nLOCAL_PACKAGE_NAME := OverlayHostTests_FrameworkOverlayV2\r\nLOCAL_SDK_VERSION := current\r\nLOCAL_COMPATIBILITY_SUITE := device-tests\r\nLOCAL_CERTIFICATE := platform\r\nLOCAL_AAPT_FLAGS := --custom-package $(my_package_prefix)_v2\r\nLOCAL_AAPT_FLAGS += --version-code 2 --version-name v2\r\nLOCAL_RESOURCE_DIR := $(LOCAL_PATH)/framework/v2/res\r\nLOCAL_MANIFEST_FILE := framework/AndroidManifest.xml\r\ninclude $(BUILD_PACKAGE)\r\n\r\nmy_package_prefix := com.android.server.om.hosttest.app_overlay\r\n\r\ninclude $(CLEAR_VARS)\r\nLOCAL_MODULE_TAGS := tests\r\nLOCAL_PACKAGE_NAME := OverlayHostTests_AppOverlayV1\r\nLOCAL_SDK_VERSION := current\r\nLOCAL_COMPATIBILITY_SUITE := device-tests\r\nLOCAL_AAPT_FLAGS := --custom-package $(my_package_prefix)_v1\r\nLOCAL_AAPT_FLAGS += --version-code 1 --version-name v1\r\nLOCAL_RESOURCE_DIR := $(LOCAL_PATH)/app/v1/res\r\nLOCAL_MANIFEST_FILE := app/v1/AndroidManifest.xml\r\ninclude $(BUILD_PACKAGE)\r\n\r\ninclude $(CLEAR_VARS)\r\nLOCAL_MODULE_TAGS := tests\r\nLOCAL_PACKAGE_NAME := OverlayHostTests_AppOverlayV2\r\nLOCAL_SDK_VERSION := current\r\nLOCAL_COMPATIBILITY_SUITE := device-tests\r\nLOCAL_AAPT_FLAGS := --custom-package $(my_package_prefix)_v2\r\nLOCAL_AAPT_FLAGS += --version-code 2 --version-name v2\r\nLOCAL_RESOURCE_DIR := $(LOCAL_PATH)/app/v2/res\r\nLOCAL_MANIFEST_FILE := app/v2/AndroidManifest.xml\r\ninclude 
$(BUILD_PACKAGE)\r\n\r\nmy_package_prefix :=\r\n" }, { "alpha_fraction": 0.678787887096405, "alphanum_fraction": 0.6836363673210144, "avg_line_length": 30.27450942993164, "blob_id": "bdfe35e4ff6ac8cb9d4287dbfad89cb4252738c9", "content_id": "3413ee8716f0af96c5386f8309b248a0646802b9", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1650, "license_type": "permissive", "max_line_length": 75, "num_lines": 51, "path": "/packages/WAPPushManager/tests/src/com/android/smspush/unitTests/DrmReceiver.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2010 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.smspush.unitTests;\r\n\r\nimport android.content.BroadcastReceiver;\r\nimport android.content.Context;\r\nimport android.content.Intent;\r\nimport android.util.Log;\r\n\r\nimport com.android.internal.util.HexDump;\r\n\r\n/**\r\n * A sample wap push receiver application for existing framework\r\n * This class is listening for \"application/vnd.oma.drm.rights+xml\" message\r\n */\r\npublic class DrmReceiver extends BroadcastReceiver {\r\n private static final String LOG_TAG = \"WAP PUSH\";\r\n\r\n @Override\r\n public void onReceive(Context context, Intent intent) {\r\n Log.d(LOG_TAG, \"DrmReceiver received.\");\r\n\r\n byte[] body;\r\n byte[] header;\r\n\r\n body = intent.getByteArrayExtra(\"data\");\r\n header = intent.getByteArrayExtra(\"header\");\r\n\r\n Log.d(LOG_TAG, \"header:\");\r\n Log.d(LOG_TAG, HexDump.dumpHexString(header));\r\n Log.d(LOG_TAG, \"body:\");\r\n Log.d(LOG_TAG, HexDump.dumpHexString(body));\r\n\r\n DataVerify.SetLastReceivedPdu(body);\r\n }\r\n\r\n}\r\n\r\n\r\n" }, { "alpha_fraction": 0.6182126402854919, "alphanum_fraction": 0.6397058963775635, "avg_line_length": 32.66666793823242, "blob_id": "869e5979dbce8820fd66de7c65311829091d6e1a", "content_id": "37f4ff918ca0ee97dae996730fc3d3ef8675b0bb", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1768, "license_type": "permissive", "max_line_length": 98, "num_lines": 51, "path": "/tests/Camera2Tests/SmartCamera/SimpleCamera/jni/contrast.cpp", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2013 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the 
specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n// Native function to extract contrast ratio from image (handed down as ByteBuffer).\r\n\r\n#include \"contrast.h\"\r\n\r\n#include <math.h>\r\n#include <string.h>\r\n#include <jni.h>\r\n#include <unistd.h>\r\n#include <android/log.h>\r\n\r\njfloat\r\nJava_androidx_media_filterfw_samples_simplecamera_ContrastRatioFilter_contrastOperator(\r\n JNIEnv* env, jclass clazz, jint width, jint height, jobject imageBuffer) {\r\n\r\n if (imageBuffer == 0) {\r\n return 0.0f;\r\n }\r\n float total = 0;\r\n const int numPixels = width * height;\r\n unsigned char* srcPtr = static_cast<unsigned char*>(env->GetDirectBufferAddress(imageBuffer));\r\n float* lumArray = new float[numPixels];\r\n for (int i = 0; i < numPixels; i++) {\r\n lumArray[i] = (0.2126f * *(srcPtr + 4 * i) + 0.7152f *\r\n *(srcPtr + 4 * i + 1) + 0.0722f * *(srcPtr + 4 * i + 2)) / 255;\r\n total += lumArray[i];\r\n }\r\n const float avg = total / numPixels;\r\n float sum = 0;\r\n\r\n for (int i = 0; i < numPixels; i++) {\r\n sum += (lumArray[i] - avg) * (lumArray[i] - avg);\r\n }\r\n delete[] lumArray;\r\n return ((float) sqrt(sum / numPixels));\r\n}\r\n" }, { "alpha_fraction": 0.7235063314437866, "alphanum_fraction": 0.7291757464408875, "avg_line_length": 36.25, "blob_id": "306d56d548d58dfc51e22a634151da4c786f17cb", "content_id": "f59139a4001084a91743e46e81a8e1d8c903832b", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 2293, "license_type": "permissive", "max_line_length": 94, "num_lines": 60, "path": "/packages/SystemUI/tests/src/com/android/systemui/statusbar/StatusBarStateControllerImplTest.kt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2020 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.systemui.statusbar\r\n\r\nimport android.testing.AndroidTestingRunner\r\nimport android.testing.TestableLooper\r\nimport androidx.test.filters.SmallTest\r\nimport com.android.internal.logging.testing.UiEventLoggerFake\r\nimport com.android.systemui.SysuiTestCase\r\nimport org.junit.Assert.assertEquals\r\nimport org.junit.Before\r\nimport org.junit.Test\r\nimport org.junit.runner.RunWith\r\n\r\n@SmallTest\r\n@RunWith(AndroidTestingRunner::class)\r\[email protected]\r\nclass StatusBarStateControllerImplTest : SysuiTestCase() {\r\n\r\n private lateinit var controller: StatusBarStateControllerImpl\r\n private lateinit var uiEventLogger: UiEventLoggerFake\r\n\r\n @Before\r\n fun setUp() {\r\n uiEventLogger = UiEventLoggerFake()\r\n controller = StatusBarStateControllerImpl(uiEventLogger)\r\n }\r\n\r\n @Test\r\n fun testChangeState_logged() {\r\n TestableLooper.get(this).runWithLooper {\r\n controller.state = StatusBarState.FULLSCREEN_USER_SWITCHER\r\n controller.state = StatusBarState.KEYGUARD\r\n 
controller.state = StatusBarState.SHADE\r\n controller.state = StatusBarState.SHADE_LOCKED\r\n }\r\n\r\n val logs = uiEventLogger.logs\r\n assertEquals(4, logs.size)\r\n val ids = logs.map(UiEventLoggerFake.FakeUiEvent::eventId)\r\n assertEquals(StatusBarStateEvent.STATUS_BAR_STATE_FULLSCREEN_USER_SWITCHER.id, ids[0])\r\n assertEquals(StatusBarStateEvent.STATUS_BAR_STATE_KEYGUARD.id, ids[1])\r\n assertEquals(StatusBarStateEvent.STATUS_BAR_STATE_SHADE.id, ids[2])\r\n assertEquals(StatusBarStateEvent.STATUS_BAR_STATE_SHADE_LOCKED.id, ids[3])\r\n }\r\n}" }, { "alpha_fraction": 0.815315306186676, "alphanum_fraction": 0.815315306186676, "avg_line_length": 53.5, "blob_id": "abe8bbf4820c6346494c9da892cc0e0d5a8ebd07", "content_id": "797c68b435a26a4f2b2616401573e88c11be4c8a", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 222, "license_type": "permissive", "max_line_length": 98, "num_lines": 4, "path": "/tests/HierarchyViewerTest/run_tests.sh", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "#!/usr/bin/env bash\r\n# Runs the tests in this apk\r\nadb install $OUT/data/app/HierarchyViewerTest/HierarchyViewerTest.apk\r\nadb shell am instrument -w com.android.test.hierarchyviewer/android.test.InstrumentationTestRunner\r\n" }, { "alpha_fraction": 0.6518177390098572, "alphanum_fraction": 0.6733230948448181, "avg_line_length": 32.29824447631836, "blob_id": "9a2f59b3cd52d53c88758b175d64fd7d446a813b", "content_id": "0c052119777fbe450daf54fe2dc0601a428c066c", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1953, "license_type": "permissive", "max_line_length": 86, "num_lines": 57, "path": "/tests/VectorDrawableTest/src/com/android/test/dynamic/BoundsCheckTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2014 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.test.dynamic;\r\n\r\nimport android.app.Activity;\r\nimport android.content.Context;\r\nimport android.content.res.Resources;\r\nimport android.graphics.Canvas;\r\nimport android.graphics.drawable.BitmapDrawable;\r\nimport android.graphics.drawable.VectorDrawable;\r\nimport android.os.Bundle;\r\nimport android.view.View;\r\n\r\npublic class BoundsCheckTest extends Activity {\r\n @Override\r\n protected void onCreate(Bundle savedInstanceState) {\r\n super.onCreate(savedInstanceState);\r\n final BitmapsView view = new BitmapsView(this);\r\n setContentView(view);\r\n }\r\n\r\n static class BitmapsView extends View {\r\n private final BitmapDrawable mBitmap1;\r\n private final VectorDrawable mVector1;\r\n\r\n BitmapsView(Context c) {\r\n super(c);\r\n Resources res = c.getResources();\r\n mBitmap1 = (BitmapDrawable) res.getDrawable(R.drawable.icon);\r\n mVector1 = 
(VectorDrawable) res.getDrawable(R.drawable.vector_drawable28);\r\n }\r\n\r\n @Override\r\n protected void onDraw(Canvas canvas) {\r\n super.onDraw(canvas);\r\n mBitmap1.setBounds(100, 100, 400, 400);\r\n mBitmap1.draw(canvas);\r\n\r\n mVector1.setBounds(100, 100, 400, 400);\r\n mVector1.draw(canvas);\r\n }\r\n }\r\n}" }, { "alpha_fraction": 0.6386363506317139, "alphanum_fraction": 0.6731534004211426, "avg_line_length": 40.92683029174805, "blob_id": "e87276022c1294f5264f7ddc546920f4acadefce", "content_id": "3626357f04aeee45603204b1b1cbdaa812175733", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 7040, "license_type": "permissive", "max_line_length": 99, "num_lines": 164, "path": "/services/tests/servicestests/src/com/android/server/Vector3Test.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2016 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.server;\r\n\r\nimport android.test.AndroidTestCase;\r\n\r\nimport java.lang.Exception;\r\nimport java.lang.Math;\r\n\r\n/**\r\n * Tests for {@link com.android.server.AnyMotionDetector.Vector3}\r\n */\r\npublic class Vector3Test extends AndroidTestCase {\r\n private static final float tolerance = 1.0f / (1 << 12);\r\n private static final float STATIONARY_ANGLE_THRESHOLD = 0.05f;\r\n\r\n private AnyMotionDetector.Vector3 unitXAxis;\r\n private AnyMotionDetector.Vector3 unitYAxis;\r\n private AnyMotionDetector.Vector3 unitZAxis;\r\n private AnyMotionDetector.Vector3 x3;\r\n private AnyMotionDetector.Vector3 case1A;\r\n private AnyMotionDetector.Vector3 case1B;\r\n private AnyMotionDetector.Vector3 case2A;\r\n private AnyMotionDetector.Vector3 case2B;\r\n private AnyMotionDetector.Vector3 x1y1;\r\n private AnyMotionDetector.Vector3 xn1y1;\r\n private AnyMotionDetector.Vector3 x1z1;\r\n private AnyMotionDetector.Vector3 y1z1;\r\n private AnyMotionDetector.Vector3 piOverSixUnitCircle;\r\n\r\n\r\n private boolean nearlyEqual(float a, float b) {\r\n return Math.abs(a - b) <= tolerance;\r\n }\r\n\r\n public void setUp() throws Exception {\r\n super.setUp();\r\n unitXAxis = new AnyMotionDetector.Vector3(0, 1, 0, 0);\r\n unitYAxis = new AnyMotionDetector.Vector3(0, 0, 1, 0);\r\n unitZAxis = new AnyMotionDetector.Vector3(0, 0, 0, 1);\r\n x3 = new AnyMotionDetector.Vector3(0, 3, 0, 0);\r\n x1y1 = new AnyMotionDetector.Vector3(0, 1, 1, 0);\r\n xn1y1 = new AnyMotionDetector.Vector3(0, -1, 1, 0);\r\n x1z1 = new AnyMotionDetector.Vector3(0, 1, 0, 1);\r\n y1z1 = new AnyMotionDetector.Vector3(0, 0, 1, 1);\r\n piOverSixUnitCircle = new AnyMotionDetector.Vector3(\r\n 0, (float)Math.sqrt(3)/2, (float)0.5, 0);\r\n\r\n case1A = new AnyMotionDetector.Vector3(0, -9.81f, -0.02f, 0.3f);\r\n case1B = new AnyMotionDetector.Vector3(0, -9.80f, -0.02f, 0.3f);\r\n case2A = new AnyMotionDetector.Vector3(0, 1f, 2f, 3f);\r\n 
case2B = new AnyMotionDetector.Vector3(0, 4f, 5f, 6f);\r\n }\r\n\r\n public void testVector3Norm() {\r\n assertTrue(nearlyEqual(unitXAxis.norm(), 1.0f));\r\n assertTrue(nearlyEqual(unitYAxis.norm(), 1.0f));\r\n assertTrue(nearlyEqual(unitZAxis.norm(), 1.0f));\r\n assertTrue(nearlyEqual(x1y1.norm(), (float)Math.sqrt(2)));\r\n }\r\n\r\n public void testVector3AngleBetween() {\r\n // Zero angle.\r\n assertTrue(nearlyEqual(unitXAxis.angleBetween(unitXAxis), 0.0f));\r\n assertTrue(nearlyEqual(unitYAxis.angleBetween(unitYAxis), 0.0f));\r\n assertTrue(nearlyEqual(unitZAxis.angleBetween(unitZAxis), 0.0f));\r\n\r\n // Unit axes should be perpendicular.\r\n assertTrue(nearlyEqual(unitXAxis.angleBetween(unitYAxis), 90.0f));\r\n assertTrue(nearlyEqual(unitXAxis.angleBetween(unitZAxis), 90.0f));\r\n assertTrue(nearlyEqual(unitYAxis.angleBetween(unitZAxis), 90.0f));\r\n\r\n // 45 degree angles.\r\n assertTrue(nearlyEqual(unitXAxis.angleBetween(x1y1), 45.0f));\r\n assertTrue(nearlyEqual(unitYAxis.angleBetween(x1y1), 45.0f));\r\n\r\n // 135 degree angles.\r\n assertTrue(nearlyEqual(xn1y1.angleBetween(unitXAxis), 135.0f));\r\n\r\n // 30 degree angles.\r\n assertTrue(nearlyEqual(piOverSixUnitCircle.angleBetween(unitXAxis), 30.0f));\r\n\r\n // These vectors are expected to be still.\r\n assertTrue(case1A.angleBetween(case1A) < STATIONARY_ANGLE_THRESHOLD);\r\n assertTrue(case1A.angleBetween(case1B) < STATIONARY_ANGLE_THRESHOLD);\r\n assertTrue(unitXAxis.angleBetween(unitXAxis) < STATIONARY_ANGLE_THRESHOLD);\r\n assertTrue(unitYAxis.angleBetween(unitYAxis) < STATIONARY_ANGLE_THRESHOLD);\r\n assertTrue(unitZAxis.angleBetween(unitZAxis) < STATIONARY_ANGLE_THRESHOLD);\r\n }\r\n\r\n public void testVector3Normalized() {\r\n AnyMotionDetector.Vector3 unitXAxisNormalized = unitXAxis.normalized();\r\n assertTrue(nearlyEqual(unitXAxisNormalized.x, unitXAxis.x));\r\n assertTrue(nearlyEqual(unitXAxisNormalized.y, unitXAxis.y));\r\n assertTrue(nearlyEqual(unitXAxisNormalized.z, unitXAxis.z));\r\n\r\n // Normalizing the vector created by multiplying the unit vector by 3 gets the unit vector.\r\n AnyMotionDetector.Vector3 x3Normalized = x3.normalized();\r\n assertTrue(nearlyEqual(x3Normalized.x, unitXAxis.x));\r\n assertTrue(nearlyEqual(x3Normalized.y, unitXAxis.y));\r\n assertTrue(nearlyEqual(x3Normalized.z, unitXAxis.z));\r\n }\r\n\r\n public void testVector3Cross() {\r\n AnyMotionDetector.Vector3 xCrossX = unitXAxis.cross(unitXAxis);\r\n assertTrue(nearlyEqual(xCrossX.x, 0f));\r\n assertTrue(nearlyEqual(xCrossX.y, 0f));\r\n assertTrue(nearlyEqual(xCrossX.z, 0f));\r\n\r\n AnyMotionDetector.Vector3 xCrossNx = unitXAxis.cross(unitXAxis.times(-1));\r\n assertTrue(nearlyEqual(xCrossNx.x, 0f));\r\n assertTrue(nearlyEqual(xCrossNx.y, 0f));\r\n assertTrue(nearlyEqual(xCrossNx.z, 0f));\r\n\r\n AnyMotionDetector.Vector3 cross2 = case2A.cross(case2B);\r\n assertTrue(nearlyEqual(cross2.x, -3));\r\n assertTrue(nearlyEqual(cross2.y, 6));\r\n assertTrue(nearlyEqual(cross2.z, -3));\r\n }\r\n\r\n public void testVector3Times() {\r\n AnyMotionDetector.Vector3 yTimes2 = unitYAxis.times(2);\r\n assertTrue(nearlyEqual(yTimes2.x, 0f));\r\n assertTrue(nearlyEqual(yTimes2.y, 2f));\r\n assertTrue(nearlyEqual(yTimes2.z, 0f));\r\n }\r\n\r\n public void testVector3Plus() {\r\n AnyMotionDetector.Vector3 xPlusY = unitXAxis.plus(unitYAxis);\r\n assertTrue(nearlyEqual(xPlusY.x, 1f));\r\n assertTrue(nearlyEqual(xPlusY.y, 1f));\r\n assertTrue(nearlyEqual(xPlusY.z, 0f));\r\n }\r\n\r\n public void testVector3Minus() {\r\n AnyMotionDetector.Vector3 xMinusY 
= unitXAxis.minus(unitYAxis);\r\n assertTrue(nearlyEqual(xMinusY.x, 1f));\r\n assertTrue(nearlyEqual(xMinusY.y, -1f));\r\n assertTrue(nearlyEqual(xMinusY.z, 0f));\r\n }\r\n\r\n public void testVector3DotProduct() {\r\n float xDotX = unitXAxis.dotProduct(unitXAxis);\r\n float xDotY = unitXAxis.dotProduct(unitYAxis);\r\n float xDotZ = unitXAxis.dotProduct(unitZAxis);\r\n assertTrue(nearlyEqual(xDotX, 1f));\r\n assertTrue(nearlyEqual(xDotY, 0f));\r\n assertTrue(nearlyEqual(xDotZ, 0f));\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6377358436584473, "alphanum_fraction": 0.6431266665458679, "avg_line_length": 34.73267364501953, "blob_id": "9f7681722f708f49c49b6d8107e88300baba82e5", "content_id": "ddd62b9c3466b2e15ac17bf9022e8e9ad2899e67", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 3710, "license_type": "permissive", "max_line_length": 98, "num_lines": 101, "path": "/tests/HwAccelerationTest/src/com/android/test/hwui/TextGammaActivity.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2010 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.test.hwui;\r\n\r\nimport android.app.Activity;\r\nimport android.content.Context;\r\nimport android.content.Intent;\r\nimport android.graphics.Bitmap;\r\nimport android.graphics.Canvas;\r\nimport android.graphics.PorterDuff;\r\nimport android.graphics.drawable.ColorDrawable;\r\nimport android.os.Bundle;\r\nimport android.view.LayoutInflater;\r\nimport android.widget.ImageView;\r\nimport android.widget.LinearLayout;\r\n\r\n@SuppressWarnings({\"UnusedDeclaration\"})\r\npublic class TextGammaActivity extends Activity {\r\n @Override\r\n protected void onCreate(Bundle savedInstanceState) {\r\n super.onCreate(savedInstanceState);\r\n\r\n final LinearLayout layout = new LinearLayout(this);\r\n layout.setOrientation(LinearLayout.VERTICAL);\r\n\r\n final GammaTextView gamma = new GammaTextView(this);\r\n layout.addView(gamma, new LinearLayout.LayoutParams(\r\n LinearLayout.LayoutParams.WRAP_CONTENT, LinearLayout.LayoutParams.WRAP_CONTENT\r\n ));\r\n\r\n setContentView(layout);\r\n\r\n layout.post(new Runnable() {\r\n public void run() {\r\n Bitmap b = Bitmap.createBitmap(gamma.getWidth(), gamma.getHeight(),\r\n Bitmap.Config.ARGB_8888);\r\n Canvas c = new Canvas(b);\r\n c.drawColor(0, PorterDuff.Mode.CLEAR);\r\n gamma.draw(c);\r\n\r\n ImageView image = new ImageView(TextGammaActivity.this);\r\n image.setImageBitmap(b);\r\n\r\n layout.addView(image, new LinearLayout.LayoutParams(\r\n LinearLayout.LayoutParams.WRAP_CONTENT,\r\n LinearLayout.LayoutParams.WRAP_CONTENT\r\n ));\r\n\r\n startActivity(new Intent(TextGammaActivity.this, SubGammaActivity.class));\r\n }\r\n });\r\n\r\n getWindow().setBackgroundDrawable(new ColorDrawable(0xffffffff));\r\n }\r\n\r\n static class GammaTextView extends LinearLayout {\r\n 
GammaTextView(Context c) {\r\n super(c);\r\n\r\n setBackgroundColor(0xffffffff);\r\n\r\n final LayoutInflater inflater = LayoutInflater.from(c);\r\n inflater.inflate(R.layout.text_large, this, true);\r\n inflater.inflate(R.layout.text_medium, this, true);\r\n inflater.inflate(R.layout.text_small, this, true);\r\n }\r\n }\r\n\r\n public static class SubGammaActivity extends Activity {\r\n @Override\r\n protected void onCreate(Bundle savedInstanceState) {\r\n super.onCreate(savedInstanceState);\r\n\r\n final LinearLayout layout = new LinearLayout(this);\r\n layout.setOrientation(LinearLayout.VERTICAL);\r\n\r\n final GammaTextView gamma = new GammaTextView(this);\r\n final LinearLayout.LayoutParams lp = new LinearLayout.LayoutParams(\r\n LinearLayout.LayoutParams.WRAP_CONTENT, LinearLayout.LayoutParams.WRAP_CONTENT\r\n );\r\n lp.setMargins(0, 74, 0, 0);\r\n layout.addView(gamma, lp);\r\n\r\n setContentView(layout);\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.610859751701355, "alphanum_fraction": 0.6244344115257263, "avg_line_length": 15, "blob_id": "bef2c15bcc612a681a29020da9750a7c4bbf1497", "content_id": "cc54187d3523b664fddef1a8f2f81e1274f4c830", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 221, "license_type": "permissive", "max_line_length": 78, "num_lines": 13, "path": "/tools/lock_agent/start_with_lockagent.sh", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "#!/system/bin/sh\r\n\r\nAGENT_OPTIONS=\r\nif [[ \"$1\" == --agent-options ]] ; then\r\n shift\r\n AGENT_OPTIONS=\"=$1\"\r\n shift\r\nfi\r\n\r\nAPP=$1\r\nshift\r\n\r\n$APP -Xplugin:libopenjdkjvmti.so \"-agentpath:liblockagent.so$AGENT_OPTIONS\" $@\r\n" }, { "alpha_fraction": 0.7075076103210449, "alphanum_fraction": 0.7219731211662292, "avg_line_length": 42.31410217285156, "blob_id": "b1cb968df5001bfd93bde583e0bf7868d4dafdf7", "content_id": "c3eb6a5d147f64484f643cf8977b86e2d01d85a1", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 6913, "license_type": "permissive", "max_line_length": 98, "num_lines": 156, "path": "/packages/Tethering/tests/unit/src/com/android/networkstack/tethering/IPv6TetheringCoordinatorTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2020 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.networkstack.tethering;\r\n\r\nimport static android.net.NetworkCapabilities.TRANSPORT_CELLULAR;\r\nimport static android.net.RouteInfo.RTN_UNICAST;\r\nimport static android.net.ip.IpServer.STATE_LOCAL_ONLY;\r\nimport static android.net.ip.IpServer.STATE_TETHERED;\r\n\r\nimport static org.junit.Assert.assertEquals;\r\nimport static org.junit.Assert.assertFalse;\r\nimport static org.junit.Assert.assertNotEquals;\r\nimport static 
org.junit.Assert.assertNotNull;\r\nimport static org.mockito.ArgumentMatchers.anyString;\r\nimport static org.mockito.ArgumentMatchers.eq;\r\nimport static org.mockito.Mockito.mock;\r\nimport static org.mockito.Mockito.reset;\r\nimport static org.mockito.Mockito.verify;\r\nimport static org.mockito.Mockito.verifyNoMoreInteractions;\r\nimport static org.mockito.Mockito.when;\r\n\r\nimport android.net.InetAddresses;\r\nimport android.net.IpPrefix;\r\nimport android.net.LinkAddress;\r\nimport android.net.LinkProperties;\r\nimport android.net.Network;\r\nimport android.net.NetworkCapabilities;\r\nimport android.net.RouteInfo;\r\nimport android.net.ip.IpServer;\r\nimport android.net.util.SharedLog;\r\n\r\nimport androidx.test.filters.SmallTest;\r\nimport androidx.test.runner.AndroidJUnit4;\r\n\r\nimport org.junit.Before;\r\nimport org.junit.Test;\r\nimport org.junit.runner.RunWith;\r\nimport org.mockito.ArgumentCaptor;\r\nimport org.mockito.Mock;\r\nimport org.mockito.MockitoAnnotations;\r\n\r\nimport java.net.InetAddress;\r\nimport java.util.ArrayList;\r\nimport java.util.List;\r\n\r\n@RunWith(AndroidJUnit4.class)\r\n@SmallTest\r\npublic class IPv6TetheringCoordinatorTest {\r\n private static final String TEST_DNS_SERVER = \"2001:4860:4860::8888\";\r\n private static final String TEST_INTERFACE = \"test_rmnet0\";\r\n private static final String TEST_IPV6_ADDRESS = \"2001:db8::1/64\";\r\n private static final String TEST_IPV4_ADDRESS = \"192.168.100.1/24\";\r\n\r\n private IPv6TetheringCoordinator mIPv6TetheringCoordinator;\r\n private ArrayList<IpServer> mNotifyList;\r\n\r\n @Mock private SharedLog mSharedLog;\r\n\r\n @Before\r\n public void setUp() throws Exception {\r\n MockitoAnnotations.initMocks(this);\r\n when(mSharedLog.forSubComponent(anyString())).thenReturn(mSharedLog);\r\n mNotifyList = new ArrayList<IpServer>();\r\n mIPv6TetheringCoordinator = new IPv6TetheringCoordinator(mNotifyList, mSharedLog);\r\n }\r\n\r\n private UpstreamNetworkState createDualStackUpstream(final int transportType) {\r\n final Network network = mock(Network.class);\r\n final NetworkCapabilities netCap =\r\n new NetworkCapabilities.Builder().addTransportType(transportType).build();\r\n final InetAddress dns = InetAddresses.parseNumericAddress(TEST_DNS_SERVER);\r\n final LinkProperties linkProp = new LinkProperties();\r\n linkProp.setInterfaceName(TEST_INTERFACE);\r\n linkProp.addLinkAddress(new LinkAddress(TEST_IPV6_ADDRESS));\r\n linkProp.addLinkAddress(new LinkAddress(TEST_IPV4_ADDRESS));\r\n linkProp.addRoute(new RouteInfo(new IpPrefix(\"::/0\"), null, TEST_INTERFACE, RTN_UNICAST));\r\n linkProp.addRoute(new RouteInfo(new IpPrefix(\"0.0.0.0/0\"), null, TEST_INTERFACE,\r\n RTN_UNICAST));\r\n linkProp.addDnsServer(dns);\r\n return new UpstreamNetworkState(linkProp, netCap, network);\r\n }\r\n\r\n private void assertOnlyOneV6AddressAndNoV4(LinkProperties lp) {\r\n assertEquals(lp.getInterfaceName(), TEST_INTERFACE);\r\n assertFalse(lp.hasIpv4Address());\r\n final List<LinkAddress> addresses = lp.getLinkAddresses();\r\n assertEquals(addresses.size(), 1);\r\n final LinkAddress v6Address = addresses.get(0);\r\n assertEquals(v6Address, new LinkAddress(TEST_IPV6_ADDRESS));\r\n }\r\n\r\n @Test\r\n public void testUpdateIpv6Upstream() throws Exception {\r\n // 1. 
Add first IpServer.\r\n final IpServer firstServer = mock(IpServer.class);\r\n mNotifyList.add(firstServer);\r\n mIPv6TetheringCoordinator.addActiveDownstream(firstServer, STATE_TETHERED);\r\n verify(firstServer).sendMessage(IpServer.CMD_IPV6_TETHER_UPDATE, 0, 0, null);\r\n verifyNoMoreInteractions(firstServer);\r\n\r\n // 2. Add second IpServer and it would not have ipv6 tethering.\r\n final IpServer secondServer = mock(IpServer.class);\r\n mNotifyList.add(secondServer);\r\n mIPv6TetheringCoordinator.addActiveDownstream(secondServer, STATE_LOCAL_ONLY);\r\n verifyNoMoreInteractions(secondServer);\r\n reset(firstServer, secondServer);\r\n\r\n // 3. No upstream.\r\n mIPv6TetheringCoordinator.updateUpstreamNetworkState(null);\r\n verify(secondServer).sendMessage(IpServer.CMD_IPV6_TETHER_UPDATE, 0, 0, null);\r\n reset(firstServer, secondServer);\r\n\r\n // 4. Update ipv6 mobile upstream.\r\n final UpstreamNetworkState mobileUpstream = createDualStackUpstream(TRANSPORT_CELLULAR);\r\n final ArgumentCaptor<LinkProperties> lp = ArgumentCaptor.forClass(LinkProperties.class);\r\n mIPv6TetheringCoordinator.updateUpstreamNetworkState(mobileUpstream);\r\n verify(firstServer).sendMessage(eq(IpServer.CMD_IPV6_TETHER_UPDATE), eq(-1), eq(0),\r\n lp.capture());\r\n final LinkProperties v6OnlyLink = lp.getValue();\r\n assertOnlyOneV6AddressAndNoV4(v6OnlyLink);\r\n verifyNoMoreInteractions(firstServer);\r\n verifyNoMoreInteractions(secondServer);\r\n reset(firstServer, secondServer);\r\n\r\n // 5. Remove first IpServer.\r\n mNotifyList.remove(firstServer);\r\n mIPv6TetheringCoordinator.removeActiveDownstream(firstServer);\r\n verify(firstServer).sendMessage(IpServer.CMD_IPV6_TETHER_UPDATE, 0, 0, null);\r\n verify(secondServer).sendMessage(eq(IpServer.CMD_IPV6_TETHER_UPDATE), eq(-1), eq(0),\r\n lp.capture());\r\n final LinkProperties localOnlyLink = lp.getValue();\r\n assertNotNull(localOnlyLink);\r\n assertNotEquals(localOnlyLink, v6OnlyLink);\r\n reset(firstServer, secondServer);\r\n\r\n // 6. 
Remove second IpServer.\r\n mNotifyList.remove(secondServer);\r\n mIPv6TetheringCoordinator.removeActiveDownstream(secondServer);\r\n verifyNoMoreInteractions(firstServer);\r\n verify(secondServer).sendMessage(IpServer.CMD_IPV6_TETHER_UPDATE, 0, 0, null);\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6667505502700806, "alphanum_fraction": 0.6700226664543152, "avg_line_length": 33.47321319580078, "blob_id": "216d3c852de89b72906e5d6fb07647d3017acccb", "content_id": "19b161c360ef358536c19bc9fb7fef377fa20981", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 3973, "license_type": "permissive", "max_line_length": 110, "num_lines": 112, "path": "/test-runner/tests/src/android/test/TestCaseUtilTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2007 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage android.test;\r\n\r\nimport java.util.ArrayList;\r\nimport java.util.HashSet;\r\nimport junit.framework.Test;\r\nimport junit.framework.TestCase;\r\nimport junit.framework.TestSuite;\r\n\r\nimport java.util.List;\r\n\r\npublic class TestCaseUtilTest extends TestCase {\r\n\r\n @SuppressWarnings(\"unchecked\")\r\n private static List<String> getTestCaseNames(Test test) {\r\n List<Test> tests = (List<Test>) TestCaseUtil.getTests(test, false);\r\n List<String> testCaseNames = new ArrayList<>();\r\n for (Test aTest : tests) {\r\n testCaseNames.add(TestCaseUtil.getTestName(aTest));\r\n }\r\n return testCaseNames;\r\n }\r\n\r\n public void testGetTests_ForTestSuiteWithSuiteMethod() throws Exception {\r\n TestSuite testSuite = new TwoTestsInTestSuite();\r\n\r\n List<String> testCaseNames = getTestCaseNames(testSuite);\r\n\r\n assertEquals(0, testCaseNames.size());\r\n }\r\n \r\n public void testGetTests_ForTestCaseWithSuiteMethod() throws Exception {\r\n TestCase testCase = new OneTestTestCaseWithSuite();\r\n\r\n List<String> testCaseNames = getTestCaseNames(testCase);\r\n\r\n assertEquals(1, testCaseNames.size());\r\n assertTrue(testCaseNames.get(0).endsWith(\"testOne\"));\r\n }\r\n\r\n public void testInvokeSuiteMethodIfPossible_ForTestCase() throws Exception {\r\n Test test = TestCaseUtil.invokeSuiteMethodIfPossible(OneTestTestCase.class, new HashSet<>());\r\n assertNull(test);\r\n }\r\n\r\n public void testInvokeSuiteMethodIfPossible_ForTestSuiteWithSuiteMethod() throws Exception {\r\n Test test = TestCaseUtil.invokeSuiteMethodIfPossible(TwoTestsInTestSuite.class, new HashSet<>());\r\n assertNotNull(test);\r\n assertEquals(2, test.countTestCases());\r\n }\r\n\r\n public void testInvokeSuiteMethodIfPossible_ForTestCaseWithSuiteMethod() throws Exception {\r\n Test test = TestCaseUtil.invokeSuiteMethodIfPossible(OneTestTestCaseWithSuite.class, new HashSet<>());\r\n assertNotNull(test);\r\n assertEquals(1, test.countTestCases());\r\n }\r\n\r\n public void 
testReturnEmptyStringForTestSuiteWithNoName() throws Exception {\r\n assertEquals(\"\", TestCaseUtil.getTestName(new TestSuite()));\r\n }\r\n\r\n public static class OneTestTestCase extends TestCase {\r\n public void testOne() throws Exception {\r\n }\r\n }\r\n\r\n public static class OneTestTestCaseWithSuite extends TestCase {\r\n public static Test suite() {\r\n TestCase testCase = new OneTestTestCase();\r\n testCase.setName(\"testOne\");\r\n return testCase;\r\n }\r\n\r\n public void testOne() throws Exception {\r\n }\r\n\r\n public void testTwo() throws Exception {\r\n }\r\n }\r\n\r\n public static class OneTestTestSuite {\r\n public static Test suite() {\r\n TestSuite suite = new TestSuite(OneTestTestSuite.class.getName());\r\n suite.addTestSuite(OneTestTestCase.class);\r\n return suite;\r\n }\r\n }\r\n\r\n public static class TwoTestsInTestSuite extends TestSuite {\r\n public static Test suite() {\r\n TestSuite suite = new TestSuite(TwoTestsInTestSuite.class.getName());\r\n suite.addTestSuite(OneTestTestCase.class);\r\n suite.addTest(OneTestTestSuite.suite());\r\n return suite;\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.5262020826339722, "alphanum_fraction": 0.7547271847724915, "avg_line_length": 38.239131927490234, "blob_id": "6d13b788e64d76b4dbf0f9790a3e6ee5820661cb", "content_id": "486ddd4e052c688f750fb243752718c3bf697d1e", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 1851, "license_type": "permissive", "max_line_length": 82, "num_lines": 46, "path": "/libs/hwui/tests/scripts/prep_volantis.sh", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "#!/bin/bash\r\n\r\n# Copyright (C) 2015 The Android Open Source Project\r\n#\r\n# Licensed under the Apache License, Version 2.0 (the \"License\");\r\n# you may not use this file except in compliance with the License.\r\n# You may obtain a copy of the License at\r\n#\r\n# http://www.apache.org/licenses/LICENSE-2.0\r\n#\r\n# Unless required by applicable law or agreed to in writing, software\r\n# distributed under the License is distributed on an \"AS IS\" BASIS,\r\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n# See the License for the specific language governing permissions and\r\n# limitations under the License.\r\n\r\nadb root\r\nadb wait-for-device\r\nadb shell stop perfd\r\nadb shell stop thermal-engine\r\n\r\n# cpu possible frequencies\r\n# 204000 229500 255000 280500 306000 331500 357000 382500 408000 433500 459000\r\n# 484500 510000 535500 561000 586500 612000 637500 663000 688500 714000 739500\r\n# 765000 790500 816000 841500 867000 892500 918000 943500 969000 994500 1020000\r\n# 1122000 1224000 1326000 1428000 1530000 1632000 1734000 1836000 1938000\r\n# 2014500 2091000 2193000 2295000 2397000 2499000\r\n\r\nS=1326000\r\necho \"set cpu $cpu to $S hz\";\r\nadb shell \"echo userspace > /sys/devices/system/cpu/cpu0/cpufreq/scaling_governor\"\r\nadb shell \"echo $S > /sys/devices/system/cpu/cpu0/cpufreq/scaling_max_freq\"\r\nadb shell \"echo $S > /sys/devices/system/cpu/cpu0/cpufreq/scaling_min_freq\"\r\nadb shell \"echo $S > /sys/devices/system/cpu/cpu0/cpufreq/scaling_setspeed\"\r\n\r\n#disable hotplug\r\nadb shell \"echo 0 > /sys/devices/system/cpu/cpuquiet/tegra_cpuquiet/enable\"\r\n\r\n# gbus possible rates\r\n# 72000 108000 180000 252000 324000 396000 468000 540000 612000 648000\r\n# 684000 708000 756000 804000 852000 (kHz)\r\n\r\nS=324000000\r\necho \"set gpu to 
$S hz\"\r\nadb shell \"echo 1 > /d/clock/override.gbus/state\"\r\nadb shell \"echo $S > /d/clock/override.gbus/rate\"\r\n" }, { "alpha_fraction": 0.764976978302002, "alphanum_fraction": 0.764976978302002, "avg_line_length": 34.16666793823242, "blob_id": "2acce286398d5c43fa3a3a2474e14f9ae055a825", "content_id": "f33da02e9c7003e75503794216803892699a62ae", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 217, "license_type": "permissive", "max_line_length": 81, "num_lines": 6, "path": "/cmds/locksettings/locksettings", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "#!/system/bin/sh\r\n# Script to start \"locksettings\" on the device\r\n#\r\nbase=/system\r\nexport CLASSPATH=$base/framework/locksettings.jar\r\nexec app_process $base/bin com.android.commands.locksettings.LockSettingsCmd \"$@\"\r\n" }, { "alpha_fraction": 0.7313929200172424, "alphanum_fraction": 0.7405405640602112, "avg_line_length": 39.465518951416016, "blob_id": "e0abf216acaa7eeb681afee89ccd0a2f5cb70ed2", "content_id": "04ae7e4e12c09403609faaed7ce88951fa45a98a", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2405, "license_type": "permissive", "max_line_length": 98, "num_lines": 58, "path": "/tools/aapt2/text/Unicode.h", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2017 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#ifndef AAPT_TEXT_UNICODE_H\r\n#define AAPT_TEXT_UNICODE_H\r\n\r\n#include \"androidfw/StringPiece.h\"\r\n\r\nnamespace aapt {\r\nnamespace text {\r\n\r\n// Returns true if the Unicode codepoint has the XID_Start property, meaning it can be used as the\r\n// first character of a programming language identifier.\r\n// http://unicode.org/reports/tr31/#Default_Identifier_Syntax\r\n//\r\n// XID_Start is a Unicode Derived Core Property. It is a variation of the ID_Start\r\n// Derived Core Property, accounting for a few characters that, when normalized, yield valid\r\n// characters in the ID_Start set.\r\nbool IsXidStart(char32_t codepoint);\r\n\r\n// Returns true if the Unicode codepoint has the XID_Continue property, meaning it can be used in\r\n// any position of a programming language identifier, except the first.\r\n// http://unicode.org/reports/tr31/#Default_Identifier_Syntax\r\n//\r\n// XID_Continue is a Unicode Derived Core Property. 
It is a variation of the ID_Continue\r\n// Derived Core Property, accounting for a few characters that, when normalized, yield valid\r\n// characters in the ID_Continue set.\r\nbool IsXidContinue(char32_t codepoint);\r\n\r\n// Returns true if the Unicode codepoint has the White_Space property.\r\n// http://unicode.org/reports/tr44/#White_Space\r\nbool IsWhitespace(char32_t codepoint);\r\n\r\n// Returns true if the UTF8 string can be used as a Java identifier.\r\n// NOTE: This does not check against the set of reserved Java keywords.\r\nbool IsJavaIdentifier(const android::StringPiece& str);\r\n\r\n// Returns true if the UTF8 string can be used as the entry name of a resource name.\r\n// This is the `entry` part of package:type/entry.\r\nbool IsValidResourceEntryName(const android::StringPiece& str);\r\n\r\n} // namespace text\r\n} // namespace aapt\r\n\r\n#endif // AAPT_TEXT_UNICODE_H\r\n" }, { "alpha_fraction": 0.702473521232605, "alphanum_fraction": 0.7081272006034851, "avg_line_length": 27.52083396911621, "blob_id": "aba1948f4b4ac8a608f4e3d998c04a40510910e2", "content_id": "b66748c54d4f0f86e1cb0491e5c272b7d205292e", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 1415, "license_type": "permissive", "max_line_length": 75, "num_lines": 48, "path": "/packages/SystemUI/tests/src/com/android/systemui/qs/carrier/CellSignalStateTest.kt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2020 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.systemui.qs.carrier\r\n\r\nimport android.testing.AndroidTestingRunner\r\nimport androidx.test.filters.SmallTest\r\nimport com.android.systemui.SysuiTestCase\r\nimport org.junit.Assert.assertNotSame\r\nimport org.junit.Assert.assertSame\r\nimport org.junit.Test\r\nimport org.junit.runner.RunWith\r\n\r\n@RunWith(AndroidTestingRunner::class)\r\n@SmallTest\r\nclass CellSignalStateTest : SysuiTestCase() {\r\n\r\n @Test\r\n fun testChangeVisibility_sameObject() {\r\n val c = CellSignalState()\r\n\r\n val other = c.changeVisibility(c.visible)\r\n\r\n assertSame(c, other)\r\n }\r\n\r\n @Test\r\n fun testChangeVisibility_otherObject() {\r\n val c = CellSignalState()\r\n\r\n val other = c.changeVisibility(!c.visible)\r\n\r\n assertNotSame(c, other)\r\n }\r\n}" }, { "alpha_fraction": 0.5724138021469116, "alphanum_fraction": 0.5836660861968994, "avg_line_length": 29.66666603088379, "blob_id": "28b971c04ea5319bea95b0d433a789687fd028d3", "content_id": "0f6a83486054325486df2aadf3be48b57e5d6f49", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2755, "license_type": "permissive", "max_line_length": 75, "num_lines": 87, "path": "/tests/HwAccelerationTest/src/com/android/test/hwui/PathOpsActivity.java", "repo_name": 
"Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2010 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.test.hwui;\r\n\r\nimport android.app.Activity;\r\nimport android.content.Context;\r\nimport android.graphics.Canvas;\r\nimport android.graphics.Color;\r\nimport android.graphics.Paint;\r\nimport android.graphics.Path;\r\nimport android.os.Bundle;\r\nimport android.util.Log;\r\nimport android.view.View;\r\n\r\n@SuppressWarnings({\"UnusedDeclaration\"})\r\npublic class PathOpsActivity extends Activity {\r\n @Override\r\n protected void onCreate(Bundle savedInstanceState) {\r\n super.onCreate(savedInstanceState);\r\n final PathsView view = new PathsView(this);\r\n setContentView(view);\r\n }\r\n\r\n public static class PathsView extends View {\r\n private final Paint mPaint;\r\n private Path[] mPaths;\r\n private float mSize;\r\n\r\n\r\n public PathsView(Context c) {\r\n super(c);\r\n\r\n mPaint = new Paint();\r\n mPaint.setAntiAlias(true);\r\n mPaint.setStyle(Paint.Style.FILL);\r\n mPaint.setColor(Color.RED);\r\n }\r\n\r\n @Override\r\n protected void onSizeChanged(int w, int h, int oldw, int oldh) {\r\n super.onSizeChanged(w, h, oldw, oldh);\r\n\r\n Path.Op[] ops = Path.Op.values();\r\n mPaths = new Path[ops.length];\r\n\r\n mSize = w / (ops.length * 2.0f);\r\n\r\n Path p1 = new Path();\r\n p1.addRect(0.0f, 0.0f, mSize, mSize, Path.Direction.CW);\r\n\r\n Path p2 = new Path();\r\n p2.addCircle(mSize, mSize, mSize / 2.0f, Path.Direction.CW);\r\n\r\n for (int i = 0; i < ops.length; i++) {\r\n mPaths[i] = new Path();\r\n if (!mPaths[i].op(p1, p2, ops[i])) {\r\n Log.d(\"PathOps\", ops[i].name() + \" failed!\");\r\n }\r\n }\r\n }\r\n\r\n @Override\r\n protected void onDraw(Canvas canvas) {\r\n super.onDraw(canvas);\r\n\r\n canvas.translate(mSize * 0.2f, getHeight() / 2.0f);\r\n for (Path path : mPaths) {\r\n canvas.drawPath(path, mPaint);\r\n canvas.translate(mSize * 1.8f, 0.0f);\r\n }\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6202964782714844, "alphanum_fraction": 0.638540506362915, "avg_line_length": 29.321428298950195, "blob_id": "b2b389377900e160e00554988c0496f84b677455", "content_id": "e3723a7e10bc54436833f11f08b8ec24de2c6955", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1754, "license_type": "permissive", "max_line_length": 75, "num_lines": 56, "path": "/tests/HwAccelerationTest/src/com/android/test/hwui/MatrixActivity.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2010 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by 
applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.test.hwui;\r\n\r\nimport android.app.Activity;\r\nimport android.content.Context;\r\nimport android.graphics.Canvas;\r\nimport android.graphics.Paint;\r\nimport android.os.Bundle;\r\nimport android.util.Log;\r\nimport android.view.View;\r\n\r\n@SuppressWarnings({\"UnusedDeclaration\"})\r\npublic class MatrixActivity extends Activity {\r\n @Override\r\n protected void onCreate(Bundle savedInstanceState) {\r\n super.onCreate(savedInstanceState);\r\n\r\n setContentView(new MatrixView(this));\r\n }\r\n\r\n static class MatrixView extends View {\r\n MatrixView(Context c) {\r\n super(c);\r\n }\r\n\r\n @Override\r\n protected void onDraw(Canvas canvas) {\r\n super.onDraw(canvas);\r\n canvas.drawRGB(255, 255, 255);\r\n\r\n Log.d(\"Matrix\", \"m1=\" + canvas.getMatrix());\r\n\r\n canvas.save();\r\n canvas.translate(10.0f, 10.0f);\r\n Log.d(\"Matrix\", \"m2=\" + canvas.getMatrix());\r\n canvas.translate(20.0f, 20.0f);\r\n Log.d(\"Matrix\", \"m3=\" + canvas.getMatrix());\r\n canvas.restore();\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.7141312956809998, "alphanum_fraction": 0.7162929177284241, "avg_line_length": 31.0625, "blob_id": "33da79eb7b869272946e86b3febda781627f6de6", "content_id": "79bfcfd5ebd31e9be1cbc4a747c38edf32297130", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 3701, "license_type": "permissive", "max_line_length": 100, "num_lines": 112, "path": "/packages/SystemUI/tests/src/com/android/systemui/util/RingerModeLiveDataTest.kt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2020 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.systemui.util\r\n\r\nimport android.content.BroadcastReceiver\r\nimport android.content.IntentFilter\r\nimport android.os.UserHandle\r\nimport android.testing.AndroidTestingRunner\r\nimport android.testing.TestableLooper\r\nimport androidx.lifecycle.Observer\r\nimport androidx.test.filters.SmallTest\r\nimport com.android.systemui.SysuiTestCase\r\nimport com.android.systemui.broadcast.BroadcastDispatcher\r\nimport org.junit.After\r\nimport org.junit.Assert.assertTrue\r\nimport org.junit.Before\r\nimport org.junit.Test\r\nimport org.junit.runner.RunWith\r\nimport org.mockito.ArgumentCaptor\r\nimport org.mockito.Captor\r\nimport org.mockito.Mock\r\nimport org.mockito.Mockito\r\nimport org.mockito.Mockito.verify\r\nimport org.mockito.Mockito.verifyNoMoreInteractions\r\nimport org.mockito.MockitoAnnotations\r\nimport 
java.util.concurrent.Executor\r\n\r\n@SmallTest\r\n@RunWith(AndroidTestingRunner::class)\r\[email protected](setAsMainLooper = true)\r\nclass RingerModeLiveDataTest : SysuiTestCase() {\r\n\r\n companion object {\r\n private fun <T> capture(argumentCaptor: ArgumentCaptor<T>): T = argumentCaptor.capture()\r\n private fun <T> any(): T = Mockito.any()\r\n private fun <T> eq(value: T): T = Mockito.eq(value) ?: value\r\n private val INTENT = \"INTENT\"\r\n }\r\n\r\n @Mock\r\n private lateinit var broadcastDispatcher: BroadcastDispatcher\r\n @Mock\r\n private lateinit var valueSupplier: () -> Int\r\n @Mock\r\n private lateinit var observer: Observer<Int>\r\n @Captor\r\n private lateinit var broadcastReceiverCaptor: ArgumentCaptor<BroadcastReceiver>\r\n @Captor\r\n private lateinit var intentFilterCaptor: ArgumentCaptor<IntentFilter>\r\n\r\n // Run everything immediately\r\n private val executor = Executor { it.run() }\r\n private lateinit var liveData: RingerModeLiveData\r\n\r\n @Before\r\n fun setUp() {\r\n MockitoAnnotations.initMocks(this)\r\n\r\n liveData = RingerModeLiveData(broadcastDispatcher, executor, INTENT, valueSupplier)\r\n }\r\n\r\n @After\r\n fun tearDown() {\r\n liveData.removeObserver(observer)\r\n }\r\n\r\n @Test\r\n fun testInit_broadcastNotRegistered() {\r\n verifyNoMoreInteractions(broadcastDispatcher)\r\n }\r\n\r\n @Test\r\n fun testOnActive_broadcastRegistered() {\r\n liveData.observeForever(observer)\r\n verify(broadcastDispatcher).registerReceiver(any(), any(), eq(executor), eq(UserHandle.ALL))\r\n }\r\n\r\n @Test\r\n fun testOnActive_intentFilterHasIntent() {\r\n liveData.observeForever(observer)\r\n verify(broadcastDispatcher).registerReceiver(any(), capture(intentFilterCaptor), any(),\r\n any())\r\n assertTrue(intentFilterCaptor.value.hasAction(INTENT))\r\n }\r\n\r\n @Test\r\n fun testOnActive_valueObtained() {\r\n liveData.observeForever(observer)\r\n verify(valueSupplier).invoke()\r\n }\r\n\r\n @Test\r\n fun testOnInactive_broadcastUnregistered() {\r\n liveData.observeForever(observer)\r\n liveData.removeObserver(observer)\r\n verify(broadcastDispatcher).unregisterReceiver(any())\r\n }\r\n}" }, { "alpha_fraction": 0.6643247604370117, "alphanum_fraction": 0.6674473285675049, "avg_line_length": 34.08450698852539, "blob_id": "1d238b6a32e01acab8941acd819c1f5dc3da9e97", "content_id": "75a97768e7a8c8bcb256e0c4bda1b42d7f3f91ed", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2562, "license_type": "permissive", "max_line_length": 98, "num_lines": 71, "path": "/tests/Camera2Tests/SmartCamera/SimpleCamera/tests/src/androidx/media/filterfw/MffTestCase.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2013 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\npackage androidx.media.filterfw;\r\n\r\nimport android.os.Handler;\r\nimport 
android.os.HandlerThread;\r\nimport android.test.AndroidTestCase;\r\n\r\nimport junit.framework.TestCase;\r\n\r\nimport java.util.concurrent.Callable;\r\nimport java.util.concurrent.FutureTask;\r\n\r\n/**\r\n * A {@link TestCase} for testing objects requiring {@link MffContext}. This test case can only be\r\n * used to test the functionality that does not rely on GL support and camera.\r\n */\r\npublic class MffTestCase extends AndroidTestCase {\r\n\r\n private HandlerThread mMffContextHandlerThread;\r\n private MffContext mMffContext;\r\n\r\n @Override\r\n protected void setUp() throws Exception {\r\n super.setUp();\r\n // MffContext needs to be created on a separate thread to allow MFF to post Runnable's.\r\n mMffContextHandlerThread = new HandlerThread(\"MffContextThread\");\r\n mMffContextHandlerThread.start();\r\n Handler handler = new Handler(mMffContextHandlerThread.getLooper());\r\n FutureTask<MffContext> task = new FutureTask<MffContext>(new Callable<MffContext>() {\r\n @Override\r\n public MffContext call() throws Exception {\r\n MffContext.Config config = new MffContext.Config();\r\n config.requireCamera = false;\r\n config.requireOpenGL = false;\r\n config.forceNoGL = true;\r\n return new MffContext(getContext(), config);\r\n }\r\n });\r\n handler.post(task);\r\n // Wait for the context to be created on the handler thread.\r\n mMffContext = task.get();\r\n }\r\n\r\n @Override\r\n protected void tearDown() throws Exception {\r\n mMffContextHandlerThread.getLooper().quit();\r\n mMffContextHandlerThread = null;\r\n mMffContext.release();\r\n mMffContext = null;\r\n super.tearDown();\r\n }\r\n\r\n protected MffContext getMffContext() {\r\n return mMffContext;\r\n }\r\n\r\n}\r\n" }, { "alpha_fraction": 0.6397935152053833, "alphanum_fraction": 0.6521048545837402, "avg_line_length": 41.289398193359375, "blob_id": "a843a331f0a6e231b7264b6cf0a42868a9a0f27b", "content_id": "fd7d8b6d8fa14349b0ffa319b9d41dbc6db623e6", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 15108, "license_type": "permissive", "max_line_length": 100, "num_lines": 349, "path": "/services/tests/uiservicestests/src/com/android/server/notification/ScheduleConditionProviderTest.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "package com.android.server.notification;\r\n\r\nimport static org.junit.Assert.assertEquals;\r\nimport static org.junit.Assert.assertFalse;\r\nimport static org.junit.Assert.assertTrue;\r\nimport static org.mockito.Mockito.mock;\r\nimport static org.mockito.Mockito.spy;\r\n\r\nimport android.app.Application;\r\nimport android.content.Intent;\r\nimport android.net.Uri;\r\nimport android.service.notification.Condition;\r\nimport android.service.notification.ScheduleCalendar;\r\nimport android.service.notification.ZenModeConfig;\r\nimport android.testing.AndroidTestingRunner;\r\nimport android.testing.TestableLooper.RunWithLooper;\r\n\r\nimport androidx.test.filters.SmallTest;\r\n\r\nimport com.android.server.UiServiceTestCase;\r\n\r\nimport org.junit.Before;\r\nimport org.junit.Test;\r\nimport org.junit.runner.RunWith;\r\nimport org.mockito.MockitoAnnotations;\r\n\r\nimport java.util.Calendar;\r\nimport java.util.GregorianCalendar;\r\n\r\n@RunWith(AndroidTestingRunner.class)\r\n@SmallTest\r\n@RunWithLooper\r\npublic class ScheduleConditionProviderTest extends UiServiceTestCase {\r\n\r\n ScheduleConditionProvider mService;\r\n\r\n @Before\r\n public void setUp() 
throws Exception {\r\n MockitoAnnotations.initMocks(this);\r\n\r\n Intent startIntent =\r\n new Intent(\"com.android.server.notification.ScheduleConditionProvider\");\r\n startIntent.setPackage(\"android\");\r\n ScheduleConditionProvider service = new ScheduleConditionProvider();\r\n service.attach(\r\n getContext(),\r\n null, // ActivityThread not actually used in Service\r\n ScheduleConditionProvider.class.getName(),\r\n null, // token not needed when not talking with the activity manager\r\n mock(Application.class),\r\n null // mocked services don't talk with the activity manager\r\n );\r\n service.onCreate();\r\n service.onBind(startIntent);\r\n mService = spy(service);\r\n }\r\n\r\n @Test\r\n public void testIsValidConditionId_incomplete() throws Exception {\r\n Uri badConditionId = Uri.EMPTY;\r\n assertFalse(mService.isValidConditionId(badConditionId));\r\n assertEquals(Condition.STATE_ERROR,\r\n mService.evaluateSubscriptionLocked(badConditionId, null, 0, 1000).state);\r\n }\r\n\r\n @Test\r\n public void testIsValidConditionId() throws Exception {\r\n ZenModeConfig.ScheduleInfo info = new ZenModeConfig.ScheduleInfo();\r\n info.days = new int[] {1, 2, 4};\r\n info.startHour = 8;\r\n info.startMinute = 56;\r\n info.nextAlarm = 1000;\r\n info.exitAtAlarm = true;\r\n info.endHour = 12;\r\n info.endMinute = 9;\r\n Uri conditionId = ZenModeConfig.toScheduleConditionId(info);\r\n assertTrue(mService.isValidConditionId(conditionId));\r\n }\r\n\r\n @Test\r\n public void testEvaluateSubscription_noAlarmExit_InSchedule() {\r\n Calendar now = getNow();\r\n\r\n // Schedule - 1 hour long; starts now\r\n ZenModeConfig.ScheduleInfo info = new ZenModeConfig.ScheduleInfo();\r\n info.days = new int[] {Calendar.FRIDAY};\r\n info.startHour = now.get(Calendar.HOUR_OF_DAY);\r\n info.startMinute = now.get(Calendar.MINUTE);\r\n info.nextAlarm = 0;\r\n info.exitAtAlarm = false;\r\n info.endHour = now.get(Calendar.HOUR_OF_DAY) + 1;\r\n info.endMinute = info.startMinute;\r\n Uri conditionId = ZenModeConfig.toScheduleConditionId(info);\r\n ScheduleCalendar cal = new ScheduleCalendar();\r\n cal.setSchedule(info);\r\n assertTrue(cal.isInSchedule(now.getTimeInMillis()));\r\n\r\n Condition condition = mService.evaluateSubscriptionLocked(\r\n conditionId, cal, now.getTimeInMillis(), now.getTimeInMillis() + 1000);\r\n\r\n assertEquals(Condition.STATE_TRUE, condition.state);\r\n }\r\n\r\n @Test\r\n public void testEvaluateSubscription_noAlarmExit_InScheduleSnoozed() {\r\n Calendar now = getNow();\r\n\r\n // Schedule - 1 hour long; starts now\r\n ZenModeConfig.ScheduleInfo info = new ZenModeConfig.ScheduleInfo();\r\n info.days = new int[] {Calendar.FRIDAY};\r\n info.startHour = now.get(Calendar.HOUR_OF_DAY);\r\n info.startMinute = now.get(Calendar.MINUTE);\r\n info.nextAlarm = 0;\r\n info.exitAtAlarm = false;\r\n info.endHour = now.get(Calendar.HOUR_OF_DAY) + 1;\r\n info.endMinute = info.startMinute;\r\n Uri conditionId = ZenModeConfig.toScheduleConditionId(info);\r\n ScheduleCalendar cal = new ScheduleCalendar();\r\n cal.setSchedule(info);\r\n assertTrue(cal.isInSchedule(now.getTimeInMillis()));\r\n\r\n mService.addSnoozed(conditionId);\r\n\r\n Condition condition = mService.evaluateSubscriptionLocked(\r\n conditionId, cal, now.getTimeInMillis(), now.getTimeInMillis() + 1000);\r\n\r\n assertEquals(Condition.STATE_FALSE, condition.state);\r\n }\r\n\r\n @Test\r\n public void testEvaluateSubscription_noAlarmExit_beforeSchedule() {\r\n Calendar now = new GregorianCalendar();\r\n now.set(Calendar.HOUR_OF_DAY, 14);\r\n 
now.set(Calendar.MINUTE, 15);\r\n now.set(Calendar.SECOND, 59);\r\n now.set(Calendar.MILLISECOND, 0);\r\n now.set(Calendar.DAY_OF_WEEK, Calendar.FRIDAY);\r\n\r\n // Schedule - 1 hour long; starts in 1 second\r\n ZenModeConfig.ScheduleInfo info = new ZenModeConfig.ScheduleInfo();\r\n info.days = new int[] {Calendar.FRIDAY};\r\n info.startHour = now.get(Calendar.HOUR_OF_DAY);\r\n info.startMinute = now.get(Calendar.MINUTE) + 1;\r\n info.nextAlarm = 0;\r\n info.exitAtAlarm = false;\r\n info.endHour = now.get(Calendar.HOUR_OF_DAY) + 1;\r\n info.endMinute = info.startMinute;\r\n Uri conditionId = ZenModeConfig.toScheduleConditionId(info);\r\n ScheduleCalendar cal = new ScheduleCalendar();\r\n cal.setSchedule(info);\r\n\r\n Condition condition = mService.evaluateSubscriptionLocked(\r\n conditionId, cal, now.getTimeInMillis(), now.getTimeInMillis() + 1000);\r\n\r\n assertEquals(Condition.STATE_FALSE, condition.state);\r\n }\r\n\r\n @Test\r\n public void testEvaluateSubscription_noAlarmExit_endSchedule() {\r\n Calendar now = getNow();\r\n\r\n // Schedule - 1 hour long; ends now\r\n ZenModeConfig.ScheduleInfo info = new ZenModeConfig.ScheduleInfo();\r\n info.days = new int[] {Calendar.FRIDAY};\r\n info.startHour = now.get(Calendar.HOUR_OF_DAY) - 1;\r\n info.startMinute = now.get(Calendar.MINUTE);\r\n info.nextAlarm = 0;\r\n info.exitAtAlarm = false;\r\n info.endHour = now.get(Calendar.HOUR_OF_DAY);\r\n info.endMinute = now.get(Calendar.MINUTE);\r\n Uri conditionId = ZenModeConfig.toScheduleConditionId(info);\r\n ScheduleCalendar cal = new ScheduleCalendar();\r\n cal.setSchedule(info);\r\n\r\n Condition condition = mService.evaluateSubscriptionLocked(\r\n conditionId, cal, now.getTimeInMillis(), now.getTimeInMillis() + 1000);\r\n\r\n assertEquals(Condition.STATE_FALSE, condition.state);\r\n }\r\n\r\n @Test\r\n public void testEvaluateSubscription_alarmSetBeforeInSchedule() {\r\n Calendar now = getNow();\r\n\r\n // Schedule - 1 hour long; starts now, ends with alarm\r\n ZenModeConfig.ScheduleInfo info = getScheduleEndsInHour(now);\r\n Uri conditionId = ZenModeConfig.toScheduleConditionId(info);\r\n ScheduleCalendar cal = new ScheduleCalendar();\r\n cal.setSchedule(info);\r\n\r\n // an hour before start, update with an alarm that will fire during the schedule\r\n mService.evaluateSubscriptionLocked(\r\n conditionId, cal, now.getTimeInMillis() - 1000, now.getTimeInMillis() + 1000);\r\n\r\n // at start, should be in dnd\r\n Condition condition = mService.evaluateSubscriptionLocked(\r\n conditionId, cal, now.getTimeInMillis(), now.getTimeInMillis() + 1000);\r\n assertEquals(Condition.STATE_TRUE, condition.state);\r\n\r\n // at alarm fire time, should exit dnd\r\n assertTrue(cal.isInSchedule(now.getTimeInMillis() + 1000));\r\n assertTrue(\"\" + info.nextAlarm + \" \" + now.getTimeInMillis(),\r\n cal.shouldExitForAlarm(now.getTimeInMillis() + 1000));\r\n condition = mService.evaluateSubscriptionLocked(\r\n conditionId, cal, now.getTimeInMillis() + 1000, 0);\r\n assertEquals(Condition.STATE_FALSE, condition.state);\r\n }\r\n\r\n @Test\r\n public void testEvaluateSubscription_alarmSetInSchedule() {\r\n Calendar now = getNow();\r\n\r\n // Schedule - 1 hour long; starts now, ends with alarm\r\n ZenModeConfig.ScheduleInfo info = getScheduleEndsInHour(now);\r\n Uri conditionId = ZenModeConfig.toScheduleConditionId(info);\r\n ScheduleCalendar cal = new ScheduleCalendar();\r\n cal.setSchedule(info);\r\n\r\n // at start, should be in dnd\r\n Condition condition = mService.evaluateSubscriptionLocked(\r\n 
conditionId, cal, now.getTimeInMillis(), 0);\r\n assertEquals(Condition.STATE_TRUE, condition.state);\r\n\r\n // in schedule, update with alarm time, should be in dnd\r\n condition = mService.evaluateSubscriptionLocked(\r\n conditionId, cal, now.getTimeInMillis() + 500, now.getTimeInMillis() + 1000);\r\n assertEquals(Condition.STATE_TRUE, condition.state);\r\n\r\n // at alarm fire time, should exit dnd\r\n assertTrue(cal.isInSchedule(now.getTimeInMillis() + 1000));\r\n assertTrue(\"\" + info.nextAlarm + \" \" + now.getTimeInMillis(),\r\n cal.shouldExitForAlarm(now.getTimeInMillis() + 1000));\r\n condition = mService.evaluateSubscriptionLocked(\r\n conditionId, cal, now.getTimeInMillis() + 1000, 0);\r\n assertEquals(Condition.STATE_FALSE, condition.state);\r\n }\r\n\r\n @Test\r\n public void testEvaluateSubscription_earlierAlarmSet() {\r\n Calendar now = getNow();\r\n\r\n // Schedule - 1 hour long; starts now, ends with alarm\r\n ZenModeConfig.ScheduleInfo info = getScheduleEndsInHour(now);\r\n Uri conditionId = ZenModeConfig.toScheduleConditionId(info);\r\n ScheduleCalendar cal = new ScheduleCalendar();\r\n cal.setSchedule(info);\r\n\r\n // at start, should be in dnd, alarm in 2000 ms\r\n Condition condition = mService.evaluateSubscriptionLocked(\r\n conditionId, cal, now.getTimeInMillis(), now.getTimeInMillis() + 2000);\r\n assertEquals(Condition.STATE_TRUE, condition.state);\r\n\r\n // in schedule, update with earlier alarm time, should be in dnd\r\n condition = mService.evaluateSubscriptionLocked(\r\n conditionId, cal, now.getTimeInMillis() + 500, now.getTimeInMillis() + 1000);\r\n assertEquals(Condition.STATE_TRUE, condition.state);\r\n\r\n // at earliest alarm fire time, should exit dnd\r\n assertTrue(cal.isInSchedule(now.getTimeInMillis() + 1000));\r\n assertTrue(\"\" + info.nextAlarm + \" \" + now.getTimeInMillis(),\r\n cal.shouldExitForAlarm(now.getTimeInMillis() + 1000));\r\n condition = mService.evaluateSubscriptionLocked(\r\n conditionId, cal, now.getTimeInMillis() + 1000, 0);\r\n assertEquals(Condition.STATE_FALSE, condition.state);\r\n }\r\n\r\n @Test\r\n public void testEvaluateSubscription_laterAlarmSet() {\r\n Calendar now = getNow();\r\n\r\n // Schedule - 1 hour long; starts now, ends with alarm\r\n ZenModeConfig.ScheduleInfo info = getScheduleEndsInHour(now);\r\n Uri conditionId = ZenModeConfig.toScheduleConditionId(info);\r\n ScheduleCalendar cal = new ScheduleCalendar();\r\n cal.setSchedule(info);\r\n\r\n // at start, should be in dnd, alarm in 500 ms\r\n Condition condition = mService.evaluateSubscriptionLocked(\r\n conditionId, cal, now.getTimeInMillis(), now.getTimeInMillis() + 500);\r\n assertEquals(Condition.STATE_TRUE, condition.state);\r\n\r\n // in schedule, update with later alarm time, should be in dnd\r\n condition = mService.evaluateSubscriptionLocked(\r\n conditionId, cal, now.getTimeInMillis() + 250, now.getTimeInMillis() + 1000);\r\n assertEquals(Condition.STATE_TRUE, condition.state);\r\n\r\n // at earliest alarm fire time, should exit dnd\r\n assertTrue(cal.isInSchedule(now.getTimeInMillis() + 500));\r\n assertTrue(\"\" + info.nextAlarm + \" \" + now.getTimeInMillis(),\r\n cal.shouldExitForAlarm(now.getTimeInMillis() + 500));\r\n condition = mService.evaluateSubscriptionLocked(\r\n conditionId, cal, now.getTimeInMillis() + 500, 0);\r\n assertEquals(Condition.STATE_FALSE, condition.state);\r\n }\r\n\r\n @Test\r\n public void testEvaluateSubscription_alarmCanceled() {\r\n Calendar now = getNow();\r\n\r\n // Schedule - 1 hour long; starts now, ends with 
alarm\r\n ZenModeConfig.ScheduleInfo info = getScheduleEndsInHour(now);\r\n Uri conditionId = ZenModeConfig.toScheduleConditionId(info);\r\n ScheduleCalendar cal = new ScheduleCalendar();\r\n cal.setSchedule(info);\r\n\r\n // at start, should be in dnd, alarm in 500 ms\r\n Condition condition = mService.evaluateSubscriptionLocked(\r\n conditionId, cal, now.getTimeInMillis(), now.getTimeInMillis() + 500);\r\n assertEquals(Condition.STATE_TRUE, condition.state);\r\n\r\n // in schedule, cancel alarm\r\n condition = mService.evaluateSubscriptionLocked(\r\n conditionId, cal, now.getTimeInMillis() + 250, 0);\r\n assertEquals(Condition.STATE_TRUE, condition.state);\r\n\r\n // at previous alarm time, should not exit DND\r\n assertTrue(cal.isInSchedule(now.getTimeInMillis() + 500));\r\n assertFalse(cal.shouldExitForAlarm(now.getTimeInMillis() + 500));\r\n condition = mService.evaluateSubscriptionLocked(\r\n conditionId, cal, now.getTimeInMillis() + 500, 0);\r\n assertEquals(Condition.STATE_TRUE, condition.state);\r\n\r\n // end of schedule, exit DND\r\n now.add(Calendar.HOUR_OF_DAY, 1);\r\n condition = mService.evaluateSubscriptionLocked(conditionId, cal, now.getTimeInMillis(), 0);\r\n assertEquals(Condition.STATE_FALSE, condition.state);\r\n }\r\n\r\n private Calendar getNow() {\r\n Calendar now = new GregorianCalendar();\r\n now.set(Calendar.HOUR_OF_DAY, 14);\r\n now.set(Calendar.MINUTE, 16);\r\n now.set(Calendar.SECOND, 0);\r\n now.set(Calendar.MILLISECOND, 0);\r\n now.set(Calendar.DAY_OF_WEEK, Calendar.FRIDAY);\r\n return now;\r\n }\r\n\r\n private ZenModeConfig.ScheduleInfo getScheduleEndsInHour(Calendar now) {\r\n ZenModeConfig.ScheduleInfo info = new ZenModeConfig.ScheduleInfo();\r\n info.days = new int[] {Calendar.FRIDAY};\r\n info.startHour = now.get(Calendar.HOUR_OF_DAY);\r\n info.startMinute = now.get(Calendar.MINUTE);\r\n info.exitAtAlarm = true;\r\n info.endHour = now.get(Calendar.HOUR_OF_DAY) + 1;\r\n info.endMinute = now.get(Calendar.MINUTE);\r\n return info;\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.7094339728355408, "alphanum_fraction": 0.7245283126831055, "avg_line_length": 24.5, "blob_id": "f13212ff1b73211834947ea1fe12f89137d79de6", "content_id": "67d9412d84a19867072608de33e3f55734766335", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 265, "license_type": "permissive", "max_line_length": 48, "num_lines": 10, "path": "/cmds/device_config/Android.mk", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "# Copyright 2018 The Android Open Source Project\r\n#\r\nLOCAL_PATH:= $(call my-dir)\r\n\r\ninclude $(CLEAR_VARS)\r\nLOCAL_MODULE := device_config\r\nLOCAL_SRC_FILES := device_config\r\nLOCAL_MODULE_CLASS := EXECUTABLES\r\nLOCAL_MODULE_TAGS := optional\r\ninclude $(BUILD_PREBUILT)\r\n" }, { "alpha_fraction": 0.6054502129554749, "alphanum_fraction": 0.6338862776756287, "avg_line_length": 21.44444465637207, "blob_id": "b6b0293bf3cbbbf9138aae8f4fccaabc28713f71", "content_id": "5ad364dedf75232fad4f0f6e220e88f9d52c48e8", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 844, "license_type": "permissive", "max_line_length": 110, "num_lines": 36, "path": "/tools/signedconfig/verify_b64.sh", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "#!/bin/bash\r\n\r\n# Script to verify signatures, with both signature & data given in b64\r\n# 
Args:\r\n# 1. data (base64 encoded)\r\n# 2. signature (base64 encoded)\r\n# The arg values can be taken from the debug log for SignedConfigService when verbose logging is\r\n# enabled.\r\n\r\nfunction verify() {\r\n D=${1}\r\n S=${2}\r\n K=${3}\r\n echo Trying ${K}\r\n openssl dgst -sha256 -verify $(dirname $0)/${K} -signature <(echo ${S} | base64 -d) <(echo ${D} | base64 -d)\r\n}\r\n\r\n\r\nPROD_KEY_NAME=prod_public.pem\r\nDEBUG_KEY_NAME=debug_public.pem\r\nSIGNATURE=\"$2\"\r\nDATA=\"$1\"\r\n\r\necho DATA: ${DATA}\r\necho SIGNATURE: ${SIGNATURE}\r\n\r\nif verify \"${DATA}\" \"${SIGNATURE}\" \"${PROD_KEY_NAME}\"; then\r\n echo Verified with ${PROD_KEY_NAME}\r\n exit 0\r\nfi\r\n\r\nif verify \"${DATA}\" \"${SIGNATURE}\" \"${DEBUG_KEY_NAME}\"; then\r\n echo Verified with ${DEBUG_KEY_NAME}\r\n exit 0\r\nfi\r\nexit 1\r\n" }, { "alpha_fraction": 0.7837445735931396, "alphanum_fraction": 0.7837445735931396, "avg_line_length": 52.342105865478516, "blob_id": "26950b30bba4768ba58d08f353b997b112068a95", "content_id": "8c34fa218b644b5d2e4f22aa8f028dab460885d7", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "HTML", "length_bytes": 2067, "license_type": "permissive", "max_line_length": 99, "num_lines": 38, "path": "/core/java/android/os/health/package.html", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "<html>\r\n<body>\r\n\r\nThe android.os.health package contains a set of classes to provide data\r\nto track the system resources of applications.\r\n<p>\r\nApplications running in the background are responsible for a significant amount \r\nof battery usage on a typical android device. There are several things that \r\napplications can do in order to reduce their impact. For example, by using \r\n{@link android.app.job.JobScheduler JobScheduler}, an application does not need \r\nto independently monitor whether the network is available, whether the device is \r\nplugged in, etc. In addition to being simpler to use, the application's \r\nservices are only started when the required conditions have been met. But even \r\nwhen using the appropriate helper APIs, applications still can reduce their \r\nfootprint. This package provides more insight into what is going on behind the \r\nscenes when an application is running.\r\n<p>\r\nApplication data is tracked by which user id (uid) is using particular \r\nresources. A snapshot of an application's measurements can be taken with the\r\n{@link android.os.health.SystemHealthManager#takeMyUidSnapshot() SystemHealth.takeMyUidSnapshot()} \r\nmethod. The {@link android.os.health.HealthStats} object returned contains the \r\nstatistics.\r\n<p>\r\n<b>HealthStats</b><br>\r\nIn order to be returned efficiently, the {@link android.os.health.HealthStats} \r\nclass uses a set of int keys to identify the data returned. The\r\n{@link android.os.health.UidHealthStats}, {@link android.os.health.PidHealthStats},\r\n{@link android.os.health.PackageHealthStats} , {@link android.os.health.ProcessHealthStats},\r\nand {@link android.os.health.ServiceHealthStats} classes provide those constants.\r\nEach {@link android.os.health.HealthStats} object will be associated with\r\nexactly one of those classes. 
The object returned from\r\n{@link android.os.health.SystemHealthManager#takeMyUidSnapshot() SystemHealth.takeMyUidSnapshot()}\r\nwill be using the {@link android.os.health.UidHealthStats} keys, as it contains all\r\nof the data available for that uid.\r\n\r\n\r\n</body>\r\n</html>\r\n\r\n" }, { "alpha_fraction": 0.6877636909484863, "alphanum_fraction": 0.6925858855247498, "avg_line_length": 34.065216064453125, "blob_id": "cdacc0f6d83589f44b7d0bdeb3030892d8af2959", "content_id": "d2f1bb978cffc4db0cfe3ecb2679a785f662c3a3", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1659, "license_type": "permissive", "max_line_length": 100, "num_lines": 46, "path": "/tests/VectorDrawableTest/src/com/android/test/dynamic/VectorDrawableAnimation.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2014 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file except\r\n * in compliance with the License. You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software distributed under the License\r\n * is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express\r\n * or implied. See the License for the specific language governing permissions and limitations under\r\n * the License.\r\n */\r\n\r\npackage com.android.test.dynamic;\r\n\r\nimport android.animation.ValueAnimator;\r\nimport android.app.Activity;\r\nimport android.graphics.drawable.AnimationDrawable;\r\nimport android.os.Bundle;\r\nimport android.view.View;\r\nimport android.widget.Button;\r\n\r\npublic class VectorDrawableAnimation extends Activity {\r\n private static final String LOGCAT = \"VectorDrawableAnimation\";\r\n\r\n @Override\r\n protected void onCreate(Bundle savedInstanceState) {\r\n super.onCreate(savedInstanceState);\r\n\r\n final Button button = new Button(this);\r\n button.setBackgroundResource(R.drawable.animation_drawable_vector);\r\n\r\n button.setOnClickListener(new View.OnClickListener() {\r\n @Override\r\n public void onClick(View v) {\r\n AnimationDrawable frameAnimation = (AnimationDrawable) v.getBackground();\r\n // Start the animation (looped playback by default).\r\n frameAnimation.start();\r\n }\r\n });\r\n\r\n setContentView(button);\r\n }\r\n\r\n}\r\n" }, { "alpha_fraction": 0.6178861856460571, "alphanum_fraction": 0.6412601470947266, "avg_line_length": 27.81818199157715, "blob_id": "d56c9b70e8c0ec895d921f957cfc3999579043b6", "content_id": "f652fd36c26d365b3fa12ae4f4726ebd0a063e04", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1968, "license_type": "permissive", "max_line_length": 84, "num_lines": 66, "path": "/tools/aapt2/filter/AbiFilter_test.cpp", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2017 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the 
License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#include \"filter/AbiFilter.h\"\r\n\r\n#include <string>\r\n\r\n#include \"gtest/gtest.h\"\r\n\r\nnamespace aapt {\r\nnamespace {\r\n\r\nusing ::aapt::configuration::Abi;\r\n\r\nstruct TestData {\r\n std::string path;\r\n bool kept;\r\n};\r\n\r\nconst TestData kTestData[] = {\r\n /* Keep. */\r\n {\"lib/mips/libnative.so\", true},\r\n {\"not/native/file.txt\", true},\r\n // Not sure if this is a valid use case.\r\n {\"lib/listing.txt\", true},\r\n {\"lib/mips/foo/bar/baz.so\", true},\r\n {\"lib/mips/x86/foo.so\", true},\r\n /* Discard. */\r\n {\"lib/mips_horse/foo.so\", false},\r\n {\"lib/horse_mips/foo.so\", false},\r\n {\"lib/mips64/armeabi-v7a/foo.so\", false},\r\n {\"lib/mips64/x86_64/x86.so\", false},\r\n {\"lib/x86/libnative.so\", false},\r\n {\"lib/x86/foo/bar/baz.so\", false},\r\n {\"lib/x86/x86/foo.so\", false},\r\n {\"lib/x86_horse/foo.so\", false},\r\n {\"lib/horse_x86/foo.so\", false},\r\n {\"lib/x86/armeabi-v7a/foo.so\", false},\r\n {\"lib/x86_64/x86_64/x86.so\", false},\r\n};\r\n\r\nclass AbiFilterTest : public ::testing::TestWithParam<TestData> {};\r\n\r\nTEST_P(AbiFilterTest, Keep) {\r\n auto mips = AbiFilter::FromAbiList({Abi::kMips});\r\n const TestData& data = GetParam();\r\n EXPECT_EQ(mips->Keep(data.path), data.kept);\r\n}\r\n\r\nINSTANTIATE_TEST_CASE_P(NativePaths, AbiFilterTest, ::testing::ValuesIn(kTestData));\r\n\r\n} // namespace\r\n} // namespace aapt\r\n" }, { "alpha_fraction": 0.5777451992034912, "alphanum_fraction": 0.5898994207382202, "avg_line_length": 38.1008415222168, "blob_id": "199d8289d12ba5da7f7565131b933a93eba0bf3f", "content_id": "2e9c40a19c97a3a9c13cc240347bbc70309e7ca9", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4772, "license_type": "permissive", "max_line_length": 77, "num_lines": 119, "path": "/tools/aapt2/optimize/VersionCollapser_test.cpp", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2016 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\n#include \"optimize/VersionCollapser.h\"\r\n\r\n#include \"test/Test.h\"\r\n\r\nusing android::StringPiece;\r\n\r\nnamespace aapt {\r\n\r\nstatic std::unique_ptr<ResourceTable> BuildTableWithConfigs(\r\n const StringPiece& name, std::initializer_list<std::string> list) {\r\n test::ResourceTableBuilder builder;\r\n for (const std::string& item : list) {\r\n builder.AddSimple(name, test::ParseConfigOrDie(item));\r\n }\r\n return builder.Build();\r\n}\r\n\r\nTEST(VersionCollapserTest, CollapseVersions) {\r\n std::unique_ptr<IAaptContext> context =\r\n test::ContextBuilder().SetMinSdkVersion(7).Build();\r\n\r\n const StringPiece 
res_name = \"@android:string/foo\";\r\n\r\n std::unique_ptr<ResourceTable> table = BuildTableWithConfigs(\r\n res_name,\r\n {\"land-v4\", \"land-v5\", \"sw600dp\", \"land-v6\", \"land-v14\", \"land-v21\"});\r\n\r\n VersionCollapser collapser;\r\n ASSERT_TRUE(collapser.Consume(context.get(), table.get()));\r\n\r\n // These should be removed.\r\n EXPECT_EQ(nullptr,\r\n test::GetValueForConfig<Id>(table.get(), res_name,\r\n test::ParseConfigOrDie(\"land-v4\")));\r\n EXPECT_EQ(nullptr,\r\n test::GetValueForConfig<Id>(table.get(), res_name,\r\n test::ParseConfigOrDie(\"land-v5\")));\r\n // This one should be removed because it was renamed to 'land', with the\r\n // version dropped.\r\n EXPECT_EQ(nullptr,\r\n test::GetValueForConfig<Id>(table.get(), res_name,\r\n test::ParseConfigOrDie(\"land-v6\")));\r\n\r\n // These should remain.\r\n EXPECT_NE(nullptr,\r\n test::GetValueForConfig<Id>(table.get(), res_name,\r\n test::ParseConfigOrDie(\"sw600dp\")));\r\n\r\n // 'land' should be present because it was renamed from 'land-v6'.\r\n EXPECT_NE(nullptr,\r\n test::GetValueForConfig<Id>(table.get(), res_name,\r\n test::ParseConfigOrDie(\"land\")));\r\n EXPECT_NE(nullptr,\r\n test::GetValueForConfig<Id>(table.get(), res_name,\r\n test::ParseConfigOrDie(\"land-v14\")));\r\n EXPECT_NE(nullptr,\r\n test::GetValueForConfig<Id>(table.get(), res_name,\r\n test::ParseConfigOrDie(\"land-v21\")));\r\n}\r\n\r\nTEST(VersionCollapserTest, CollapseVersionsWhenMinSdkIsHighest) {\r\n std::unique_ptr<IAaptContext> context =\r\n test::ContextBuilder().SetMinSdkVersion(21).Build();\r\n\r\n const StringPiece res_name = \"@android:string/foo\";\r\n\r\n std::unique_ptr<ResourceTable> table = BuildTableWithConfigs(\r\n res_name, {\"land-v4\", \"land-v5\", \"sw600dp\", \"land-v6\", \"land-v14\",\r\n \"land-v21\", \"land-v22\"});\r\n VersionCollapser collapser;\r\n ASSERT_TRUE(collapser.Consume(context.get(), table.get()));\r\n\r\n // These should all be removed.\r\n EXPECT_EQ(nullptr,\r\n test::GetValueForConfig<Id>(table.get(), res_name,\r\n test::ParseConfigOrDie(\"land-v4\")));\r\n EXPECT_EQ(nullptr,\r\n test::GetValueForConfig<Id>(table.get(), res_name,\r\n test::ParseConfigOrDie(\"land-v5\")));\r\n EXPECT_EQ(nullptr,\r\n test::GetValueForConfig<Id>(table.get(), res_name,\r\n test::ParseConfigOrDie(\"land-v6\")));\r\n EXPECT_EQ(nullptr,\r\n test::GetValueForConfig<Id>(table.get(), res_name,\r\n test::ParseConfigOrDie(\"land-v14\")));\r\n\r\n // These should remain.\r\n EXPECT_NE(nullptr,\r\n test::GetValueForConfig<Id>(\r\n table.get(), res_name,\r\n test::ParseConfigOrDie(\"sw600dp\").CopyWithoutSdkVersion()));\r\n\r\n // land-v21 should have been converted to land.\r\n EXPECT_NE(nullptr,\r\n test::GetValueForConfig<Id>(table.get(), res_name,\r\n test::ParseConfigOrDie(\"land\")));\r\n // land-v22 should remain as-is.\r\n EXPECT_NE(nullptr,\r\n test::GetValueForConfig<Id>(table.get(), res_name,\r\n test::ParseConfigOrDie(\"land-v22\")));\r\n}\r\n\r\n} // namespace aapt\r\n" }, { "alpha_fraction": 0.8004273772239685, "alphanum_fraction": 0.8042734861373901, "avg_line_length": 32.92537307739258, "blob_id": "a4c7719caf3d7751d0539843161f43305631bc39", "content_id": "7a1d9199f4f1e6d98a0d6d69ca89593738f7e57b", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 2340, "license_type": "permissive", "max_line_length": 74, "num_lines": 67, "path": "/packages/overlays/Android.mk", "repo_name": "Ankits-lab/frameworks_base", 
"src_encoding": "UTF-8", "text": "# Copyright (C) 2019 The Android Open Source Project\r\n#\r\n# Licensed under the Apache License, Version 2.0 (the \"License\");\r\n# you may not use this file except in compliance with the License.\r\n# You may obtain a copy of the License at\r\n#\r\n# http://www.apache.org/licenses/LICENSE-2.0\r\n#\r\n# Unless required by applicable law or agreed to in writing, software\r\n# distributed under the License is distributed on an \"AS IS\" BASIS,\r\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n# See the License for the specific language governing permissions and\r\n# limitations under the License.\r\n\r\nLOCAL_PATH:= $(call my-dir)\r\ninclude $(CLEAR_VARS)\r\n\r\nLOCAL_MODULE := frameworks-base-overlays\r\nLOCAL_REQUIRED_MODULES := \\\r\n\tAccentColorBlackOverlay \\\r\n\tAccentColorCinnamonOverlay \\\r\n\tAccentColorOceanOverlay \\\r\n\tAccentColorOrchidOverlay \\\r\n\tAccentColorSpaceOverlay \\\r\n\tAccentColorGreenOverlay \\\r\n\tAccentColorPurpleOverlay \\\r\n\tDisplayCutoutEmulationCornerOverlay \\\r\n\tDisplayCutoutEmulationDoubleOverlay \\\r\n DisplayCutoutEmulationHoleOverlay \\\r\n\tDisplayCutoutEmulationTallOverlay \\\r\n\tDisplayCutoutEmulationWaterfallOverlay \\\r\n\tFontNotoSerifSourceOverlay \\\r\n\tIconPackCircularAndroidOverlay \\\r\n\tIconPackCircularLauncherOverlay \\\r\n\tIconPackCircularSettingsOverlay \\\r\n\tIconPackCircularSystemUIOverlay \\\r\n\tIconPackCircularThemePickerOverlay \\\r\n\tIconPackFilledAndroidOverlay \\\r\n\tIconPackFilledLauncherOverlay \\\r\n\tIconPackFilledSettingsOverlay \\\r\n\tIconPackFilledSystemUIOverlay \\\r\n\tIconPackFilledThemePickerOverlay \\\r\n\tIconPackRoundedAndroidOverlay \\\r\n\tIconPackRoundedLauncherOverlay \\\r\n\tIconPackRoundedSettingsOverlay \\\r\n\tIconPackRoundedSystemUIOverlay \\\r\n\tIconPackRoundedThemePickerOverlay \\\r\n\tIconShapePebbleOverlay \\\r\n\tIconShapeRoundedRectOverlay \\\r\n\tIconShapeSquircleOverlay \\\r\n\tIconShapeTaperedRectOverlay \\\r\n\tIconShapeTeardropOverlay \\\r\n\tIconShapeVesselOverlay \\\r\n\tNavigationBarMode3ButtonOverlay \\\r\n\tNavigationBarModeGesturalOverlay \\\r\n\tNavigationBarModeGesturalOverlayNarrowBack \\\r\n\tNavigationBarModeGesturalOverlayWideBack \\\r\n\tNavigationBarModeGesturalOverlayExtraWideBack \\\r\n\tpreinstalled-packages-platform-overlays.xml\r\n\r\ninclude $(BUILD_PHONY_PACKAGE)\r\ninclude $(CLEAR_VARS)\r\n\r\nLOCAL_MODULE := frameworks-base-overlays-debug\r\n\r\ninclude $(BUILD_PHONY_PACKAGE)\r\ninclude $(call first-makefiles-under,$(LOCAL_PATH))\r\n" }, { "alpha_fraction": 0.6538249850273132, "alphanum_fraction": 0.6582278609275818, "avg_line_length": 27.721311569213867, "blob_id": "58209f82a5d3ed03bd8af6197bebb9266de1abcc", "content_id": "7122c570a125defca34781be3ff594c043909766", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1817, "license_type": "permissive", "max_line_length": 75, "num_lines": 61, "path": "/packages/WAPPushManager/tests/src/com/android/smspush/unitTests/ReceiverService.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2010 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required 
by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.smspush.unitTests;\r\n\r\nimport android.app.Service;\r\nimport android.content.Context;\r\nimport android.content.Intent;\r\nimport android.os.IBinder;\r\nimport android.util.Log;\r\n\r\nimport com.android.internal.util.HexDump;\r\n\r\n/**\r\n * Service type receiver application\r\n */\r\npublic class ReceiverService extends Service {\r\n private static final String LOG_TAG = \"WAP PUSH\";\r\n\r\n @Override\r\n public void onCreate() {\r\n super.onCreate();\r\n Log.d(LOG_TAG, \"Receiver service created\");\r\n }\r\n\r\n @Override\r\n public IBinder onBind(Intent intent) {\r\n return null;\r\n }\r\n\r\n @Override\r\n public int onStartCommand(Intent intent, int flags, int startId) {\r\n Log.d(LOG_TAG, \"Receiver service started\");\r\n\r\n byte[] body;\r\n byte[] header;\r\n body = intent.getByteArrayExtra(\"data\");\r\n header = intent.getByteArrayExtra(\"header\");\r\n\r\n Log.d(LOG_TAG, \"header:\");\r\n Log.d(LOG_TAG, HexDump.dumpHexString(header));\r\n Log.d(LOG_TAG, \"body:\");\r\n Log.d(LOG_TAG, HexDump.dumpHexString(body));\r\n\r\n DataVerify.SetLastReceivedPdu(body);\r\n return START_STICKY;\r\n }\r\n}\r\n\r\n\r\n" }, { "alpha_fraction": 0.6119629144668579, "alphanum_fraction": 0.6344879865646362, "avg_line_length": 36.28260803222656, "blob_id": "81502902f549818246adf222fba6a936b972be28", "content_id": "bf5304c51dbcd7cf5ead9cccd21ea168be07d5e6", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 5283, "license_type": "permissive", "max_line_length": 96, "num_lines": 138, "path": "/tests/HwAccelerationTest/src/com/android/test/hwui/AdvancedBlendActivity.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2010 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.test.hwui;\r\n\r\nimport android.app.Activity;\r\nimport android.content.Context;\r\nimport android.graphics.Bitmap;\r\nimport android.graphics.BitmapFactory;\r\nimport android.graphics.BitmapShader;\r\nimport android.graphics.Canvas;\r\nimport android.graphics.Color;\r\nimport android.graphics.ComposeShader;\r\nimport android.graphics.LinearGradient;\r\nimport android.graphics.Matrix;\r\nimport android.graphics.Paint;\r\nimport android.graphics.PorterDuff;\r\nimport android.graphics.Shader;\r\nimport android.os.Bundle;\r\nimport android.view.View;\r\n\r\n@SuppressWarnings({\"UnusedDeclaration\"})\r\npublic class AdvancedBlendActivity extends Activity {\r\n @Override\r\n protected void onCreate(Bundle savedInstanceState) {\r\n 
super.onCreate(savedInstanceState);\r\n\r\n setContentView(new ShadersView(this));\r\n }\r\n\r\n public static class ShadersView extends View {\r\n private BitmapShader mScaledShader;\r\n private int mTexWidth;\r\n private int mTexHeight;\r\n private Paint mPaint;\r\n private float mDrawWidth;\r\n private float mDrawHeight;\r\n private LinearGradient mHorGradient;\r\n private ComposeShader mComposeShader;\r\n private ComposeShader mCompose2Shader;\r\n private ComposeShader mCompose3Shader;\r\n private ComposeShader mCompose4Shader;\r\n private ComposeShader mCompose5Shader;\r\n private ComposeShader mCompose6Shader;\r\n private BitmapShader mScaled2Shader;\r\n\r\n public ShadersView(Context c) {\r\n super(c);\r\n\r\n Bitmap texture = BitmapFactory.decodeResource(c.getResources(), R.drawable.sunset1);\r\n mTexWidth = texture.getWidth();\r\n mTexHeight = texture.getHeight();\r\n mDrawWidth = mTexWidth * 2.2f;\r\n mDrawHeight = mTexHeight * 1.2f;\r\n\r\n mScaledShader = new BitmapShader(texture, Shader.TileMode.MIRROR,\r\n Shader.TileMode.MIRROR);\r\n Matrix m2 = new Matrix();\r\n m2.setScale(0.5f, 0.5f);\r\n mScaledShader.setLocalMatrix(m2);\r\n\r\n mScaled2Shader = new BitmapShader(texture, Shader.TileMode.MIRROR,\r\n Shader.TileMode.MIRROR);\r\n Matrix m3 = new Matrix();\r\n m3.setScale(0.1f, 0.1f);\r\n mScaled2Shader.setLocalMatrix(m3);\r\n\r\n mHorGradient = new LinearGradient(0.0f, 0.0f, mDrawWidth, 0.0f,\r\n Color.BLACK, Color.WHITE, Shader.TileMode.CLAMP);\r\n\r\n mComposeShader = new ComposeShader(mScaledShader, mHorGradient,\r\n PorterDuff.Mode.DARKEN);\r\n mCompose2Shader = new ComposeShader(mScaledShader, mHorGradient,\r\n PorterDuff.Mode.LIGHTEN);\r\n mCompose3Shader = new ComposeShader(mScaledShader, mHorGradient,\r\n PorterDuff.Mode.MULTIPLY);\r\n mCompose4Shader = new ComposeShader(mScaledShader, mHorGradient,\r\n PorterDuff.Mode.SCREEN);\r\n mCompose5Shader = new ComposeShader(mScaledShader, mHorGradient,\r\n PorterDuff.Mode.ADD);\r\n mCompose6Shader = new ComposeShader(mHorGradient, mScaledShader,\r\n PorterDuff.Mode.OVERLAY);\r\n\r\n mPaint = new Paint();\r\n }\r\n\r\n @Override\r\n protected void onDraw(Canvas canvas) {\r\n super.onDraw(canvas);\r\n canvas.drawRGB(255, 255, 255);\r\n\r\n canvas.save();\r\n canvas.translate(40.0f, 40.0f);\r\n\r\n mPaint.setShader(mComposeShader);\r\n canvas.drawRect(0.0f, 0.0f, mDrawWidth, mDrawHeight, mPaint);\r\n\r\n canvas.translate(0.0f, 40.0f + mDrawHeight);\r\n mPaint.setShader(mCompose2Shader);\r\n canvas.drawRect(0.0f, 0.0f, mDrawWidth, mDrawHeight, mPaint);\r\n\r\n canvas.translate(0.0f, 40.0f + mDrawHeight);\r\n mPaint.setShader(mCompose3Shader);\r\n canvas.drawRect(0.0f, 0.0f, mDrawWidth, mDrawHeight, mPaint);\r\n\r\n canvas.restore();\r\n\r\n canvas.save();\r\n canvas.translate(40.0f + mDrawWidth + 40.0f, 40.0f);\r\n\r\n mPaint.setShader(mCompose4Shader);\r\n canvas.drawRect(0.0f, 0.0f, mDrawWidth, mDrawHeight, mPaint);\r\n\r\n canvas.translate(0.0f, 40.0f + mDrawHeight);\r\n mPaint.setShader(mCompose5Shader);\r\n canvas.drawRect(0.0f, 0.0f, mDrawWidth, mDrawHeight, mPaint);\r\n\r\n canvas.translate(0.0f, 40.0f + mDrawHeight);\r\n mPaint.setShader(mCompose6Shader);\r\n canvas.drawRect(0.0f, 0.0f, mDrawWidth, mDrawHeight, mPaint);\r\n\r\n canvas.restore();\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.7783505320549011, "alphanum_fraction": 0.7783505320549011, "avg_line_length": 41.11111068725586, "blob_id": "96f70e99454f91bc0d276a2952101849a90e3223", "content_id": "9f337c7584aa9c25ea76bb9fd2e46fb059c8525d", "detected_licenses": 
[ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 388, "license_type": "permissive", "max_line_length": 74, "num_lines": 9, "path": "/cmds/hid/hid", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "#!/system/bin/sh\r\n\r\n# Preload the native portion libhidcommand_jni.so to bypass the dependency\r\n# checks in the Java classloader, which prohibit dependencies that aren't\r\n# listed in system/core/rootdir/etc/public.libraries.android.txt.\r\nexport LD_PRELOAD=libhidcommand_jni.so\r\n\r\nexport CLASSPATH=/system/framework/hid.jar\r\nexec app_process /system/bin com.android.commands.hid.Hid \"$@\"\r\n" }, { "alpha_fraction": 0.6137331128120422, "alphanum_fraction": 0.6235424876213074, "avg_line_length": 35.26206970214844, "blob_id": "8c0693b6ab89e3e8557a39b0b678abcedf86e6ed", "content_id": "7702b5c5335bf009a1887a4b5739e3d8c1899470", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 5403, "license_type": "permissive", "max_line_length": 94, "num_lines": 145, "path": "/tests/HwAccelerationTest/src/com/android/test/hwui/ViewPropertyAlphaActivity.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2012 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.test.hwui;\r\n\r\nimport android.animation.ObjectAnimator;\r\nimport android.animation.ValueAnimator;\r\nimport android.app.Activity;\r\nimport android.content.Context;\r\nimport android.graphics.Canvas;\r\nimport android.graphics.Color;\r\nimport android.graphics.Paint;\r\nimport android.os.Bundle;\r\nimport android.text.Spannable;\r\nimport android.text.SpannableStringBuilder;\r\nimport android.text.style.BackgroundColorSpan;\r\nimport android.text.style.ForegroundColorSpan;\r\nimport android.text.style.ImageSpan;\r\nimport android.text.style.SuggestionSpan;\r\nimport android.text.style.UnderlineSpan;\r\nimport android.view.View;\r\nimport android.widget.Button;\r\nimport android.widget.EditText;\r\nimport android.widget.LinearLayout;\r\nimport android.widget.TextView;\r\n\r\npublic class ViewPropertyAlphaActivity extends Activity {\r\n \r\n MyView myViewAlphaDefault, myViewAlphaHandled;\r\n\r\n @Override\r\n protected void onCreate(Bundle savedInstanceState) {\r\n super.onCreate(savedInstanceState);\r\n\r\n setContentView(R.layout.view_properties);\r\n\r\n getWindow().getDecorView().postDelayed(new Runnable() {\r\n @Override\r\n public void run() {\r\n startAnim(R.id.button);\r\n startAnim(R.id.textview);\r\n startAnim(R.id.spantext);\r\n startAnim(R.id.edittext);\r\n startAnim(R.id.selectedtext);\r\n startAnim(R.id.textviewbackground);\r\n startAnim(R.id.layout);\r\n startAnim(R.id.imageview);\r\n startAnim(myViewAlphaDefault);\r\n startAnim(myViewAlphaHandled);\r\n EditText selectedText = 
findViewById(R.id.selectedtext);\r\n selectedText.setSelection(3, 8);\r\n }\r\n }, 2000);\r\n \r\n Button invalidator = findViewById(R.id.invalidateButton);\r\n invalidator.setOnClickListener(new View.OnClickListener() {\r\n @Override\r\n public void onClick(View v) {\r\n findViewById(R.id.textview).invalidate();\r\n findViewById(R.id.spantext).invalidate();\r\n }\r\n });\r\n\r\n TextView textView = findViewById(R.id.spantext);\r\n if (textView != null) {\r\n SpannableStringBuilder text =\r\n new SpannableStringBuilder(\"Now this is a short text message with spans\");\r\n\r\n text.setSpan(new BackgroundColorSpan(Color.RED), 0, 3,\r\n Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);\r\n text.setSpan(new ForegroundColorSpan(Color.BLUE), 4, 9,\r\n Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);\r\n text.setSpan(new SuggestionSpan(this, new String[]{\"longer\"}, 3), 11, 16,\r\n Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);\r\n text.setSpan(new UnderlineSpan(), 17, 20,\r\n Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);\r\n text.setSpan(new ImageSpan(this, R.drawable.icon), 21, 22,\r\n Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);\r\n\r\n textView.setText(text);\r\n }\r\n \r\n LinearLayout container = findViewById(R.id.container);\r\n myViewAlphaDefault = new MyView(this, false);\r\n myViewAlphaDefault.setLayoutParams(new LinearLayout.LayoutParams(75, 75));\r\n container.addView(myViewAlphaDefault);\r\n myViewAlphaHandled = new MyView(this, true);\r\n myViewAlphaHandled.setLayoutParams(new LinearLayout.LayoutParams(75, 75));\r\n container.addView(myViewAlphaHandled);\r\n }\r\n\r\n private void startAnim(View target) {\r\n ObjectAnimator anim = ObjectAnimator.ofFloat(target, View.ALPHA, 0);\r\n anim.setRepeatCount(ValueAnimator.INFINITE);\r\n anim.setRepeatMode(ValueAnimator.REVERSE);\r\n anim.setDuration(1000);\r\n anim.start();\r\n }\r\n private void startAnim(int id) {\r\n startAnim(findViewById(id));\r\n }\r\n \r\n private static class MyView extends View {\r\n private int mMyAlpha = 255;\r\n private boolean mHandleAlpha;\r\n private Paint mPaint = new Paint(Paint.ANTI_ALIAS_FLAG);\r\n \r\n private MyView(Context context, boolean handleAlpha) {\r\n super(context);\r\n mHandleAlpha = handleAlpha;\r\n mPaint.setColor(Color.RED);\r\n }\r\n\r\n @Override\r\n protected void onDraw(Canvas canvas) {\r\n if (mHandleAlpha) {\r\n mPaint.setAlpha(mMyAlpha);\r\n }\r\n canvas.drawCircle(30, 30, 30, mPaint);\r\n }\r\n\r\n @Override\r\n protected boolean onSetAlpha(int alpha) {\r\n if (mHandleAlpha) {\r\n mMyAlpha = alpha;\r\n return true;\r\n }\r\n return super.onSetAlpha(alpha);\r\n }\r\n }\r\n\r\n}\r\n" }, { "alpha_fraction": 0.6389073729515076, "alphanum_fraction": 0.6422384977340698, "avg_line_length": 35.525001525878906, "blob_id": "e71b70e7d0b7373acd09445914d8d399391ecbb1", "content_id": "66d302dc76b861dc57a9eb30f3c004187acaae4f", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 3002, "license_type": "permissive", "max_line_length": 99, "num_lines": 80, "path": "/core/java/android/speech/tts/EventLogger.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2013 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\r\n * use this file except in compliance with the License. 
You may obtain a copy of\r\n * the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\r\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\r\n * License for the specific language governing permissions and limitations under\r\n * the License.\r\n */\r\npackage android.speech.tts;\r\n\r\nimport android.text.TextUtils;\r\n\r\n/**\r\n * Writes data about a given speech synthesis request to the event logs. The data that is logged\r\n * includes the calling app, length of the utterance, speech rate / pitch, the latency, and overall\r\n * time taken.\r\n */\r\nclass EventLogger extends AbstractEventLogger {\r\n private final SynthesisRequest mRequest;\r\n\r\n EventLogger(SynthesisRequest request, int callerUid, int callerPid, String serviceApp) {\r\n super(callerUid, callerPid, serviceApp);\r\n mRequest = request;\r\n }\r\n\r\n @Override\r\n protected void logFailure(int statusCode) {\r\n // We don't report stopped syntheses because their overall\r\n // total time spent will be inaccurate (will not correlate with\r\n // the length of the utterance).\r\n if (statusCode != TextToSpeech.STOPPED) {\r\n EventLogTags.writeTtsSpeakFailure(mServiceApp, mCallerUid, mCallerPid,\r\n getUtteranceLength(), getLocaleString(),\r\n mRequest.getSpeechRate(), mRequest.getPitch());\r\n }\r\n }\r\n\r\n @Override\r\n protected void logSuccess(long audioLatency, long engineLatency, long engineTotal) {\r\n EventLogTags.writeTtsSpeakSuccess(mServiceApp, mCallerUid, mCallerPid,\r\n getUtteranceLength(), getLocaleString(),\r\n mRequest.getSpeechRate(), mRequest.getPitch(),\r\n engineLatency, engineTotal, audioLatency);\r\n }\r\n\r\n /**\r\n * @return the length of the utterance for the given synthesis, 0\r\n * if the utterance was {@code null}.\r\n */\r\n private int getUtteranceLength() {\r\n final String utterance = mRequest.getText();\r\n return utterance == null ? 
0 : utterance.length();\r\n }\r\n\r\n /**\r\n * Returns a formatted locale string from the synthesis params of the\r\n * form lang-country-variant.\r\n */\r\n private String getLocaleString() {\r\n StringBuilder sb = new StringBuilder(mRequest.getLanguage());\r\n if (!TextUtils.isEmpty(mRequest.getCountry())) {\r\n sb.append('-');\r\n sb.append(mRequest.getCountry());\r\n\r\n if (!TextUtils.isEmpty(mRequest.getVariant())) {\r\n sb.append('-');\r\n sb.append(mRequest.getVariant());\r\n }\r\n }\r\n\r\n return sb.toString();\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.467576801776886, "alphanum_fraction": 0.4744027256965637, "avg_line_length": 20.538461685180664, "blob_id": "a41c41e6198d71076e8c7563caadda8830c7f344", "content_id": "e30e8629bdc045998bbaf459056a76c662a76989", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 586, "license_type": "permissive", "max_line_length": 43, "num_lines": 26, "path": "/tools/hiddenapi/sort_api.sh", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "#!/bin/bash\r\nset -e\r\nif [ -z \"$1\" ]; then\r\n source_list=/dev/stdin\r\n dest_list=/dev/stdout\r\nelse\r\n source_list=\"$1\"\r\n dest_list=\"$1\"\r\nfi\r\n# Load the file\r\nreadarray A < \"$source_list\"\r\n# Sort\r\nIFS=$'\\n'\r\n# Stash away comments\r\nC=( $(grep -E '^#' <<< \"${A[*]}\" || :) )\r\nA=( $(grep -v -E '^#' <<< \"${A[*]}\" || :) )\r\n# Sort entries\r\nA=( $(LC_COLLATE=C sort -f <<< \"${A[*]}\") )\r\nA=( $(uniq <<< \"${A[*]}\") )\r\n# Concatenate comments and entries\r\nA=( ${C[*]} ${A[*]} )\r\nunset IFS\r\n# Dump array back into the file\r\nif [ ${#A[@]} -ne 0 ]; then\r\n printf '%s\\n' \"${A[@]}\" > \"$dest_list\"\r\nfi\r\n" }, { "alpha_fraction": 0.6480262875556946, "alphanum_fraction": 0.6741427183151245, "avg_line_length": 29.746835708618164, "blob_id": "314d48514131d49ebfd144ddec68ecfdc95b7bd0", "content_id": "4e2de4017d19048def0c7189236b42b6e7cfeb4e", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": true, "language": "C++", "length_bytes": 10032, "license_type": "permissive", "max_line_length": 99, "num_lines": 316, "path": "/cmds/statsd/tests/external/StatsPuller_test.cpp", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "// Copyright (C) 2018 The Android Open Source Project\r\n//\r\n// Licensed under the Apache License, Version 2.0 (the \"License\");\r\n// you may not use this file except in compliance with the License.\r\n// You may obtain a copy of the License at\r\n//\r\n// http://www.apache.org/licenses/LICENSE-2.0\r\n//\r\n// Unless required by applicable law or agreed to in writing, software\r\n// distributed under the License is distributed on an \"AS IS\" BASIS,\r\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n// See the License for the specific language governing permissions and\r\n// limitations under the License.\r\n\r\n#include <gmock/gmock.h>\r\n#include <gtest/gtest.h>\r\n#include <stdio.h>\r\n\r\n#include <chrono>\r\n#include <thread>\r\n#include <vector>\r\n\r\n#include \"../metrics/metrics_test_helper.h\"\r\n#include \"src/stats_log_util.h\"\r\n#include \"stats_event.h\"\r\n#include \"tests/statsd_test_util.h\"\r\n\r\n#ifdef __ANDROID__\r\n\r\nnamespace android {\r\nnamespace os {\r\nnamespace statsd {\r\n\r\nusing namespace testing;\r\nusing std::make_shared;\r\nusing std::shared_ptr;\r\nusing std::vector;\r\nusing 
std::this_thread::sleep_for;\r\nusing testing::Contains;\r\n\r\nnamespace {\r\nint pullTagId = 10014;\r\n\r\nbool pullSuccess;\r\nvector<std::shared_ptr<LogEvent>> pullData;\r\nlong pullDelayNs;\r\n\r\nclass FakePuller : public StatsPuller {\r\npublic:\r\n FakePuller()\r\n : StatsPuller(pullTagId, /*coolDownNs=*/MillisToNano(10), /*timeoutNs=*/MillisToNano(5)){};\r\n\r\nprivate:\r\n bool PullInternal(vector<std::shared_ptr<LogEvent>>* data) override {\r\n (*data) = pullData;\r\n sleep_for(std::chrono::nanoseconds(pullDelayNs));\r\n return pullSuccess;\r\n }\r\n};\r\n\r\nFakePuller puller;\r\n\r\nstd::unique_ptr<LogEvent> createSimpleEvent(int64_t eventTimeNs, int64_t value) {\r\n AStatsEvent* statsEvent = AStatsEvent_obtain();\r\n AStatsEvent_setAtomId(statsEvent, pullTagId);\r\n AStatsEvent_overwriteTimestamp(statsEvent, eventTimeNs);\r\n AStatsEvent_writeInt64(statsEvent, value);\r\n\r\n std::unique_ptr<LogEvent> logEvent = std::make_unique<LogEvent>(/*uid=*/0, /*pid=*/0);\r\n parseStatsEventToLogEvent(statsEvent, logEvent.get());\r\n return logEvent;\r\n}\r\n\r\nclass StatsPullerTest : public ::testing::Test {\r\npublic:\r\n StatsPullerTest() {\r\n }\r\n\r\n void SetUp() override {\r\n puller.ForceClearCache();\r\n pullSuccess = false;\r\n pullDelayNs = 0;\r\n pullData.clear();\r\n }\r\n};\r\n\r\n} // Anonymous namespace.\r\n\r\nTEST_F(StatsPullerTest, PullSuccess) {\r\n pullData.push_back(createSimpleEvent(1111L, 33));\r\n\r\n pullSuccess = true;\r\n\r\n vector<std::shared_ptr<LogEvent>> dataHolder;\r\n EXPECT_TRUE(puller.Pull(getElapsedRealtimeNs(), &dataHolder));\r\n ASSERT_EQ(1, dataHolder.size());\r\n EXPECT_EQ(pullTagId, dataHolder[0]->GetTagId());\r\n EXPECT_EQ(1111L, dataHolder[0]->GetElapsedTimestampNs());\r\n ASSERT_EQ(1, dataHolder[0]->size());\r\n EXPECT_EQ(33, dataHolder[0]->getValues()[0].mValue.int_value);\r\n\r\n sleep_for(std::chrono::milliseconds(11));\r\n\r\n pullData.clear();\r\n pullData.push_back(createSimpleEvent(2222L, 44));\r\n\r\n pullSuccess = true;\r\n\r\n EXPECT_TRUE(puller.Pull(getElapsedRealtimeNs(), &dataHolder));\r\n ASSERT_EQ(1, dataHolder.size());\r\n EXPECT_EQ(pullTagId, dataHolder[0]->GetTagId());\r\n EXPECT_EQ(2222L, dataHolder[0]->GetElapsedTimestampNs());\r\n ASSERT_EQ(1, dataHolder[0]->size());\r\n EXPECT_EQ(44, dataHolder[0]->getValues()[0].mValue.int_value);\r\n}\r\n\r\nTEST_F(StatsPullerTest, PullFailAfterSuccess) {\r\n pullData.push_back(createSimpleEvent(1111L, 33));\r\n\r\n pullSuccess = true;\r\n\r\n vector<std::shared_ptr<LogEvent>> dataHolder;\r\n EXPECT_TRUE(puller.Pull(getElapsedRealtimeNs(), &dataHolder));\r\n ASSERT_EQ(1, dataHolder.size());\r\n EXPECT_EQ(pullTagId, dataHolder[0]->GetTagId());\r\n EXPECT_EQ(1111L, dataHolder[0]->GetElapsedTimestampNs());\r\n ASSERT_EQ(1, dataHolder[0]->size());\r\n EXPECT_EQ(33, dataHolder[0]->getValues()[0].mValue.int_value);\r\n\r\n sleep_for(std::chrono::milliseconds(11));\r\n\r\n pullData.clear();\r\n pullData.push_back(createSimpleEvent(2222L, 44));\r\n\r\n pullSuccess = false;\r\n dataHolder.clear();\r\n EXPECT_FALSE(puller.Pull(getElapsedRealtimeNs(), &dataHolder));\r\n ASSERT_EQ(0, dataHolder.size());\r\n\r\n // Fails due to hitting the cool down.\r\n pullSuccess = true;\r\n dataHolder.clear();\r\n EXPECT_FALSE(puller.Pull(getElapsedRealtimeNs(), &dataHolder));\r\n ASSERT_EQ(0, dataHolder.size());\r\n}\r\n\r\n// Test pull takes longer than timeout, 2nd pull happens shorter than cooldown\r\nTEST_F(StatsPullerTest, PullTakeTooLongAndPullFast) {\r\n pullData.push_back(createSimpleEvent(1111L, 
33));\r\n pullSuccess = true;\r\n // timeout is 5ms\r\n pullDelayNs = MillisToNano(6);\r\n\r\n vector<std::shared_ptr<LogEvent>> dataHolder;\r\n EXPECT_FALSE(puller.Pull(getElapsedRealtimeNs(), &dataHolder));\r\n ASSERT_EQ(0, dataHolder.size());\r\n\r\n pullData.clear();\r\n pullData.push_back(createSimpleEvent(2222L, 44));\r\n pullDelayNs = 0;\r\n\r\n pullSuccess = true;\r\n dataHolder.clear();\r\n EXPECT_FALSE(puller.Pull(getElapsedRealtimeNs(), &dataHolder));\r\n ASSERT_EQ(0, dataHolder.size());\r\n}\r\n\r\nTEST_F(StatsPullerTest, PullFail) {\r\n pullData.push_back(createSimpleEvent(1111L, 33));\r\n\r\n pullSuccess = false;\r\n\r\n vector<std::shared_ptr<LogEvent>> dataHolder;\r\n EXPECT_FALSE(puller.Pull(getElapsedRealtimeNs(), &dataHolder));\r\n ASSERT_EQ(0, dataHolder.size());\r\n}\r\n\r\nTEST_F(StatsPullerTest, PullTakeTooLong) {\r\n pullData.push_back(createSimpleEvent(1111L, 33));\r\n\r\n pullSuccess = true;\r\n pullDelayNs = MillisToNano(6);\r\n\r\n vector<std::shared_ptr<LogEvent>> dataHolder;\r\n EXPECT_FALSE(puller.Pull(getElapsedRealtimeNs(), &dataHolder));\r\n ASSERT_EQ(0, dataHolder.size());\r\n}\r\n\r\nTEST_F(StatsPullerTest, PullTooFast) {\r\n pullData.push_back(createSimpleEvent(1111L, 33));\r\n\r\n pullSuccess = true;\r\n\r\n vector<std::shared_ptr<LogEvent>> dataHolder;\r\n EXPECT_TRUE(puller.Pull(getElapsedRealtimeNs(), &dataHolder));\r\n ASSERT_EQ(1, dataHolder.size());\r\n EXPECT_EQ(pullTagId, dataHolder[0]->GetTagId());\r\n EXPECT_EQ(1111L, dataHolder[0]->GetElapsedTimestampNs());\r\n ASSERT_EQ(1, dataHolder[0]->size());\r\n EXPECT_EQ(33, dataHolder[0]->getValues()[0].mValue.int_value);\r\n\r\n pullData.clear();\r\n pullData.push_back(createSimpleEvent(2222L, 44));\r\n\r\n pullSuccess = true;\r\n\r\n dataHolder.clear();\r\n EXPECT_TRUE(puller.Pull(getElapsedRealtimeNs(), &dataHolder));\r\n ASSERT_EQ(1, dataHolder.size());\r\n EXPECT_EQ(pullTagId, dataHolder[0]->GetTagId());\r\n EXPECT_EQ(1111L, dataHolder[0]->GetElapsedTimestampNs());\r\n ASSERT_EQ(1, dataHolder[0]->size());\r\n EXPECT_EQ(33, dataHolder[0]->getValues()[0].mValue.int_value);\r\n}\r\n\r\nTEST_F(StatsPullerTest, PullFailsAndTooFast) {\r\n pullData.push_back(createSimpleEvent(1111L, 33));\r\n\r\n pullSuccess = false;\r\n\r\n vector<std::shared_ptr<LogEvent>> dataHolder;\r\n EXPECT_FALSE(puller.Pull(getElapsedRealtimeNs(), &dataHolder));\r\n ASSERT_EQ(0, dataHolder.size());\r\n\r\n pullData.clear();\r\n pullData.push_back(createSimpleEvent(2222L, 44));\r\n\r\n pullSuccess = true;\r\n\r\n EXPECT_FALSE(puller.Pull(getElapsedRealtimeNs(), &dataHolder));\r\n ASSERT_EQ(0, dataHolder.size());\r\n}\r\n\r\nTEST_F(StatsPullerTest, PullSameEventTime) {\r\n pullData.push_back(createSimpleEvent(1111L, 33));\r\n\r\n pullSuccess = true;\r\n int64_t eventTimeNs = getElapsedRealtimeNs();\r\n\r\n vector<std::shared_ptr<LogEvent>> dataHolder;\r\n EXPECT_TRUE(puller.Pull(eventTimeNs, &dataHolder));\r\n ASSERT_EQ(1, dataHolder.size());\r\n EXPECT_EQ(pullTagId, dataHolder[0]->GetTagId());\r\n EXPECT_EQ(1111L, dataHolder[0]->GetElapsedTimestampNs());\r\n ASSERT_EQ(1, dataHolder[0]->size());\r\n EXPECT_EQ(33, dataHolder[0]->getValues()[0].mValue.int_value);\r\n\r\n pullData.clear();\r\n pullData.push_back(createSimpleEvent(2222L, 44));\r\n\r\n // Sleep to ensure the cool down expires.\r\n sleep_for(std::chrono::milliseconds(11));\r\n pullSuccess = true;\r\n\r\n dataHolder.clear();\r\n EXPECT_TRUE(puller.Pull(eventTimeNs, &dataHolder));\r\n ASSERT_EQ(1, dataHolder.size());\r\n EXPECT_EQ(pullTagId, 
dataHolder[0]->GetTagId());\r\n EXPECT_EQ(1111L, dataHolder[0]->GetElapsedTimestampNs());\r\n ASSERT_EQ(1, dataHolder[0]->size());\r\n EXPECT_EQ(33, dataHolder[0]->getValues()[0].mValue.int_value);\r\n}\r\n\r\n// Test pull takes longer than timeout, 2nd pull happens at same event time\r\nTEST_F(StatsPullerTest, PullTakeTooLongAndPullSameEventTime) {\r\n pullData.push_back(createSimpleEvent(1111L, 33));\r\n pullSuccess = true;\r\n int64_t eventTimeNs = getElapsedRealtimeNs();\r\n // timeout is 5ms\r\n pullDelayNs = MillisToNano(6);\r\n\r\n vector<std::shared_ptr<LogEvent>> dataHolder;\r\n EXPECT_FALSE(puller.Pull(eventTimeNs, &dataHolder));\r\n ASSERT_EQ(0, dataHolder.size());\r\n\r\n // Sleep to ensure the cool down expires. 6ms is taken by the delay, so only 5 is needed here.\r\n sleep_for(std::chrono::milliseconds(5));\r\n\r\n pullData.clear();\r\n pullData.push_back(createSimpleEvent(2222L, 44));\r\n pullDelayNs = 0;\r\n\r\n pullSuccess = true;\r\n dataHolder.clear();\r\n EXPECT_FALSE(puller.Pull(eventTimeNs, &dataHolder));\r\n ASSERT_EQ(0, dataHolder.size());\r\n}\r\n\r\nTEST_F(StatsPullerTest, PullFailsAndPullSameEventTime) {\r\n pullData.push_back(createSimpleEvent(1111L, 33));\r\n\r\n pullSuccess = false;\r\n int64_t eventTimeNs = getElapsedRealtimeNs();\r\n\r\n vector<std::shared_ptr<LogEvent>> dataHolder;\r\n EXPECT_FALSE(puller.Pull(eventTimeNs, &dataHolder));\r\n ASSERT_EQ(0, dataHolder.size());\r\n\r\n // Sleep to ensure the cool down expires.\r\n sleep_for(std::chrono::milliseconds(11));\r\n\r\n pullData.clear();\r\n pullData.push_back(createSimpleEvent(2222L, 44));\r\n\r\n pullSuccess = true;\r\n\r\n EXPECT_FALSE(puller.Pull(eventTimeNs, &dataHolder));\r\n ASSERT_EQ(0, dataHolder.size());\r\n}\r\n\r\n} // namespace statsd\r\n} // namespace os\r\n} // namespace android\r\n#else\r\nGTEST_LOG_(INFO) << \"This test does nothing.\\n\";\r\n#endif\r\n" }, { "alpha_fraction": 0.5487673878669739, "alphanum_fraction": 0.5605573654174805, "avg_line_length": 36.69585418701172, "blob_id": "745b8649cc1275a38d5089efbd6ace7aa2b9739a", "content_id": "fbe8be01568422504b2c226daec8001bb303544e", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 8397, "license_type": "permissive", "max_line_length": 102, "num_lines": 217, "path": "/tests/HwAccelerationTest/src/com/android/test/hwui/SingleFrameTextureViewTestActivity.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2016 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.test.hwui;\r\n\r\nimport static android.opengl.GLES20.GL_COLOR_BUFFER_BIT;\r\nimport static android.opengl.GLES20.glClear;\r\nimport static android.opengl.GLES20.glClearColor;\r\n\r\nimport android.app.Activity;\r\nimport android.graphics.Color;\r\nimport android.graphics.SurfaceTexture;\r\nimport android.opengl.GLUtils;\r\nimport 
android.os.Bundle;\r\nimport android.util.Log;\r\nimport android.view.TextureView;\r\nimport android.view.TextureView.SurfaceTextureListener;\r\nimport android.view.View;\r\nimport android.view.ViewGroup.LayoutParams;\r\nimport android.widget.FrameLayout;\r\nimport android.widget.TextView;\r\n\r\nimport javax.microedition.khronos.egl.EGL10;\r\nimport javax.microedition.khronos.egl.EGLConfig;\r\nimport javax.microedition.khronos.egl.EGLContext;\r\nimport javax.microedition.khronos.egl.EGLDisplay;\r\nimport javax.microedition.khronos.egl.EGLSurface;\r\n\r\npublic class SingleFrameTextureViewTestActivity extends Activity implements SurfaceTextureListener {\r\n private static final String LOG_TAG = \"SingleFrameTest\";\r\n\r\n private View mPreview;\r\n private TextureView mTextureView;\r\n private Thread mGLThread;\r\n\r\n @Override\r\n protected void onCreate(Bundle savedInstanceState) {\r\n super.onCreate(savedInstanceState);\r\n TextView preview = new TextView(this);\r\n preview.setText(\"This is a preview\");\r\n preview.setBackgroundColor(Color.WHITE);\r\n mPreview = preview;\r\n mTextureView = new TextureView(this);\r\n mTextureView.setSurfaceTextureListener(this);\r\n\r\n FrameLayout content = new FrameLayout(this);\r\n content.addView(mTextureView,\r\n LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);\r\n content.addView(mPreview,\r\n LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);\r\n\r\n setContentView(content);\r\n }\r\n\r\n private void stopGlThread() {\r\n if (mGLThread != null) {\r\n try {\r\n mGLThread.join();\r\n mGLThread = null;\r\n } catch (InterruptedException e) { }\r\n }\r\n }\r\n\r\n @Override\r\n public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {\r\n Log.d(LOG_TAG, \"onSurfaceAvailable\");\r\n mGLThread = new Thread() {\r\n static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;\r\n static final int EGL_OPENGL_ES2_BIT = 4;\r\n\r\n private EGL10 mEgl;\r\n private EGLDisplay mEglDisplay;\r\n private EGLConfig mEglConfig;\r\n private EGLContext mEglContext;\r\n private EGLSurface mEglSurface;\r\n\r\n @Override\r\n public void run() {\r\n initGL();\r\n try {\r\n Thread.sleep(500);\r\n } catch (InterruptedException e) {}\r\n\r\n for (int i = 0; i < 2; i++) {\r\n if (i == 0) {\r\n glClearColor(0.0f, 0.0f, 1.0f, 1.0f);\r\n } else {\r\n glClearColor(0.0f, 1.0f, 0.0f, 1.0f);\r\n }\r\n glClear(GL_COLOR_BUFFER_BIT);\r\n Log.d(LOG_TAG, \"eglSwapBuffers\");\r\n if (!mEgl.eglSwapBuffers(mEglDisplay, mEglSurface)) {\r\n throw new RuntimeException(\"Cannot swap buffers\");\r\n }\r\n try {\r\n Thread.sleep(50);\r\n } catch (InterruptedException e) {}\r\n }\r\n\r\n try {\r\n Thread.sleep(500);\r\n } catch (InterruptedException e) {}\r\n\r\n finishGL();\r\n }\r\n\r\n private void finishGL() {\r\n mEgl.eglDestroyContext(mEglDisplay, mEglContext);\r\n mEgl.eglDestroySurface(mEglDisplay, mEglSurface);\r\n }\r\n\r\n private void initGL() {\r\n mEgl = (EGL10) EGLContext.getEGL();\r\n\r\n mEglDisplay = mEgl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);\r\n if (mEglDisplay == EGL10.EGL_NO_DISPLAY) {\r\n throw new RuntimeException(\"eglGetDisplay failed \"\r\n + GLUtils.getEGLErrorString(mEgl.eglGetError()));\r\n }\r\n\r\n int[] version = new int[2];\r\n if (!mEgl.eglInitialize(mEglDisplay, version)) {\r\n throw new RuntimeException(\"eglInitialize failed \" +\r\n GLUtils.getEGLErrorString(mEgl.eglGetError()));\r\n }\r\n\r\n mEglConfig = chooseEglConfig();\r\n if (mEglConfig == null) {\r\n throw new RuntimeException(\"eglConfig not initialized\");\r\n 
}\r\n\r\n mEglContext = createContext(mEgl, mEglDisplay, mEglConfig);\r\n\r\n mEglSurface = mEgl.eglCreateWindowSurface(mEglDisplay, mEglConfig, surface, null);\r\n\r\n if (mEglSurface == null || mEglSurface == EGL10.EGL_NO_SURFACE) {\r\n int error = mEgl.eglGetError();\r\n if (error == EGL10.EGL_BAD_NATIVE_WINDOW) {\r\n Log.e(LOG_TAG, \"createWindowSurface returned EGL_BAD_NATIVE_WINDOW.\");\r\n return;\r\n }\r\n throw new RuntimeException(\"createWindowSurface failed \"\r\n + GLUtils.getEGLErrorString(error));\r\n }\r\n\r\n if (!mEgl.eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface, mEglContext)) {\r\n throw new RuntimeException(\"eglMakeCurrent failed \"\r\n + GLUtils.getEGLErrorString(mEgl.eglGetError()));\r\n }\r\n }\r\n\r\n\r\n EGLContext createContext(EGL10 egl, EGLDisplay eglDisplay, EGLConfig eglConfig) {\r\n int[] attrib_list = { EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };\r\n return egl.eglCreateContext(eglDisplay, eglConfig, EGL10.EGL_NO_CONTEXT, attrib_list);\r\n }\r\n\r\n private EGLConfig chooseEglConfig() {\r\n int[] configsCount = new int[1];\r\n EGLConfig[] configs = new EGLConfig[1];\r\n int[] configSpec = getConfig();\r\n if (!mEgl.eglChooseConfig(mEglDisplay, configSpec, configs, 1, configsCount)) {\r\n throw new IllegalArgumentException(\"eglChooseConfig failed \" +\r\n GLUtils.getEGLErrorString(mEgl.eglGetError()));\r\n } else if (configsCount[0] > 0) {\r\n return configs[0];\r\n }\r\n return null;\r\n }\r\n\r\n private int[] getConfig() {\r\n return new int[] {\r\n EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,\r\n EGL10.EGL_RED_SIZE, 8,\r\n EGL10.EGL_GREEN_SIZE, 8,\r\n EGL10.EGL_BLUE_SIZE, 8,\r\n EGL10.EGL_ALPHA_SIZE, 8,\r\n EGL10.EGL_DEPTH_SIZE, 0,\r\n EGL10.EGL_STENCIL_SIZE, 0,\r\n EGL10.EGL_NONE\r\n };\r\n }\r\n };\r\n mGLThread.start();\r\n }\r\n\r\n @Override\r\n public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {\r\n Log.d(LOG_TAG, \"onSurfaceTextureSizeChanged\");\r\n }\r\n\r\n @Override\r\n public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {\r\n Log.d(LOG_TAG, \"onSurfaceTextureDestroyed\");\r\n stopGlThread();\r\n return true;\r\n }\r\n\r\n @Override\r\n public void onSurfaceTextureUpdated(SurfaceTexture surface) {\r\n Log.d(LOG_TAG, \"onSurfaceTextureUpdated\");\r\n mPreview.setVisibility(View.GONE);\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.7777777910232544, "alphanum_fraction": 0.7777777910232544, "avg_line_length": 46, "blob_id": "a91644889a940de3135227812024c8de8cb46f8f", "content_id": "32b99cb248a8cf4d08c4d3727efcd036018194e6", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 144, "license_type": "permissive", "max_line_length": 74, "num_lines": 3, "path": "/cmds/appwidget/appwidget", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "#!/system/bin/sh\r\nexport CLASSPATH=/system/framework/appwidget.jar\r\nexec app_process /system/bin com.android.commands.appwidget.AppWidget \"$@\"\r\n" }, { "alpha_fraction": 0.6224489808082581, "alphanum_fraction": 0.6297376155853271, "avg_line_length": 32.88135528564453, "blob_id": "035da71cfe2b2e963379d22ceb17061435e86808", "content_id": "f12d28cc91fc04d89d9cbdb9a00cdd8fb714a644", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2058, "license_type": "permissive", "max_line_length": 95, "num_lines": 59, "path": 
"/services/core/java/com/android/server/broadcastradio/hal1/Convert.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/**\r\n * Copyright (C) 2017 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.server.broadcastradio.hal1;\r\n\r\nimport android.annotation.NonNull;\r\nimport android.annotation.Nullable;\r\nimport android.util.Slog;\r\n\r\nimport java.util.Map;\r\nimport java.util.Set;\r\n\r\nclass Convert {\r\n private static final String TAG = \"BroadcastRadioService.Convert\";\r\n\r\n /**\r\n * Converts string map to an array that's easily accessible by native code.\r\n *\r\n * Calling this java method once is more efficient than converting map object on the native\r\n * side, which requires several separate java calls for each element.\r\n *\r\n * @param map map to convert.\r\n * @returns array (sized the same as map) of two-element string arrays\r\n * (first element is the key, second is value).\r\n */\r\n static @NonNull String[][] stringMapToNative(@Nullable Map<String, String> map) {\r\n if (map == null) {\r\n Slog.v(TAG, \"map is null, returning zero-elements array\");\r\n return new String[0][0];\r\n }\r\n\r\n Set<Map.Entry<String, String>> entries = map.entrySet();\r\n int len = entries.size();\r\n String[][] arr = new String[len][2];\r\n\r\n int i = 0;\r\n for (Map.Entry<String, String> entry : entries) {\r\n arr[i][0] = entry.getKey();\r\n arr[i][1] = entry.getValue();\r\n i++;\r\n }\r\n\r\n Slog.v(TAG, \"converted \" + i + \" element(s)\");\r\n return arr;\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6459760069847107, "alphanum_fraction": 0.6665239930152893, "avg_line_length": 36.31147384643555, "blob_id": "37f1747fff4cd48136749d46ab9852fb5c58e5ef", "content_id": "aa2a2be7e7063d3738c01571865ff532c6c71c02", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 4672, "license_type": "permissive", "max_line_length": 95, "num_lines": 122, "path": "/packages/Tethering/tests/unit/common/android/net/TetheredClientTest.kt", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2020 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage android.net\r\n\r\nimport android.net.InetAddresses.parseNumericAddress\r\nimport 
android.net.TetheredClient.AddressInfo\r\nimport android.net.TetheringManager.TETHERING_BLUETOOTH\r\nimport android.net.TetheringManager.TETHERING_USB\r\nimport android.system.OsConstants.RT_SCOPE_UNIVERSE\r\nimport androidx.test.filters.SmallTest\r\nimport androidx.test.runner.AndroidJUnit4\r\nimport com.android.testutils.assertParcelSane\r\nimport org.junit.Test\r\nimport org.junit.runner.RunWith\r\nimport kotlin.test.assertEquals\r\nimport kotlin.test.assertNotEquals\r\n\r\nprivate val TEST_MACADDR = MacAddress.fromBytes(byteArrayOf(12, 23, 34, 45, 56, 67))\r\nprivate val TEST_OTHER_MACADDR = MacAddress.fromBytes(byteArrayOf(23, 34, 45, 56, 67, 78))\r\nprivate val TEST_ADDR1 = makeLinkAddress(\"192.168.113.3\", prefixLength = 24, expTime = 123L)\r\nprivate val TEST_ADDR2 = makeLinkAddress(\"fe80::1:2:3\", prefixLength = 64, expTime = 456L)\r\nprivate val TEST_HOSTNAME = \"test_hostname\"\r\nprivate val TEST_OTHER_HOSTNAME = \"test_other_hostname\"\r\nprivate val TEST_ADDRINFO1 = AddressInfo(TEST_ADDR1, TEST_HOSTNAME)\r\nprivate val TEST_ADDRINFO2 = AddressInfo(TEST_ADDR2, null)\r\n\r\nprivate fun makeLinkAddress(addr: String, prefixLength: Int, expTime: Long) = LinkAddress(\r\n parseNumericAddress(addr),\r\n prefixLength,\r\n 0 /* flags */,\r\n RT_SCOPE_UNIVERSE,\r\n expTime /* deprecationTime */,\r\n expTime /* expirationTime */)\r\n\r\n@RunWith(AndroidJUnit4::class)\r\n@SmallTest\r\nclass TetheredClientTest {\r\n @Test\r\n fun testParceling() {\r\n assertParcelSane(TEST_ADDRINFO1, fieldCount = 2)\r\n assertParcelSane(makeTestClient(), fieldCount = 3)\r\n }\r\n\r\n @Test\r\n fun testEquals() {\r\n assertEquals(makeTestClient(), makeTestClient())\r\n\r\n // Different mac address\r\n assertNotEquals(makeTestClient(), TetheredClient(\r\n TEST_OTHER_MACADDR,\r\n listOf(TEST_ADDRINFO1, TEST_ADDRINFO2),\r\n TETHERING_BLUETOOTH))\r\n\r\n // Different hostname\r\n assertNotEquals(makeTestClient(), TetheredClient(\r\n TEST_MACADDR,\r\n listOf(AddressInfo(TEST_ADDR1, TEST_OTHER_HOSTNAME), TEST_ADDRINFO2),\r\n TETHERING_BLUETOOTH))\r\n\r\n // Null hostname\r\n assertNotEquals(makeTestClient(), TetheredClient(\r\n TEST_MACADDR,\r\n listOf(AddressInfo(TEST_ADDR1, null), TEST_ADDRINFO2),\r\n TETHERING_BLUETOOTH))\r\n\r\n // Missing address\r\n assertNotEquals(makeTestClient(), TetheredClient(\r\n TEST_MACADDR,\r\n listOf(TEST_ADDRINFO2),\r\n TETHERING_BLUETOOTH))\r\n\r\n // Different type\r\n assertNotEquals(makeTestClient(), TetheredClient(\r\n TEST_MACADDR,\r\n listOf(TEST_ADDRINFO1, TEST_ADDRINFO2),\r\n TETHERING_USB))\r\n }\r\n\r\n @Test\r\n fun testAddAddresses() {\r\n val client1 = TetheredClient(TEST_MACADDR, listOf(TEST_ADDRINFO1), TETHERING_USB)\r\n val client2 = TetheredClient(TEST_OTHER_MACADDR, listOf(TEST_ADDRINFO2), TETHERING_USB)\r\n assertEquals(TetheredClient(\r\n TEST_MACADDR,\r\n listOf(TEST_ADDRINFO1, TEST_ADDRINFO2),\r\n TETHERING_USB), client1.addAddresses(client2))\r\n }\r\n\r\n @Test\r\n fun testGetters() {\r\n assertEquals(TEST_MACADDR, makeTestClient().macAddress)\r\n assertEquals(listOf(TEST_ADDRINFO1, TEST_ADDRINFO2), makeTestClient().addresses)\r\n assertEquals(TETHERING_BLUETOOTH, makeTestClient().tetheringType)\r\n }\r\n\r\n @Test\r\n fun testAddressInfo_Getters() {\r\n assertEquals(TEST_ADDR1, TEST_ADDRINFO1.address)\r\n assertEquals(TEST_ADDR2, TEST_ADDRINFO2.address)\r\n assertEquals(TEST_HOSTNAME, TEST_ADDRINFO1.hostname)\r\n assertEquals(null, TEST_ADDRINFO2.hostname)\r\n }\r\n\r\n private fun makeTestClient() = TetheredClient(\r\n TEST_MACADDR,\r\n 
listOf(TEST_ADDRINFO1, TEST_ADDRINFO2),\r\n            TETHERING_BLUETOOTH)\r\n}" }, { "alpha_fraction": 0.718367338180542, "alphanum_fraction": 0.7251700758934021, "avg_line_length": 28.625, "blob_id": "f9d1d2371d40656b1eccf9cd7a354b141da55888", "content_id": "70df1e4e5c94e88745b222a9076acc21243c65c0", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 735, "license_type": "permissive", "max_line_length": 106, "num_lines": 24, "path": "/core/tests/utiltests/runtests.sh", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "#!/usr/bin/env bash\r\n\r\nif [ -z $ANDROID_BUILD_TOP ]; then\r\n  echo \"You need to source and lunch before you can use this script\"\r\n  exit 1\r\nfi\r\n\r\necho \"Running tests\"\r\n\r\nset -e # fail early\r\n\r\necho \"+ mmma -j32 $ANDROID_BUILD_TOP/frameworks/base/core/tests/utiltests\"\r\n# NOTE Don't actually run the command above since this shell doesn't inherit functions from the\r\n# caller.\r\nmake -j32 -C $ANDROID_BUILD_TOP -f build/core/main.mk MODULES-IN-frameworks-base-core-tests-utiltests\r\n\r\nset -x # print commands\r\n\r\nadb root\r\nadb wait-for-device\r\n\r\nadb install -r -g \"$OUT/data/app/FrameworksUtilTests/FrameworksUtilTests.apk\"\r\n\r\nadb shell am instrument -w \"$@\" 'com.android.frameworks.utiltests/androidx.test.runner.AndroidJUnitRunner'\r\n" }, { "alpha_fraction": 0.7169550061225891, "alphanum_fraction": 0.7224913239479065, "avg_line_length": 37.054054260253906, "blob_id": "4bf277fff3838ab33d78b8df157bdc87f04add65", "content_id": "1f8c75945963529245bd67013f4240eb25b9b3a9", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1445, "license_type": "permissive", "max_line_length": 97, "num_lines": 37, "path": "/services/backup/backuplib/java/com/android/server/backup/transport/TransportConnectionListener.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2017 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License\r\n */\r\n\r\npackage com.android.server.backup.transport;\r\n\r\nimport android.annotation.Nullable;\r\n\r\nimport com.android.internal.backup.IBackupTransport;\r\n\r\n/**\r\n * Listener to be called by {@link TransportClient#connectAsync(TransportConnectionListener,\r\n * String)}.\r\n */\r\npublic interface TransportConnectionListener {\r\n    /**\r\n     * Called when {@link TransportClient} has a transport binder available, or when it has decided\r\n     * it couldn't obtain one, in which case {@code transport} is null.\r\n     *\r\n     * @param transport A {@link IBackupTransport} transport binder or null.\r\n     * @param transportClient The {@link TransportClient} used to retrieve this transport binder.\r\n     */\r\n    void onTransportConnectionResult(\r\n            @Nullable IBackupTransport transport, TransportClient 
transportClient);\r\n}\r\n" }, { "alpha_fraction": 0.6168100833892822, "alphanum_fraction": 0.6240900158882141, "avg_line_length": 30.84782600402832, "blob_id": "f87707d0252b1c6e562449b85dfe48227b7a6ac5", "content_id": "d696990b97e581b2c8c9a43476f17df078097810", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1511, "license_type": "permissive", "max_line_length": 75, "num_lines": 46, "path": "/tools/preload/PrintPsTree.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2008 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\nimport java.io.IOException;\r\nimport java.io.FileInputStream;\r\nimport java.io.ObjectInputStream;\r\nimport java.io.BufferedInputStream;\r\n\r\n/**\r\n * Prints the process tree from a compiled log file.\r\n */\r\npublic class PrintPsTree {\r\n\r\n    public static void main(String[] args)\r\n            throws IOException, ClassNotFoundException {\r\n        if (args.length != 1) {\r\n            System.err.println(\"Usage: PrintPsTree [compiled log file]\");\r\n            System.exit(0);\r\n        }\r\n\r\n        FileInputStream fin = new FileInputStream(args[0]);\r\n        ObjectInputStream oin = new ObjectInputStream(\r\n                new BufferedInputStream(fin));\r\n\r\n        Root root = (Root) oin.readObject();\r\n\r\n        for (Proc proc : root.processes.values()) {\r\n            if (proc.parent == null) {\r\n                proc.print(); \r\n            }\r\n        }\r\n    }\r\n}\r\n" }, { "alpha_fraction": 0.6256710886955261, "alphanum_fraction": 0.631039559841156, "avg_line_length": 30.983871459960938, "blob_id": "1087318508d36ae2befcdd26e18dbc839277f4e1", "content_id": "17c802ae426e126d5f778cb228f7be95cd00dcd8", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2049, "license_type": "permissive", "max_line_length": 87, "num_lines": 62, "path": "/core/java/com/android/internal/app/procstats/DurationsTable.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2013 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.internal.app.procstats;\r\n\r\n/**\r\n * Sparse mapping table to store durations of processes, etc. running in different\r\n * states.\r\n */\r\npublic class DurationsTable extends SparseMappingTable.Table 
{\r\n public DurationsTable(SparseMappingTable tableData) {\r\n super(tableData);\r\n }\r\n\r\n /**\r\n * Add all of the durations from the other table into this one.\r\n * Resultant durations will be the sum of what is currently in the table\r\n * and the new value.\r\n */\r\n public void addDurations(DurationsTable from) {\r\n final int N = from.getKeyCount();\r\n for (int i=0; i<N; i++) {\r\n final int key = from.getKeyAt(i);\r\n this.addDuration(SparseMappingTable.getIdFromKey(key), from.getValue(key));\r\n }\r\n }\r\n\r\n /**\r\n * Add the value into the value stored for the state.\r\n *\r\n * Resultant duration will be the sum of what is currently in the table\r\n * and the new value.\r\n */\r\n public void addDuration(int state, long value) {\r\n final int key = getOrAddKey((byte)state, 1);\r\n setValue(key, getValue(key) + value);\r\n }\r\n\r\n /*\r\n public long getDuration(int state, long now) {\r\n final int key = getKey((byte)state);\r\n if (key != SparseMappingTable.INVALID_KEY) {\r\n return getValue(key);\r\n } else {\r\n return 0;\r\n }\r\n }\r\n */\r\n}\r\n\r\n\r\n" }, { "alpha_fraction": 0.6378066539764404, "alphanum_fraction": 0.6435786485671997, "avg_line_length": 24.150943756103516, "blob_id": "0e124c1e6b878ea3aa94c4eebc1178bc28c003c9", "content_id": "e5cdf27e2ab0b2aa2008cf817770fcec928361eb", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1386, "license_type": "permissive", "max_line_length": 93, "num_lines": 53, "path": "/lowpan/java/android/net/lowpan/LowpanEnergyScanResult.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2017 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage android.net.lowpan;\r\n\r\n/**\r\n * Describes the result from one channel of an energy scan.\r\n *\r\n * @hide\r\n */\r\n// @SystemApi\r\npublic class LowpanEnergyScanResult {\r\n public static final int UNKNOWN = Integer.MAX_VALUE;\r\n\r\n private int mChannel = UNKNOWN;\r\n private int mMaxRssi = UNKNOWN;\r\n\r\n LowpanEnergyScanResult() {}\r\n\r\n public int getChannel() {\r\n return mChannel;\r\n }\r\n\r\n public int getMaxRssi() {\r\n return mMaxRssi;\r\n }\r\n\r\n void setChannel(int x) {\r\n mChannel = x;\r\n }\r\n\r\n void setMaxRssi(int x) {\r\n mMaxRssi = x;\r\n }\r\n\r\n @Override\r\n public String toString() {\r\n return \"LowpanEnergyScanResult(channel: \" + mChannel + \", maxRssi:\" + mMaxRssi + \")\";\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6463648676872253, "alphanum_fraction": 0.6478395462036133, "avg_line_length": 34.8532600402832, "blob_id": "968bcfb8fc94bceee3b4102ea91295a986a425c1", "content_id": "3869dfd346c0f02991ab23e9d9392551d0a0d577", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 6781, "license_type": "permissive", 
"max_line_length": 93, "num_lines": 184, "path": "/core/java/android/app/MediaRouteActionProvider.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2012 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage android.app;\r\n\r\nimport android.content.Context;\r\nimport android.media.MediaRouter;\r\nimport android.media.MediaRouter.RouteInfo;\r\nimport android.util.Log;\r\nimport android.view.ActionProvider;\r\nimport android.view.MenuItem;\r\nimport android.view.View;\r\nimport android.view.ViewGroup;\r\n\r\nimport java.lang.ref.WeakReference;\r\n\r\n/**\r\n * The media route action provider displays a {@link MediaRouteButton media route button}\r\n * in the application's {@link ActionBar} to allow the user to select routes and\r\n * to control the currently selected route.\r\n * <p>\r\n * The application must specify the kinds of routes that the user should be allowed\r\n * to select by specifying the route types with the {@link #setRouteTypes} method.\r\n * </p><p>\r\n * Refer to {@link MediaRouteButton} for a description of the button that will\r\n * appear in the action bar menu. Note that instead of disabling the button\r\n * when no routes are available, the action provider will instead make the\r\n * menu item invisible. In this way, the button will only be visible when it\r\n * is possible for the user to discover and select a matching route.\r\n * </p>\r\n */\r\npublic class MediaRouteActionProvider extends ActionProvider {\r\n private static final String TAG = \"MediaRouteActionProvider\";\r\n\r\n private final Context mContext;\r\n private final MediaRouter mRouter;\r\n private final MediaRouterCallback mCallback;\r\n\r\n private int mRouteTypes;\r\n private MediaRouteButton mButton;\r\n private View.OnClickListener mExtendedSettingsListener;\r\n\r\n public MediaRouteActionProvider(Context context) {\r\n super(context);\r\n\r\n mContext = context;\r\n mRouter = (MediaRouter) context.getSystemService(Context.MEDIA_ROUTER_SERVICE);\r\n mCallback = new MediaRouterCallback(this);\r\n\r\n // Start with live audio by default.\r\n // TODO Update this when new route types are added; segment by API level\r\n // when different route types were added.\r\n setRouteTypes(MediaRouter.ROUTE_TYPE_LIVE_AUDIO);\r\n }\r\n\r\n /**\r\n * Sets the types of routes that will be shown in the media route chooser dialog\r\n * launched by this button.\r\n *\r\n * @param types The route types to match.\r\n */\r\n public void setRouteTypes(int types) {\r\n if (mRouteTypes != types) {\r\n // FIXME: We currently have no way of knowing whether the action provider\r\n // is still needed by the UI. Unfortunately this means the action provider\r\n // may leak callbacks until garbage collection occurs. 
This may result in\r\n // media route providers doing more work than necessary in the short term\r\n // while trying to discover routes that are no longer of interest to the\r\n // application. To solve this problem, the action provider will need some\r\n // indication from the framework that it is being destroyed.\r\n if (mRouteTypes != 0) {\r\n mRouter.removeCallback(mCallback);\r\n }\r\n mRouteTypes = types;\r\n if (types != 0) {\r\n mRouter.addCallback(types, mCallback,\r\n MediaRouter.CALLBACK_FLAG_PASSIVE_DISCOVERY);\r\n }\r\n refreshRoute();\r\n\r\n if (mButton != null) {\r\n mButton.setRouteTypes(mRouteTypes);\r\n }\r\n }\r\n }\r\n\r\n public void setExtendedSettingsClickListener(View.OnClickListener listener) {\r\n mExtendedSettingsListener = listener;\r\n if (mButton != null) {\r\n mButton.setExtendedSettingsClickListener(listener);\r\n }\r\n }\r\n\r\n @Override\r\n @SuppressWarnings(\"deprecation\")\r\n public View onCreateActionView() {\r\n throw new UnsupportedOperationException(\"Use onCreateActionView(MenuItem) instead.\");\r\n }\r\n\r\n @Override\r\n public View onCreateActionView(MenuItem item) {\r\n if (mButton != null) {\r\n Log.e(TAG, \"onCreateActionView: this ActionProvider is already associated \" +\r\n \"with a menu item. Don't reuse MediaRouteActionProvider instances! \" +\r\n \"Abandoning the old one...\");\r\n }\r\n\r\n mButton = new MediaRouteButton(mContext);\r\n mButton.setRouteTypes(mRouteTypes);\r\n mButton.setExtendedSettingsClickListener(mExtendedSettingsListener);\r\n mButton.setLayoutParams(new ViewGroup.LayoutParams(\r\n ViewGroup.LayoutParams.WRAP_CONTENT,\r\n ViewGroup.LayoutParams.MATCH_PARENT));\r\n return mButton;\r\n }\r\n\r\n @Override\r\n public boolean onPerformDefaultAction() {\r\n if (mButton != null) {\r\n return mButton.showDialogInternal();\r\n }\r\n return false;\r\n }\r\n\r\n @Override\r\n public boolean overridesItemVisibility() {\r\n return true;\r\n }\r\n\r\n @Override\r\n public boolean isVisible() {\r\n return mRouter.isRouteAvailable(mRouteTypes,\r\n MediaRouter.AVAILABILITY_FLAG_IGNORE_DEFAULT_ROUTE);\r\n }\r\n\r\n private void refreshRoute() {\r\n refreshVisibility();\r\n }\r\n\r\n private static class MediaRouterCallback extends MediaRouter.SimpleCallback {\r\n private final WeakReference<MediaRouteActionProvider> mProviderWeak;\r\n\r\n public MediaRouterCallback(MediaRouteActionProvider provider) {\r\n mProviderWeak = new WeakReference<MediaRouteActionProvider>(provider);\r\n }\r\n\r\n @Override\r\n public void onRouteAdded(MediaRouter router, RouteInfo info) {\r\n refreshRoute(router);\r\n }\r\n\r\n @Override\r\n public void onRouteRemoved(MediaRouter router, RouteInfo info) {\r\n refreshRoute(router);\r\n }\r\n\r\n @Override\r\n public void onRouteChanged(MediaRouter router, RouteInfo info) {\r\n refreshRoute(router);\r\n }\r\n\r\n private void refreshRoute(MediaRouter router) {\r\n MediaRouteActionProvider provider = mProviderWeak.get();\r\n if (provider != null) {\r\n provider.refreshRoute();\r\n } else {\r\n router.removeCallback(this);\r\n }\r\n }\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6457473039627075, "alphanum_fraction": 0.660611093044281, "avg_line_length": 32.599998474121094, "blob_id": "dc97dd6b24c7c6e7e660992a61f4099c1118f00b", "content_id": "46a368c2a512041f1a6f86f19bf2ea9d95dd2a31", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1211, "license_type": "permissive", "max_line_length": 80, "num_lines": 35, "path": 
"/core/tests/hosttests/test-apps/MultiDexLegacyAndException/src/com/android/multidexlegacyandexception/MiniIntermediateClass.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2014 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\r\n * use this file except in compliance with the License. You may obtain a copy of\r\n * the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\r\n * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\r\n * License for the specific language governing permissions and limitations under\r\n * the License.\r\n */\r\npackage com.android.multidexlegacyandexception;\r\n\r\npublic class MiniIntermediateClass {\r\n\r\n public static int get3(boolean condition) {\r\n ClassInSecondaryDex thrower = new ClassInSecondaryDex(condition);\r\n try {\r\n thrower.canThrow2();\r\n thrower.canThrow1();\r\n return 1;\r\n } catch (ExceptionInMainDex e) {\r\n return 10;\r\n } catch (ExceptionInSecondaryDex e) {\r\n return 11;\r\n } catch (SuperExceptionInSecondaryDex|SuperExceptionInMainDex e) {\r\n return 23;\r\n }\r\n }\r\n\r\n}\r\n" }, { "alpha_fraction": 0.6088154315948486, "alphanum_fraction": 0.6272727251052856, "avg_line_length": 36.61701965332031, "blob_id": "71cd521b04c9f80ab3442c42e3559b5bbd07c22a", "content_id": "b75c79dbbb651fe7925e101e7b2c94004900465e", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 3630, "license_type": "permissive", "max_line_length": 100, "num_lines": 94, "path": "/tests/VectorDrawableTest/src/com/android/test/dynamic/VectorDrawable01.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2014 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file except\r\n * in compliance with the License. You may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software distributed under the License\r\n * is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express\r\n * or implied. 
See the License for the specific language governing permissions and limitations under\r\n * the License.\r\n */\r\npackage com.android.test.dynamic;\r\n\r\nimport android.app.Activity;\r\nimport android.graphics.drawable.VectorDrawable;\r\nimport android.os.Bundle;\r\nimport android.util.Log;\r\nimport android.view.View;\r\nimport android.view.View.OnClickListener;\r\nimport android.view.ViewGroup;\r\nimport android.widget.Button;\r\nimport android.widget.CheckBox;\r\nimport android.widget.CompoundButton;\r\nimport android.widget.CompoundButton.OnCheckedChangeListener;\r\nimport android.widget.GridLayout;\r\n\r\n@SuppressWarnings({\"UnusedDeclaration\"})\r\npublic class VectorDrawable01 extends Activity {\r\n private static final String LOGCAT = \"VectorDrawable1\";\r\n int[] icon = {\r\n R.drawable.vector_drawable01,\r\n R.drawable.vector_drawable02,\r\n R.drawable.vector_drawable03,\r\n R.drawable.vector_drawable04,\r\n R.drawable.vector_drawable05,\r\n R.drawable.vector_drawable06,\r\n R.drawable.vector_drawable07,\r\n R.drawable.vector_drawable08,\r\n R.drawable.vector_drawable09,\r\n R.drawable.vector_drawable10,\r\n R.drawable.vector_drawable11,\r\n R.drawable.vector_drawable12,\r\n R.drawable.vector_drawable13,\r\n R.drawable.vector_drawable14,\r\n R.drawable.vector_drawable15,\r\n R.drawable.vector_drawable16,\r\n R.drawable.vector_drawable17,\r\n R.drawable.vector_drawable18,\r\n R.drawable.vector_drawable19,\r\n R.drawable.vector_drawable20\r\n };\r\n\r\n @Override\r\n protected void onCreate(Bundle savedInstanceState) {\r\n super.onCreate(savedInstanceState);\r\n GridLayout container = new GridLayout(this);\r\n container.setColumnCount(5);\r\n container.setBackgroundColor(0xFF888888);\r\n final Button []bArray = new Button[icon.length];\r\n\r\n CheckBox toggle = new CheckBox(this);\r\n toggle.setText(\"Toggle\");\r\n toggle.setChecked(true);\r\n toggle.setOnCheckedChangeListener(new OnCheckedChangeListener() {\r\n @Override\r\n public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {\r\n ViewGroup vg = (ViewGroup) buttonView.getParent();\r\n for (int i = 0, count = vg.getChildCount(); i < count; i++) {\r\n View child = vg.getChildAt(i);\r\n if (child != buttonView) {\r\n child.setEnabled(isChecked);\r\n }\r\n }\r\n }\r\n });\r\n container.addView(toggle);\r\n\r\n for (int i = 0; i < icon.length; i++) {\r\n Button button = new Button(this);\r\n bArray[i] = button;\r\n button.setWidth(200);\r\n button.setBackgroundResource(icon[i]);\r\n container.addView(button);\r\n VectorDrawable vd = (VectorDrawable) button.getBackground();\r\n vd.setAlpha((i + 1) * (0xFF / (icon.length + 1)));\r\n }\r\n\r\n setContentView(container);\r\n\r\n }\r\n\r\n}\r\n" }, { "alpha_fraction": 0.703529417514801, "alphanum_fraction": 0.7082353234291077, "avg_line_length": 34.17021179199219, "blob_id": "d8cdbf9dcaae8e288e26d9999a99ac310ec0a02c", "content_id": "056361c8c21a033d18400ca2827a3432f2c4006a", "detected_licenses": [ "Apache-2.0", "LicenseRef-scancode-unicode" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1700, "license_type": "permissive", "max_line_length": 86, "num_lines": 47, "path": "/services/core/java/com/android/server/hdmi/HdmiAnnotations.java", "repo_name": "Ankits-lab/frameworks_base", "src_encoding": "UTF-8", "text": "/*\r\n * Copyright (C) 2014 The Android Open Source Project\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You 
may obtain a copy of the License at\r\n *\r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.android.server.hdmi;\r\n\r\nimport java.lang.annotation.ElementType;\r\nimport java.lang.annotation.Retention;\r\nimport java.lang.annotation.RetentionPolicy;\r\nimport java.lang.annotation.Target;\r\n\r\n/**\r\n * Annotation container for Hdmi control service package.\r\n */\r\npublic class HdmiAnnotations {\r\n    /**\r\n     * Annotation type used to mark a method which should be run on the service thread.\r\n     * This annotation should go with {@code assertRunOnServiceThread} used to verify\r\n     * whether it's called from the service thread.\r\n     */\r\n    @Retention(RetentionPolicy.RUNTIME)\r\n    @Target({ElementType.METHOD, ElementType.FIELD})\r\n    public @interface ServiceThreadOnly {\r\n    }\r\n\r\n    /**\r\n     * Annotation type used to mark a method which should be run on the io thread.\r\n     * This annotation should go with {@code assertRunOnIoThread} used to verify\r\n     * whether it's called from the io thread.\r\n     */\r\n    @Retention(RetentionPolicy.RUNTIME)\r\n    @Target({ElementType.METHOD, ElementType.FIELD})\r\n    public @interface IoThreadOnly {\r\n    }\r\n}\r\n" } ]
277
martis97/FB_Bot
https://github.com/martis97/FB_Bot
62162de24851e0aaf614056c4f72d5cd28caf3c4
ab30e647a7b6705359aab16bce14125faa9962d9
3456df416fee2b2eca35f2771bab201f48394eb0
refs/heads/master
2020-03-30T20:43:22.887879
2019-02-21T23:05:43
2019-02-21T23:05:43
151,600,984
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5835360884666443, "alphanum_fraction": 0.5882670879364014, "avg_line_length": 30.88793182373047, "blob_id": "92bc7cb5e627eae37dc0dd937ccde98b5e45d155", "content_id": "6ffd538e4c62246a42dab4d8da6d05822775bda0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7398, "license_type": "no_license", "max_line_length": 82, "num_lines": 232, "path": "/FbBot.py", "repo_name": "martis97/FB_Bot", "src_encoding": "UTF-8", "text": "# Selenium Webdriver\nfrom selenium import webdriver\n\n# Selenium Exceptions\nfrom selenium.common.exceptions import NoSuchElementException,\\\n TimeoutException, WebDriverException\n\n# Selenium Explicit wait\nfrom selenium.webdriver.common.by import By\nfrom selenium.webdriver.support.ui import WebDriverWait\nfrom selenium.webdriver.support import expected_conditions as EC\n\n# ExplicitWait\nfrom FbBot.ExplicitWait import ExplicitWait\n\n# Misc.\nimport getpass\nimport time\nimport random\n\n\nclass FBBot(object):\n \"\"\"Facebook bot class defining actions required to log in to Facebook,\n find a page and like the first 30 (default amount) posts on the timeline.\n\n Attributes:\n browser: Webdriver instance of Chrome used to drive automation.\n timeout: Amount of seconds the WebDriver will wait for a \n specified condition.\n url: Facebook URL \n email: Email used to log in to Facebook\n password: Password used to authenticate the user\n Wait: Instance of ExcplicitWait class\n \"\"\"\n\n def __init__(self, email, password):\n \"\"\"Class initialisation.\"\"\"\n\n self.browser = self.create_browser()\n self.timeout = 60\n self.url = \"https://www.facebook.com/\"\n self.email = email\n self.password = password\n self.Wait = ExplicitWait(self.browser, self.timeout)\n\n def create_browser(self, notifications_off=True): \n \"\"\"Creates a Webdriver instance of Chrome to drive the automation.\n \n Args:\n notifications_off: (Default: True) Boolean value if browser \n required with notifications off. FB requests access to \n notifications upon a first user's visit to the website.\n\n Returns:\n browser: Webdriver instance of Chrome used to drive automation.\n Set to be fullscreen.\n TODO: Add more Chrome settings \n \"\"\"\n\n if notifications_off: \n # Changing settings to disable notifications\n chrome_options = webdriver.ChromeOptions()\n prefs = \\\n {\"profile.default_content_setting_values.notifications\" : 2}\n chrome_options.add_experimental_option(\"prefs\",prefs)\n self.browser = webdriver.Chrome(chrome_options=chrome_options)\n\n return self.browser.fullscreen_window()\n \n else:\n self.browser = webdriver.Chrome()\n return self.browser.fullscreen_window()\n\n def navigate_to_fb(self):\n \"\"\"Navigates to the web page. 
\"\"\"\n\n self.browser.get(self.url)\n\n def login(self):\n \"\"\"Enters email and password to the respective fields and\n presses 'Log in'.\n \"\"\"\n\n email_entry = self.Wait.id_visible(\"email\")\n email_entry.send_keys(self.email)\n\n password_entry = self.Wait.id_visible(\"pass\")\n password_entry.send_keys(self.password)\n\n login_btn = self.Wait.id_visible(\"loginbutton\")\n login_btn.click()\n\n try:\n WebDriverWait(self.browser, 4) \\\n .until(EC.visibility_of_element_located \\\n ((By.CLASS_NAME, \"_4rbf\")))\n\n print(\"Incorrect credentials have been entered!\")\n self.browser.quit()\n\n except TimeoutException:\n pass\n\n def enter_to_search(self, page_name):\n \"\"\"Looks for a Facebook using a search bar.\n \n Args:\n page_name: (str) Text to send to the search box.\n \"\"\"\n\n search_bar_element = '//input[@placeholder=\"Search\"]'\n\n WebDriverWait(self.browser, 4) \\\n .until(EC.visibility_of_element_located \\\n ((By.XPATH, search_bar_element)))\n \n search_bar = self.browser. \\\n find_element_by_xpath(search_bar_element)\n search_bar.send_keys(page_name)\n\n def press_search(self):\n \"\"\"Initiating the search by pressing the 'Search' button.\"\"\"\n\n search_btn = self.Wait.class_name_clickable(\"_585_\")\n search_btn.click()\n\n def select_page_index(self,number):\n \"\"\"Selects the search result by its position, starting from 0.\n\n Args:\n number: (int) Order number of available pages. \n 0 for the fist page, 1 for second etc.\n \"\"\"\n\n self.Wait.class_name_clickable(\"_52eh\")\n all_pages = self.browser.find_elements_by_class_name(\"_52eh\")\n all_pages[number].click()\n\n def select_page_name(self, page_name):\n \"\"\"Selects the search result, given expected page name.\n\n Args:\n page_name: Name of the page to search for.\n \"\"\"\n\n self.Wait.class_name_clickable(\"_52eh\") \n all_pages = self.browser.find_elements_by_class_name(\"_52eh\")\n\n for page in all_pages: \n if page.text == page_name:\n page.click()\n else:\n continue\n\n def unlike_all_posts(self): \n \"\"\"Unlike any posts that have been liked already\"\"\"\n\n liked_xpath = '//a[@aria-pressed = \"true\"]'\n\n try:\n WebDriverWait(self.browser, 5) \\\n .until(EC.element_to_be_clickable((By.XPATH, liked_xpath)))\n liked_btns = self.browser.find_elements_by_xpath(liked_xpath)\n\n print(\"Unliking pages that have been already liked..\")\n for like_button in liked_btns:\n if like_button.get_attribute(\"data-testid\") == 'fb-ufi-likelink':\n like_button.click()\n time.sleep(float(\"%.2f\" % random.uniform(1, 3)))\n else:\n continue\n except TimeoutException: \n print(\"No liked posts found\")\n \n def like_posts(self, posts_to_like): \n \"\"\"Likes the last 30 posts on the timeline.\n\n Args:\n posts_to_like: The amount of posts to like, starting from the\n beginning.\n \"\"\"\n\n not_liked_btns_xpath = '//a[@aria-pressed = \"false\"]'\n self.Wait.xpath_clickable(not_liked_btns_xpath)\n\n like_count = 0\n not_liked_btns = self.browser.find_elements_by_xpath(not_liked_btns_xpath)\n print(f\"Liking the latest {posts_to_like} posts on the timeline\")\n for like_button in not_liked_btns:\n if like_count == posts_to_like:\n print(f\"{like_count} most recent posts have been liked\")\n break\n if like_button.get_attribute(\"data-testid\") == 'fb-ufi-likelink':\n like_button.click()\n time.sleep(float(\"%.2f\" % random.uniform(1, 3)))\n like_count += 1\n else:\n continue\n \n\n\ndef mr_robot(page_name=\"Crazy Programmer\", posts_to_like=25):\n \"\"\"Function call and parameter definition.\n \n Args:\n 
page_name: Name of the page to search for. Defaults to\n            \"Crazy Programmer\".\n        posts_to_like: The number of posts to like, starting from the\n            top of the timeline.\n    \"\"\"\n\n    # Get Credentials\n    creds = \\\n        {\n            \"email\" : input(\"Enter email: \"),\n            \"password\" : getpass.getpass(\"Enter password : \")\n        }\n\n    # Class instance\n    fb = FBBot(**creds)\n\n    # Orchestration\n    fb.navigate_to_fb()\n    fb.login()\n    fb.enter_to_search(page_name)\n    fb.press_search()\n    fb.select_page_name(page_name)\n    fb.unlike_all_posts()\n    fb.like_posts(posts_to_like)\n    fb.browser.quit()\n\nmr_robot()\n" }, { "alpha_fraction": 0.5755903720855713, "alphanum_fraction": 0.5755903720855713, "avg_line_length": 25.890207290649414, "blob_id": "9403c8d4c88e92d233ce757537f7f67f20f89c11", "content_id": "6e2bdc21e45229fd31ca8fe227434dbf5b4a5f08", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9062, "license_type": "no_license", "max_line_length": 71, "num_lines": 337, "path": "/ExplicitWait.py", "repo_name": "martis97/FB_Bot", "src_encoding": "UTF-8", "text": "# Selenium Imports\r\nfrom selenium.webdriver.common.by import By\r\nfrom selenium.webdriver.support.ui import WebDriverWait\r\nfrom selenium.webdriver.support import expected_conditions as EC\r\n\r\n\r\nclass ExplicitWait(object):\r\n    \"\"\"This class holds explicit wait methods for elements and\r\n    certain conditions.\r\n\r\n    Attributes:\r\n        browser: Webdriver instance of Chrome used to drive automation.\r\n        timeout: Number of seconds the WebDriver will wait for a\r\n            specified condition.\r\n    \"\"\"\r\n\r\n    def __init__(self, browser, timeout):\r\n        \"\"\"Class Initialisation\"\"\"\r\n        self.browser = browser\r\n        self.timeout = timeout\r\n\r\n    def class_name_visible(self, element):\r\n        \"\"\"Waits for an element identified by the class name to be\r\n        visible.\r\n\r\n        Args:\r\n            element: The element's class name.\r\n\r\n        Returns:\r\n            element: WebDriver page element\r\n        \"\"\"\r\n\r\n        element = WebDriverWait(self.browser, self.timeout).until \\\r\n            (EC.visibility_of_element_located(\r\n                (By.CLASS_NAME, element)))\r\n\r\n        return element\r\n\r\n    def class_name_multiple_visible(self, element):\r\n        \"\"\"Waits for multiple elements identified by the class name to\r\n        be visible.\r\n\r\n        Args:\r\n            element: The element's class name.\r\n\r\n        Returns:\r\n            element: WebDriver page element\r\n        \"\"\"\r\n\r\n        element = WebDriverWait(self.browser, self.timeout).until \\\r\n            (EC.visibility_of_all_elements_located(\r\n                (By.CLASS_NAME, element)))\r\n\r\n        return element\r\n\r\n    def class_name_clickable(self, element):\r\n        \"\"\"Waits for an element identified by the class name to be\r\n        clickable.\r\n\r\n        Args:\r\n            element: The element's class name.\r\n\r\n        Returns:\r\n            element: WebDriver page element\r\n        \"\"\"\r\n\r\n        element = WebDriverWait(self.browser, self.timeout).until \\\r\n            (EC.element_to_be_clickable(\r\n                (By.CLASS_NAME, element)))\r\n\r\n        return element\r\n\r\n    def class_name_invisible(self, element):\r\n        \"\"\"Waits for an element identified by the class name to be\r\n        invisible.\r\n\r\n        Args:\r\n            element: The element's class name.\r\n\r\n        Returns:\r\n            element: WebDriver page element\r\n        \"\"\"\r\n\r\n        element = WebDriverWait(self.browser, self.timeout).until \\\r\n            (EC.invisibility_of_element_located(\r\n                (By.CLASS_NAME, element)))\r\n\r\n        return element\r\n\r\n    def any_class_name_multiple_visible(self, element):\r\n        \"\"\"Waits for any elements identified by the class name to be\r\n        visible.\r\n\r\n        Args:\r\n            element: The element's class name.\r\n\r\n        Returns:\r\n            element: WebDriver page element\r\n        \"\"\"\r\n\r\n        element = WebDriverWait(self.browser, self.timeout).until \\\r\n            
(EC.visibility_of_any_elements_located(\n (By.CLASS_NAME, element)))\n\n return element\n\n def css_selector_visible(self, element):\n \"\"\"Waits for an element identified by the CSS selector to be \n visible.\n\n Args:\n element: The element's CSS selector.\n\n Returns:\n element: WebDriver page element\n \"\"\"\n\n element = WebDriverWait(self.browser, self.timeout).until \\\n (EC.visibility_of_element_located(\n (By.CSS_SELECTOR, element)))\n\n return element\n\n def css_selector_multiple_visible(self, element):\n \"\"\"Waits for multiple elements identified by the CSS selector \n to be visible. \n\n Args:\n element: The element's CSS selector.\n\n Returns:\n element: WebDriver page element\n \"\"\"\n\n element = WebDriverWait(self.browser, self.timeout).until \\\n (EC.visibility_of_all_elements_located(\n (By.CSS_SELECTOR, element)))\n\n return element\n\n def css_selector_clickable(self, element):\n \"\"\"Waits for an element identified by the CSS selector to be\n clickable.\n\n Args:\n element: The element's CSS selector.\n\n Returns:\n element: WebDriver page element\n \"\"\"\n\n element = WebDriverWait(self.browser, self.timeout).until \\\n (EC.element_to_be_clickable(\n (By.CSS_SELECTOR, element)))\n\n return element\n\n def css_selector_invisible(self, element):\n \"\"\"Waits for an element identified by CSS selector to be\n invisible.\n\n Args:\n element: The element's CSS selector.\n\n Returns:\n element: WebDriver page element\n \"\"\"\n\n element = WebDriverWait(self.browser, self.timeout).until \\\n (EC.invisibility_of_element_located(\n (By.CSS_SELECTOR, element)))\n\n return element\n\n def xpath_visible(self, element):\n \"\"\"Waits for an element identified by the XPath to be visible.\n\n Args:\n element: The element's XPath.\n\n Returns:\n element: WebDriver page element\n \"\"\"\n\n element = WebDriverWait(self.browser, self.timeout).until \\\n (EC.visibility_of_element_located(\n (By.XPATH, element)))\n\n return element\n\n def xpath_clickable(self, element):\n \"\"\"Waits for an element identified by the XPath to be \n clickable.\n\n Args:\n element: The element's XPath.\n\n Returns:\n element: WebDriver page element\n \"\"\"\n\n element = WebDriverWait(self.browser, self.timeout).until \\\n (EC.element_to_be_clickable(\n (By.XPATH, element)))\n\n return element\n\n def xpath_invisible(self, element):\n \"\"\"Waits for an element identified by the XPath to be \n invisible.\n\n Args:\n element: The element's XPath.\n\n Returns:\n element: WebDriver page element\n \"\"\"\n\n element = WebDriverWait(self.browser, self.timeout).until \\\n (EC.invisibility_of_element_located(\n (By.XPATH, element)))\n\n return element\n\n def link_text_visible(self, element):\n \"\"\"Waits for an element identified by the link text to be\n visible. \n\n Args:\n element: The element's Link Text.\n\n Returns:\n element: WebDriver page element\n \"\"\"\n\n element = WebDriverWait(self.browser, self.timeout).until \\\n (EC.visibility_of_element_located(\n (By.LINK_TEXT, element)))\n\n return element\n\n def link_text_clickable(self, element):\n \"\"\"Waits for an element identified by the link text to be\n clickable.\n\n Args:\n element: The element's Link Text.\n\n Returns:\n element: WebDriver page element\n \"\"\"\n\n element = WebDriverWait(self.browser, self.timeout).until \\\n (EC.element_to_be_clickable(\n (By.LINK_TEXT, element)))\n\n return element\n\n def tag_name_visible(self, element):\n \"\"\"Waits for an element identified by the tag name to be\n visible. 
\n\n Args:\n element: The element's Tag Name.\n\n Returns:\n element: WebDriver page element\n \"\"\"\n\n element = WebDriverWait(self.browser, self.timeout).until \\\n (EC.visibility_of_element_located(\n (By.TAG_NAME, element)))\n\n return element\n\n def id_visible(self, element):\n \"\"\"Waits for an element identified by the ID to be visible. \n\n Args:\n element: The element's ID.\n\n Returns:\n element: WebDriver page element\n \"\"\"\n\n element = WebDriverWait(self.browser, self.timeout).until \\\n (EC.visibility_of_element_located(\n (By.ID, element)))\n\n return element\n\n def id_clickable(self, element):\n \"\"\"Waits for an element identified by the ID to be clickable.\n\n Args:\n element: The element's ID.\n\n Returns:\n element: WebDriver page element\n \"\"\"\n\n element = WebDriverWait(self.browser, self.timeout).until \\\n (EC.element_to_be_clickable(\n (By.ID, element)))\n\n return element\n\n def id_invisible(self, element):\n \"\"\"Waits for an element identified by the ID to be invisible.\n\n Args:\n element: The element's ID.\n\n Returns:\n element: WebDriver page element\n \"\"\"\n\n element = WebDriverWait(self.browser, self.timeout).until \\\n (EC.invisibility_of_element_located(\n (By.ID, element)))\n\n return element\n\n def number_of_tabs(self, num_tabs):\n \"\"\"Waits for a specified number of tabs to be available.\n \n Args:\n num_tabs: The number of tabs the WebDriver should wait to\n be available.\n\n Returns:\n None; prints a confirmation once the tabs are detected.\n \"\"\"\n\n WebDriverWait(self.browser, self.timeout).until \\\n (EC.number_of_windows_to_be((num_tabs)))\n print(\"%d tabs detected.\" % num_tabs)\n" }, { "alpha_fraction": 0.5869080424308777, "alphanum_fraction": 0.5916305780410767, "avg_line_length": 33.654544830322266, "blob_id": "467e84f510f7a9d0b5a78e8242be052e03840992", "content_id": "0fea88aa151c3c21fd8775e1e2f2e450f68eda7c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7623, "license_type": "no_license", "max_line_length": 89, "num_lines": 220, "path": "/__init__.py", "repo_name": "martis97/FB_Bot", "src_encoding": "UTF-8", "text": "# Selenium Webdriver\nfrom selenium import webdriver\n\n# Selenium Exceptions\nfrom selenium.common.exceptions import NoSuchElementException,\\\n TimeoutException, WebDriverException\n\n# Explicit Wait (To be replaced with WaitFor module)\nfrom selenium.webdriver.common.by import By\nfrom selenium.webdriver.support.ui import WebDriverWait\nfrom selenium.webdriver.support import expected_conditions as EC\n\n# Misc.\nimport getpass\nimport time\nimport random\n\n\nclass FBBot(object):\n \"\"\"Facebook bot class defining actions required to log in to Facebook,\n find a page (\"Crazy Programmer\" by default), and like the first 30 \n (default amount) posts on the timeline.\n \"\"\"\n\n def __init__(self, username, password):\n \"\"\"Class initialisation.\"\"\"\n self.browser = None \n self.timeout = 60\n self.url = \"https://www.facebook.com/\"\n self.username = username\n self.password = password\n\n def create_browser(self, notifications_off=True): \n \"\"\"Creates a Webdriver instance of Chrome to drive the automation.\n \n Args:\n notifications_off: (Default: True) Whether to create the browser \n with notifications disabled. 
FB asks for access to \n notifications when first accessed.\n Returns:\n browser: Webdriver instance of Chrome used to drive automation.\n Set to be fullscreen.\n TODO: Add more Chrome settings \n \"\"\"\n\n if notifications_off: \n # Changing settings to disable notifications\n chrome_options = webdriver.ChromeOptions()\n prefs = \\\n {\"profile.default_content_setting_values.notifications\" : 2}\n chrome_options.add_experimental_option(\"prefs\",prefs)\n self.browser = webdriver.Chrome(chrome_options=chrome_options)\n\n return self.browser.fullscreen_window()\n \n else:\n self.browser = webdriver.Chrome()\n return self.browser.fullscreen_window()\n\n def navigate_to_url(self):\n \"\"\" Navigates the browser to the web page\n \"\"\"\n\n self.browser.get(self.url)\n\n def login_process(self):\n \"\"\"Enters username and password to the respective fields and\n presses 'Log in'.\n \"\"\"\n\n email_entry = WebDriverWait(self.browser, self.timeout) \\\n .until(EC.presence_of_element_located((By.ID, \"email\"))) # This will\n\n email_entry.send_keys(self.username)\n\n password_entry = WebDriverWait(self.browser, self.timeout) \\\n .until(EC.presence_of_element_located((By.ID, \"pass\"))) # be replaced with\n\n password_entry.send_keys(self.password)\n\n login_btn = WebDriverWait(self.browser, self.timeout) \\\n .until(EC.element_to_be_clickable((By.ID, \"loginbutton\"))) # WaitFor module\n\n login_btn.click() \n try:\n WebDriverWait(self.browser, 4) \\\n .until(EC.visibility_of_element_located \\\n ((By.CLASS_NAME, \"_4rbf\"))) # for cleaner code\n\n incorrectCredsElement = self.browser.find_element_by_class_name(\"_4rbf\")\n\n if incorrectCredsElement.is_displayed():\n print(\"Incorrect credentials have been entered!\")\n self.browser.quit()\n\n except TimeoutException:\n pass\n\n def enter_to_search(self, search_value):\n \"\"\"Looks for a Facebook page using a search bar.\n \n Args:\n search_value: (str) Text to send to the search box.\n \"\"\"\n\n try:\n search_bar_element = '//input[@placeholder = \"Search\"]'\n\n WebDriverWait(self.browser, 4) \\\n .until(EC.visibility_of_element_located \\\n ((By.XPATH, search_bar_element))) # Again needs WaitFor\n except TimeoutException:\n self.enter_to_search(search_value)\n \n search_bar = self.browser.find_element_by_xpath(search_bar_element)\n search_bar.send_keys(search_value)\n\n def press_search(self):\n \"\"\"Initiates the search by pressing the 'Search' button.\"\"\"\n\n search_btn = WebDriverWait(self.browser, self.timeout) \\\n .until(EC.element_to_be_clickable((By.CLASS_NAME, \"_585_\"))) # And again\n\n search_btn.click()\n\n def select_page_index(self, number):\n \"\"\"Selects the search result by its position, starting from 1.\n Args:\n number: (int) Positional index of available pages. 
\n 1 for the first page, 2 for the second etc.\n \"\"\"\n\n WebDriverWait(self.browser, self.timeout) \\\n .until(EC.element_to_be_clickable \\\n ((By.CLASS_NAME, \"_52eh\"))) # And again lol\n\n all_pages = self.browser.find_elements_by_class_name(\"_52eh\")\n all_pages[number - 1].click()\n\n def select_page_name(self, name):\n \"\"\"Selects the search result, given the expected page name.\"\"\"\n\n WebDriverWait(self.browser, self.timeout) \\\n .until(EC.element_to_be_clickable \\\n ((By.CLASS_NAME, \"_52eh\"))) # And again \n\n all_pages = self.browser.find_elements_by_class_name(\"_52eh\")\n\n for page in all_pages: \n if page.text == name:\n page.click()\n break # stop iterating once the page has been opened\n\n def press_like(self, num_posts): \n \"\"\"Likes the most recent num_posts posts on the timeline.\n It will first unlike any posts that have been liked already.\n \"\"\"\n\n liked_xpath = '//a[@aria-pressed = \"true\"]'\n not_liked_xpath = '//a[@aria-pressed = \"false\"]'\n post_like = 'fb-ufi-likelink'\n\n try:\n WebDriverWait(self.browser, 5) \\\n .until(EC.element_to_be_clickable((By.XPATH, liked_xpath)))\n liked_btns = self.browser.find_elements_by_xpath(liked_xpath)\n\n print(\"Unliking posts that have already been liked...\")\n for like_button in liked_btns:\n if like_button.get_attribute(\"data-testid\") == post_like:\n like_button.click()\n time.sleep(random.uniform(1, 1.99)) # fresh random delay per click\n else:\n continue\n except TimeoutException: \n print(\"No liked posts found\")\n\n WebDriverWait(self.browser, self.timeout) \\\n .until(EC.element_to_be_clickable((By.XPATH, not_liked_xpath)))\n\n liked_posts = 0\n not_liked_btns = self.browser.find_elements_by_xpath(not_liked_xpath)\n print(\"Liking the latest %d posts on the timeline..\" % num_posts)\n for like_button in not_liked_btns:\n if like_button.get_attribute(\"data-testid\") == post_like:\n like_button.click()\n time.sleep(random.uniform(1, 1.99)) # fresh random delay per click\n liked_posts += 1\n else:\n continue\n\n if liked_posts == num_posts:\n print(\"%d most recent posts have been liked\" % num_posts)\n break\n\ndef mr_robot():\n \"\"\"Entry point: collects credentials and runs the bot.\"\"\"\n\n # Param definitions\n username = input(\"Enter username: \")\n password = getpass.getpass(\"Enter password for %s: \" % username)\n search_value = \"Crazy Programmer\"\n page_number = 1\n number_likes = 25\n\n # Class instance\n fb = FBBot(username, password)\n\n # Orchestra\n fb.create_browser()\n fb.navigate_to_url()\n fb.login_process()\n fb.enter_to_search(search_value)\n fb.press_search()\n fb.select_page_index(page_number)\n fb.press_like(number_likes)\n\nmr_robot()" } ]
3
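The FB_Bot record above pairs the FBBot class with the ExplicitWait helper through calls such as `self.Wait.id_visible("email")`. Below is a minimal wiring sketch under stated assumptions: Selenium 3 (matching the repo's `find_element_by_*` calls), a 10-second timeout chosen only for illustration, and placeholder credentials.

```python
# Minimal sketch composing ExplicitWait with a WebDriver (assumes Selenium 3).
from selenium import webdriver

from ExplicitWait import ExplicitWait

browser = webdriver.Chrome()
browser.get("https://www.facebook.com/")  # URL taken from FBBot.__init__

wait = ExplicitWait(browser, timeout=10)  # illustrative timeout; FBBot uses 60
email_entry = wait.id_visible("email")    # blocks until the #email field is visible
email_entry.send_keys("[email protected]")  # placeholder credentials, not real ones
browser.quit()
```

Each `*_visible`/`*_clickable` helper simply wraps `WebDriverWait(...).until(EC...)` for one locator strategy, so the calling code stays one line per wait.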
philippslang/board
https://github.com/philippslang/board
f5fc28960386272036d47f42c98f5fb9dd259b8e
11a51ba3824d9a3c106e1b2b32baef167e82744d
8cf46cc78ab487a0b738ad9797280f52536b8946
refs/heads/master
2022-05-10T14:10:31.561644
2017-12-16T07:03:30
2017-12-16T07:03:30
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7454323768615723, "alphanum_fraction": 0.7649208307266235, "avg_line_length": 27.310344696044922, "blob_id": "fc7e8433c9738c36f895895a978b9550391ed62b", "content_id": "4057439a0daf9f928274bb047bb1048a6bddbb7d", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 821, "license_type": "permissive", "max_line_length": 145, "num_lines": 29, "path": "/prototype/PaddleTensorBoardDemo/fluid/README.md", "repo_name": "philippslang/board", "src_encoding": "UTF-8", "text": "## Use Tensorboard to visualize PaddlePaddle Fluid data\n\nThis is a prototype to demonstrate the feasibility to use Tensorboard to display PaddlePaddle's training data metrics and graph.\n\n\n#### 1) Startup development PaddlePaddle docker image in board/PaddleTensorBoardDemo/fluid directory. Install dependencies inside docker image.\n\n```\ncd board/PaddleTensorBoardDemo\ndocker pull paddlepaddle/paddle:latest\ndocker run -p 6006:6006 -it -v `pwd`:/paddle paddlepaddle/paddle:latest /bin/bash\npip install tensorflow\n```\n\n#### 2) Run PaddlePaddle Fluid unit tests\n\n```\ncd /paddle\npython ./fluid/test_fit_a_line.py\npython ./fluid/test_recognize_digits_conv.py\n```\n\n#### 3) Launch TensorBoard after training is complete\n\n```\ntensorboard --logdir=/paddle/logs\n```\n\n#### 4) Launch browser and navigate to http://localhost:6006/\n" }, { "alpha_fraction": 0.5032235980033875, "alphanum_fraction": 0.5256192684173584, "avg_line_length": 20.8370361328125, "blob_id": "c6977349eeb5a04405eeb223f80708928ffa9cf9", "content_id": "1509ec4c6e90771e0622b86d860a518905fac51b", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": true, "language": "JavaScript", "length_bytes": 2947, "license_type": "permissive", "max_line_length": 122, "num_lines": 135, "path": "/prototype/PaddleBoard/static/js/chart.js", "repo_name": "philippslang/board", "src_encoding": "UTF-8", "text": "function InitChart(json) {\n\n var data = JSON.parse(json);\n\n var vis = d3.select(\"#lineChart\"),\n WIDTH = 400,\n HEIGHT = 200,\n MARGINS = {\n top: 20,\n right: 20,\n bottom: 20,\n left: 50\n },\n xRange = d3.scale.linear().range([MARGINS.left, WIDTH - MARGINS.right]).domain([d3.min(data, function (d) {\n return d.x;\n }),\n d3.max(data, function (d) {\n return d.x;\n })\n ]),\n\n yRange = d3.scale.linear().range([HEIGHT - MARGINS.top, MARGINS.bottom]).domain([d3.min(data, function (d) {\n return d.y;\n }),\n d3.max(data, function (d) {\n return d.y;\n })\n ]),\n\n xAxis = d3.svg.axis()\n .scale(xRange)\n .tickSize(5)\n .tickSubdivide(true),\n\n yAxis = d3.svg.axis()\n .scale(yRange)\n .tickSize(5)\n .orient(\"left\")\n .tickSubdivide(true);\n\n\n vis.append(\"svg:g\")\n .attr(\"class\", \"x axis\")\n .attr(\"transform\", \"translate(0,\" + (HEIGHT - MARGINS.bottom) + \")\")\n .call(xAxis);\n\n vis.append(\"svg:g\")\n .attr(\"class\", \"y axis\")\n .attr(\"transform\", \"translate(\" + (MARGINS.left) + \",0)\")\n .call(yAxis);\n\n var lineFunc = d3.svg.line()\n .x(function (d) {\n return xRange(d.x);\n })\n .y(function (d) {\n return yRange(d.y);\n })\n .interpolate('basis');\n\nvis.append(\"svg:path\")\n .attr(\"d\", lineFunc(data))\n .attr(\"stroke\", \"#2D3F50\")\n .attr(\"stroke-width\", 2)\n .attr(\"fill\", \"none\");\n\n// BarChart\n\n\nvar vis = d3.select('#barChart'),\n WIDTH = 400,\n HEIGHT = 200,\n MARGINS = {\n top: 20,\n right: 20,\n bottom: 20,\n left: 50\n },\n xRange = d3.scale.ordinal().rangeRoundBands([MARGINS.left, WIDTH - 
MARGINS.right], 0.1).domain(data.map(function (d) {\n return d.x;\n })),\n\n\n yRange = d3.scale.linear().range([HEIGHT - MARGINS.top, MARGINS.bottom]).domain([0,\n d3.max(data, function (d) {\n return d.y;\n })\n ]),\n\n xAxis = d3.svg.axis()\n .scale(xRange)\n .tickSize(5)\n .tickSubdivide(true),\n\n yAxis = d3.svg.axis()\n .scale(yRange)\n .tickSize(5)\n .orient(\"left\")\n .tickSubdivide(true);\n\n\n vis.append('svg:g')\n .attr('class', 'x axis')\n .attr('transform', 'translate(0,' + (HEIGHT - MARGINS.bottom) + ')')\n .call(xAxis);\n\n vis.append('svg:g')\n .attr('class', 'y axis')\n .attr('transform', 'translate(' + (MARGINS.left) + ',0)')\n .call(yAxis);\n\n vis.selectAll('rect')\n .data(data)\n .enter()\n .append('rect')\n .attr('x', function (d) {\n return xRange(d.x);\n })\n .attr('y', function (d) {\n return yRange(d.y);\n })\n .attr('width', xRange.rangeBand())\n .attr('height', function (d) {\n return ((HEIGHT - MARGINS.bottom) - yRange(d.y));\n })\n .attr('fill', 'grey')\n .on('mouseover',function(d){\n d3.select(this)\n .attr('fill','#2D3F50');\n })\n .on('mouseout',function(d){\n d3.select(this)\n .attr('fill','grey');\n });\n}" }, { "alpha_fraction": 0.789287805557251, "alphanum_fraction": 0.7928193211555481, "avg_line_length": 69.75, "blob_id": "f1a7294584cd3a44417f3b27439cc7cbfd360055", "content_id": "4af79df4513caf0ce3ec199f4a20d34b20a0c291", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1699, "license_type": "permissive", "max_line_length": 424, "num_lines": 24, "path": "/prototype/PaddleTensorBoardDemo/README.md", "repo_name": "philippslang/board", "src_encoding": "UTF-8", "text": "## Use Tensorboard to display PaddlePaddle's data\n\nThis is a prototype to demonstrate the feasibility of using Tensorboard to display PaddlePaddle's training data metrics.\n\nThe proposed architecture design doc is here:\nhttps://github.com/PaddlePaddle/board/wiki/Architecture-design-for-using-TensorBoard-with-PaddlePaddle\n\n## Use PaddleFileWriter in Paddle script\n\nIn the paddle script, initialize ```PaddleFileWriter``` and call the ```write``` function to log data. Provide ```name``` to group all data into a single graph and ```step``` to plot the data properly.\n\nIn mnist.py, the cost and error evaluator are logged every 10 batches.\n\n\n## Generate event file\n\nAfter you run the script (python mnist.py) with the PaddleFileWriter write function, an event file \"events.out.tfevents.{timestamp}\" will be generated in the same program directory. The event file first contains an event holding the file version, followed by one event for each value we write. Each event is written as a protocol buffer (interface defined in tensorboard.proto) framed with a masked CRC function for checksums.\n\n\n## Launch TensorBoard\n\nAfter installing tensorflow, run the command tensorboard --logdir={event_file_dir}, then open localhost:6006 in a browser to view the graphs. TensorBoard runs a CRC checksum to verify data integrity before reading the actual data. If everything is successful, you should be able to see plotted graph(s) in the Scalar tab. You can mouse over and zoom in to see the details of the metrics you log. \n\nTensorBoard scans the entire log directory and searches for subdirectories. You can place multiple event files in different subdirectories for different projects to compare graphs on the same dashboard. 
\n" }, { "alpha_fraction": 0.5548800230026245, "alphanum_fraction": 0.5564799904823303, "avg_line_length": 37.592594146728516, "blob_id": "817899b3ddec2951cb161f1b5678057b653c0901", "content_id": "cb8b2ce757d43f7284ab16cae08db7a6ac47cb22", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3125, "license_type": "permissive", "max_line_length": 100, "num_lines": 81, "path": "/prototype/PaddleTensorBoardDemo/fluid/PaddleFileWriter/paddleboard_utils.py", "repo_name": "philippslang/board", "src_encoding": "UTF-8", "text": "import collections\n\nimport tensorflow as tf\n\n\ndef convert_program_to_tf_graph_def(program):\n '''\n Crude implementation of PaddlePaddle Program to Tensorflow Graph. Goes through each Paddle\n program block and creates a node for each variable and operation. Linearly parse the inputs and\n outputs of each operation and create a connected Tensorflow Graph.\n\n This graph will later be logged to Tensorflow file, which will be rendered in Tensorboard\n :param program: The PaddlePaddle Program object\n :return: Tensorflow Graph\n '''\n graph_def = tf.GraphDef()\n\n if len(program.blocks) > 0:\n op_name_counter = {}\n output_to_op_name = {}\n\n var_node_name_to_nodes = collections.OrderedDict()\n op_node_name_to_nodes = collections.OrderedDict()\n\n for block in program.blocks:\n # Process Program variables and create Tensorflow Nodes\n for var_name in block.vars:\n var_def = block.var(var_name)\n\n node_def = tf.NodeDef()\n node_def.name = var_def.name\n\n node_def.op = str(var_def.type)\n output_to_op_name[node_def.name] = node_def.name\n var_node_name_to_nodes[node_def.name] = node_def\n\n # Process Program operations and create Tensorflow Nodes\n for op in block.ops:\n attrs = {}\n\n if op.type in op_name_counter:\n op_name_counter[op.type] += 1\n else:\n op_name_counter[op.type] = 0\n\n node_name = '%s_%s' % (op.type, op_name_counter[op.type])\n\n for attr_name in op.desc.attr_names():\n tensor_value = tf.AttrValue()\n tensor_value.s = str(op.desc.attr(attr_name))\n attrs[attr_name] = tensor_value\n\n inputs = []\n\n # Get operation inputs and outputs in order to create a connected Graph\n for input_name in op.input_names:\n input_name = op.input(input_name)[0]\n if input_name in output_to_op_name.keys():\n input_node_name = output_to_op_name[input_name]\n inputs.append(input_node_name)\n\n for output_name in op.output_names:\n if op.output(output_name):\n o_name = op.output(output_name)[0]\n output_to_op_name[o_name] = node_name\n\n if o_name in var_node_name_to_nodes.keys():\n var_node = var_node_name_to_nodes[o_name]\n var_node.input.extend([node_name])\n\n node_def = tf.NodeDef(attr=attrs, input=inputs)\n node_def.name = node_name\n\n node_def.op = op.type\n op_node_name_to_nodes[node_def.name] = node_def\n\n # Add Program variables and operations as Nodes in the Tensorflow graph\n graph_def.node.extend(var_node_name_to_nodes.values())\n graph_def.node.extend(op_node_name_to_nodes.values())\n\n return graph_def" }, { "alpha_fraction": 0.6014189124107361, "alphanum_fraction": 0.6272170543670654, "avg_line_length": 31.642105102539062, "blob_id": "33edfd4cc064a9ba64392a68eb6368b8c621e545", "content_id": "7d8b683cfaea031f143aeef2734d3bde4e2ff4c0", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3101, "license_type": "permissive", "max_line_length": 100, "num_lines": 95, "path": 
"/prototype/PaddleTensorBoardDemo/fluid/test_recognize_digits_conv.py", "repo_name": "philippslang/board", "src_encoding": "UTF-8", "text": "from __future__ import print_function\nimport datetime\nimport numpy as np\nimport paddle.v2 as paddle\nimport paddle.v2.fluid as fluid\n\nfrom PaddleFileWriter.paddle_file_writer import PaddleFileWriter\nfrom PaddleFileWriter import paddleboard_utils as pbu\n\n\n# Create PaddleFileWriter with log\ntimestamp_dir = datetime.datetime.now().strftime('%Y%m%d%H%M%S')\ntrain_fw = PaddleFileWriter('./logs/%s/train' % timestamp_dir)\n\ntrain_lists = []\n\nimages = fluid.layers.data(name='pixel', shape=[1, 28, 28], dtype='float32')\nlabel = fluid.layers.data(name='label', shape=[1], dtype='int64')\nconv_pool_1 = fluid.nets.simple_img_conv_pool(\n input=images,\n filter_size=5,\n num_filters=20,\n pool_size=2,\n pool_stride=2,\n act=\"relu\")\nconv_pool_2 = fluid.nets.simple_img_conv_pool(\n input=conv_pool_1,\n filter_size=5,\n num_filters=50,\n pool_size=2,\n pool_stride=2,\n act=\"relu\")\n\npredict = fluid.layers.fc(input=conv_pool_2, size=10, act=\"softmax\")\ncost = fluid.layers.cross_entropy(input=predict, label=label)\navg_cost = fluid.layers.mean(x=cost)\noptimizer = fluid.optimizer.Adam(learning_rate=0.01)\noptimizer.minimize(avg_cost)\n\naccuracy = fluid.evaluator.Accuracy(input=predict, label=label)\n\nBATCH_SIZE = 50\nPASS_NUM = 3\ntrain_reader = paddle.batch(\n paddle.reader.shuffle(\n paddle.dataset.mnist.train(), buf_size=500),\n batch_size=BATCH_SIZE)\n\nplace = fluid.CPUPlace()\nexe = fluid.Executor(place)\n\nexe.run(fluid.default_startup_program())\n\n# Print computation graph\ntrain_fw.write_graph(pbu.convert_program_to_tf_graph_def(fluid.default_main_program()))\n\nbatch_id = 0\nfor pass_id in range(PASS_NUM):\n accuracy.reset(exe)\n\n for data in train_reader():\n img_data = np.array(map(lambda x: x[0].reshape([1, 28, 28]),\n data)).astype(\"float32\")\n y_data = np.array(map(lambda x: x[1], data)).astype(\"int64\")\n y_data = y_data.reshape([BATCH_SIZE, 1])\n\n loss, acc = exe.run(fluid.default_main_program(),\n feed={\"pixel\": img_data,\n \"label\": y_data},\n fetch_list=[avg_cost] + accuracy.metrics)\n pass_acc = accuracy.eval(exe)\n print(\"pass_id=\" + str(pass_id) + \" acc=\" + str(acc) + \" pass_acc=\" +\n str(pass_acc))\n # print loss, acc\n if loss < 10.0 and pass_acc > 0.9:\n # if avg cost less than 10.0 and accuracy is larger than 0.9, we think our code is good.\n exit(0)\n\n # Log training batch cost and error\n train_fw.write(\"cost\", float(loss[0]), batch_id)\n train_fw.write(\"error\", float(1.0-pass_acc[0]), batch_id)\n train_lists.append((loss, float(1.0-pass_acc[0])))\n\n best = sorted(train_lists, key=lambda list: float(list[0]))[0]\n acc = 100 - float(best[1]) * 100\n print('The training classification accuracy is %.2f%%' % acc)\n train_fw.write(\"accuracy\", acc, batch_id)\n\n batch_id += 1\n\n pass_acc = accuracy.eval(exe)\n print(\"pass_id=\" + str(pass_id) + \" pass_acc=\" + str(pass_acc))\n\n\nexit(1)\n" }, { "alpha_fraction": 0.7049891352653503, "alphanum_fraction": 0.7310194969177246, "avg_line_length": 13.903225898742676, "blob_id": "07d5532139a1a24e5f009d9fa8464d1527ae5750", "content_id": "37e29482ce6462c89a438b538d1ff016fdc0753c", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 461, "license_type": "permissive", "max_line_length": 59, "num_lines": 31, "path": "/README.md", "repo_name": "philippslang/board", "src_encoding": "UTF-8", 
"text": "# Paddleboard\n\n## Installation Instructions\n\n1) Clone paddleboard repo\n\n```\ngit clone [email protected]:PaddlePaddle/board.git\n```\n\n2) Create virtual environment\n\n```\ncd board\nvirtualenv venv\nsource venv/bin/activate\n``` \n\n3) Build and install paddleboard wheel\n```\n./paddleboard/pip/build_package.sh\npip install --upgrade dist/paddleboard-0.1-py2-none-any.whl\n```\n\n4) Run server\n\n```\npaddleboard runserver\n```\n\n5) Launch browser and navigate to http://localhost:8000" }, { "alpha_fraction": 0.6043996810913086, "alphanum_fraction": 0.6226696372032166, "avg_line_length": 33.83116912841797, "blob_id": "d7e0025974288fcb1481dd03ff038b777f7799f3", "content_id": "338aebf808cad853e5dd4b3e3cff9dbd4adc0641", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2682, "license_type": "permissive", "max_line_length": 129, "num_lines": 77, "path": "/prototype/PaddleTensorBoardDemo/fluid/PaddleFileWriter/paddle_file_writer.py", "repo_name": "philippslang/board", "src_encoding": "UTF-8", "text": "import tensorflow as tf\nimport os\nimport time\nimport struct\nfrom crc32c import crc32c\n\n\nclass PaddleFileWriter:\n\n def __init__(self, log_path = None):\n\n # tensorboard looks for tag \"tfevents\" in filename to load data\n filename = 'events.out.tfevents.{}'.format(int(time.time()))\n if log_path is None:\n path = filename\n else:\n if not os.path.exists(log_path):\n os.makedirs(log_path)\n path = os.path.join(log_path, filename)\n\n self.writer = open(path, 'wb')\n # every log file has to start with event of file version\n self.writeEvent(tf.Event(wall_time=time.time(), step=0, file_version='brain.Event:2'))\n\n #this function replicates scalar() function in tensorflow, simlpy logs a single value and plot in a graph\n def write(self, name, data, step = 0):\n # data will wrap in summary and write as a Event protobuf\n #'tag' will group the plot data in a single graph\n event = tf.Event(\n wall_time=time.time(),\n step=step,\n summary=tf.Summary(\n value=[tf.Summary.Value(\n tag=name, simple_value=data)]))\n\n self.writeEvent(event)\n\n\n def write_graph(self, graph_def):\n # data will wrap in summary and write as a Event protobuf\n #'tag' will group the plot data in a single graph\n event = tf.Event(graph_def=graph_def.SerializeToString())\n self.writeEvent(event)\n\n\n def writeEvent(self, event):\n # serialize the protobuf as a string\n data = event.SerializeToString()\n w = self.writer\n # tensorboard uses a checksum algorithm(CRC) to verify data integrity\n\n #format defined in here: https://github.com/tensorflow/tensorflow/blob/master/tensorflow/core/lib/io/record_writer.cc#L96\n\n # Format of a single record:\n # uint64 length\n # uint32 masked crc of length\n # byte data[length]\n # uint32 masked crc of data\n\n # struck.pack will format string as binary data in a format\n header = struct.pack('Q', len(data)) #'Q' is the format of unsigned long long(uint64)\n w.write(header)\n w.write(struct.pack('I', masked_crc32c(header))) #'I' is unsigned int(uint32)\n w.write(data)\n w.write(struct.pack('I', masked_crc32c(data)))\n w.flush()\n\n\ndef masked_crc32c(data):\n # mast function defined in: https://github.com/tensorflow/tensorflow/blob/master/tensorflow/core/lib/hash/crc32c.h#L40\n kMaskDelta = 0xa282ead8\n x = u32(crc32c(data))\n return u32(((x >> 15) | u32(x << 17)) + kMaskDelta)\n\n\ndef u32(x):\n return x & 0xffffffff\n" }, { "alpha_fraction": 0.46666666865348816, "alphanum_fraction": 
0.5174603462219238, "avg_line_length": 23.230770111083984, "blob_id": "dc9ffd748d44c7b4bf276a67f994ebfa59784c09", "content_id": "99f595263c0883965570867ec61d3a1e352c959d", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 630, "license_type": "permissive", "max_line_length": 58, "num_lines": 26, "path": "/prototype/PaddleBoard/chart/views.py", "repo_name": "philippslang/board", "src_encoding": "UTF-8", "text": "from django.http import HttpResponse\nfrom django.template import loader\nimport json\n\n\ndef index(request):\n data = read_chart_data()\n template = loader.get_template('chart/index.html')\n context = {\n 'chart_data': json.dumps(data)\n }\n return HttpResponse(template.render(context, request))\n\n\ndef read_chart_data():\n #TODO: READ FROM PROTOBUF\n chart_data = [\n {'x': 1, 'y': 5},\n {'x': 20, 'y': 20},\n {'x': 40, 'y': 10},\n {'x': 60, 'y': 40},\n {'x': 80, 'y': 5},\n {'x': 100, 'y': 60},\n {'x': 120, 'y': 40},\n {'x': 140, 'y': 30}]\n return chart_data\n" }, { "alpha_fraction": 0.5969005823135376, "alphanum_fraction": 0.6172069907188416, "avg_line_length": 37.19047546386719, "blob_id": "5b953d9041a64675cca6439b87bae0cc550b0b62", "content_id": "b4d3c159cc2050e12b4bacb5096d30f1fa596f17", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5614, "license_type": "permissive", "max_line_length": 104, "num_lines": 147, "path": "/prototype/PaddleTensorBoardDemo/mnist.py", "repo_name": "philippslang/board", "src_encoding": "UTF-8", "text": "import os\nimport datetime\nimport numpy as np\nimport paddle.v2 as paddle\nfrom PaddleFileWriter.paddleFileWriter import PaddleFileWriter\n\ndef softmax_regression(img):\n predict = paddle.layer.fc(\n input=img, size=10, act=paddle.activation.Softmax())\n return predict\n\n\ndef multilayer_perceptron(img):\n # The first fully-connected layer\n hidden1 = paddle.layer.fc(input=img, size=128, act=paddle.activation.Relu())\n # The second fully-connected layer and the corresponding activation function\n hidden2 = paddle.layer.fc(\n input=hidden1, size=64, act=paddle.activation.Relu())\n # The third fully-connected layer, note that the hidden size should be 10,\n # which is the number of unique digits\n predict = paddle.layer.fc(\n input=hidden2, size=10, act=paddle.activation.Softmax())\n return predict\n\n\ndef convolutional_neural_network(img):\n # first conv layer\n conv_pool_1 = paddle.networks.simple_img_conv_pool(\n input=img,\n filter_size=5,\n num_filters=20,\n num_channel=1,\n pool_size=2,\n pool_stride=2,\n act=paddle.activation.Relu())\n # second conv layer\n conv_pool_2 = paddle.networks.simple_img_conv_pool(\n input=conv_pool_1,\n filter_size=5,\n num_filters=50,\n num_channel=20,\n pool_size=2,\n pool_stride=2,\n act=paddle.activation.Relu())\n # fully-connected layer\n predict = paddle.layer.fc(\n input=conv_pool_2, size=10, act=paddle.activation.Softmax())\n return predict\n\n\ndef main():\n paddle.init(use_gpu=False, trainer_count=1)\n\n # define network topology\n images = paddle.layer.data(\n name='pixel', type=paddle.data_type.dense_vector(784))\n label = paddle.layer.data(\n name='label', type=paddle.data_type.integer_value(10))\n\n # Here we can build the prediction network in different ways. 
Please\n # choose one by uncommenting the corresponding line.\n # predict = softmax_regression(images)\n # predict = multilayer_perceptron(images)\n predict = convolutional_neural_network(images)\n\n cost = paddle.layer.classification_cost(input=predict, label=label)\n\n parameters = paddle.parameters.create(cost)\n\n optimizer = paddle.optimizer.Momentum(\n learning_rate=0.1 / 128.0,\n momentum=0.9,\n regularization=paddle.optimizer.L2Regularization(rate=0.0005 * 128))\n\n trainer = paddle.trainer.SGD(\n cost=cost, parameters=parameters, update_equation=optimizer)\n\n train_lists = []\n test_lists = []\n\n timestamp_dir = datetime.datetime.now().strftime('%Y%m%d%H%M%S')\n train_fw = PaddleFileWriter('./logs/%s/train' % timestamp_dir)\n test_fw = PaddleFileWriter('./logs/%s/test' % timestamp_dir)\n\n def event_handler_train(event):\n if isinstance(event, paddle.event.EndIteration):\n if event.batch_id % 10 == 0:\n print \"Train Data: Pass %d, Batch %d, Cost %f, %s\" % (\n event.pass_id, event.batch_id, event.cost, event.metrics)\n train_fw.write(\"cost\", event.cost, event.batch_id)\n train_fw.write(\"error\", event.metrics['classification_error_evaluator'], event.batch_id)\n train_lists.append((event.pass_id, event.cost,\n event.metrics['classification_error_evaluator']))\n\n best = sorted(train_lists, key=lambda list: float(list[1]))[0]\n accuracy = 100 - float(best[2]) * 100\n print 'The training classification accuracy is %.2f%%' % accuracy\n train_fw.write(\"accuracy\", accuracy, event.batch_id)\n\n def event_handler_test(event):\n if isinstance(event, paddle.event.EndIteration):\n if event.batch_id % 10 == 0:\n print \"Test Data: Pass %d, Batch %d, Cost %f, %s\" % (\n event.pass_id, event.batch_id, event.cost, event.metrics)\n test_fw.write(\"cost\", event.cost, event.batch_id)\n test_fw.write(\"error\", event.metrics['classification_error_evaluator'], event.batch_id)\n test_lists.append((event.pass_id, event.cost,\n event.metrics['classification_error_evaluator']))\n\n best = sorted(test_lists, key=lambda list: float(list[1]))[0]\n accuracy = 100 - float(best[2]) * 100\n print 'The test classification accuracy is %.2f%%' % accuracy\n test_fw.write(\"accuracy\", accuracy, event.batch_id)\n\n trainer.train(\n reader=paddle.batch(\n paddle.reader.shuffle(paddle.dataset.mnist.train(), buf_size=8192),\n batch_size=128),\n event_handler=event_handler_train,\n num_passes=1)\n\n trainer.train(\n reader=paddle.batch(\n paddle.reader.shuffle(paddle.dataset.mnist.test(), buf_size=8192),\n batch_size=128),\n event_handler=event_handler_test,\n num_passes=1)\n\n # def load_image(file):\n # im = Image.open(file).convert('L')\n # im = im.resize((28, 28), Image.ANTIALIAS)\n # im = np.array(im).astype(np.float32).flatten()\n # im = im / 255.0\n # return im\n #\n # test_data = []\n # cur_dir = os.path.dirname(os.path.realpath(__file__))\n # test_data.append((load_image(cur_dir + '/image/infer_3.png'), ))\n #\n # probs = paddle.infer(\n # output_layer=predict, parameters=parameters, input=test_data)\n # lab = np.argsort(-probs) # probs and lab are the results of one batch data\n # print \"Label of image/infer_3.png is: %d\" % lab[0][0]\n\n\nif __name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.6031073331832886, "alphanum_fraction": 0.604519784450531, "avg_line_length": 22.600000381469727, "blob_id": "05cb2a6fd1c8163747eb42f941fd363cd031b010", "content_id": "3e93a2bb7fc632a60f6af7ebce2d7d2b669b08ff", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": 
"Shell", "length_bytes": 708, "license_type": "permissive", "max_line_length": 81, "num_lines": 30, "path": "/paddleboard/pip/build_package.sh", "repo_name": "philippslang/board", "src_encoding": "UTF-8", "text": "#!/usr/bin/env bash\n\nset -e\n\nfunction main() {\n CURRENT_DIR=`pwd`\n DEST=$CURRENT_DIR/dist\n TMPDIR=$(mktemp -d -t tmp.XXXXXXXXXX)\n SCRIPT_DIR=$(dirname \"$0\")\n\n echo $(date) : \"=== Using tmpdir: ${TMPDIR}\"\n\n cp \"${SCRIPT_DIR}/setup.py\" \"${TMPDIR}\"\n cp \"${SCRIPT_DIR}/MANIFEST.in\" \"${TMPDIR}\"\n cp -R \"${SCRIPT_DIR}/../python/paddleboard\" \"${TMPDIR}/paddleboard\"\n cp \"${SCRIPT_DIR}/../python/manage.py\" \"${TMPDIR}/paddleboard/server/manage.py\"\n\n pushd ${TMPDIR}\n\n echo $(date) : \"*** Building paddleboard wheel ***\"\n echo $(pwd)\n python setup.py bdist_wheel\n mkdir -p ${DEST}\n cp dist/* ${DEST}\n popd\n rm -rf ${TMPDIR}\n echo $(date) : \"*** Wrote paddleboard wheel to: ${DEST} ***\"\n}\n\nmain \"$@\"\n" }, { "alpha_fraction": 0.43478259444236755, "alphanum_fraction": 0.6739130616188049, "avg_line_length": 14.333333015441895, "blob_id": "8269f2e89683ee65ae4a3d15ddda84473d1c86d6", "content_id": "674306343116e1f75a248058e06b314a557345be", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 46, "license_type": "permissive", "max_line_length": 16, "num_lines": 3, "path": "/paddleboard/python/paddleboard/server/requirements.txt", "repo_name": "philippslang/board", "src_encoding": "UTF-8", "text": "Django==1.8.11\ngunicorn==19.7.1\ngevent==1.2.1\n" }, { "alpha_fraction": 0.711773693561554, "alphanum_fraction": 0.7178899049758911, "avg_line_length": 38.66666793823242, "blob_id": "bd80670d617d5bf54d79ebc268b8b4600e3f675d", "content_id": "bb542375fd4835ff99d71029af72aa01aa4ee9f5", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "CMake", "length_bytes": 1308, "license_type": "permissive", "max_line_length": 87, "num_lines": 33, "path": "/paddleboard/cc/CMakeLists.txt", "repo_name": "philippslang/board", "src_encoding": "UTF-8", "text": "cmake_minimum_required(VERSION 3.2)\nproject(paddleboard)\n\nfind_program(CCACHE_FOUND ccache)\nif(CCACHE_FOUND)\n set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ccache)\n set_property(GLOBAL PROPERTY RULE_LAUNCH_LINK ccache)\nendif(CCACHE_FOUND)\n\nset(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} \"${CMAKE_CURRENT_SOURCE_DIR}/cmake\")\nset(CMAKE_CXX_STANDARD 11)\nset(CMAKE_CXX_FLAGS \"-fPIC\")\n\n\nset(THIRD_PARTY_PATH \"${CMAKE_BINARY_DIR}/third_party\" CACHE STRING\n \"A path setting third party libraries download & build directories.\")\n\n################################ Configurations #######################################\noption(WITH_TESTING \"Compile paddleboard with unit testing\" ON)\n\ninclude(external/zlib) # download, build, install zlib\ninclude(external/gflags) # download, build, install gflags\ninclude(external/glog) # download, build, install glog\ninclude(external/gtest) # download, build, install gtest\ninclude(external/pybind11) # download pybind11\ninclude(external/protobuf) # download, build, install protobuf\n#include(external/python) # find python and set path\n\ninclude_directories(${PROJECT_SOURCE_DIR})\ninclude_directories(${CMAKE_CURRENT_BINARY_DIR})\ninclude_directories(${PROJECT_SOURCE_DIR}/thirdparty/local/include)\n\nSET(SOURCE_DIR ${PROJECT_SOURCE_DIR}/src)" }, { "alpha_fraction": 0.6265624761581421, "alphanum_fraction": 0.6473958492279053, "avg_line_length": 29.967741012573242, 
"blob_id": "072dd0aedfb447ccce52d8ebc78b9443e2804f7d", "content_id": "75d61f825e00a2779f3bdbf680dce56b8b9be04d", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1920, "license_type": "permissive", "max_line_length": 87, "num_lines": 62, "path": "/prototype/PaddleTensorBoardDemo/fluid/test_fit_a_line.py", "repo_name": "philippslang/board", "src_encoding": "UTF-8", "text": "import datetime\nimport numpy as np\nimport paddle.v2 as paddle\nimport paddle.v2.fluid as fluid\n\nfrom PaddleFileWriter.paddle_file_writer import PaddleFileWriter\nfrom PaddleFileWriter import paddleboard_utils as pbu\n\n\n# Create PaddleFileWriter with log\ntimestamp_dir = datetime.datetime.now().strftime('%Y%m%d%H%M%S')\ntrain_fw = PaddleFileWriter('./logs/%s/train' % timestamp_dir)\n\nx = fluid.layers.data(name='x', shape=[13], dtype='float32')\n\ny_predict = fluid.layers.fc(input=x, size=1, act=None)\n\ny = fluid.layers.data(name='y', shape=[1], dtype='float32')\n\ncost = fluid.layers.square_error_cost(input=y_predict, label=y)\navg_cost = fluid.layers.mean(x=cost)\n\nsgd_optimizer = fluid.optimizer.SGD(learning_rate=0.001)\nsgd_optimizer.minimize(avg_cost)\n\nBATCH_SIZE = 20\n\ntrain_reader = paddle.batch(\n paddle.reader.shuffle(\n paddle.dataset.uci_housing.train(), buf_size=500),\n batch_size=BATCH_SIZE)\n\nplace = fluid.CPUPlace()\nexe = fluid.Executor(place)\n\nexe.run(fluid.default_startup_program())\n\n# Print computation graph\ntrain_fw.write_graph(pbu.convert_program_to_tf_graph_def(fluid.default_main_program()))\n\nPASS_NUM = 100\nbatch_id = 0\nfor pass_id in range(PASS_NUM):\n fluid.io.save_persistables(exe, \"./fit_a_line.model/\")\n fluid.io.load_persistables(exe, \"./fit_a_line.model/\")\n\n for data in train_reader():\n x_data = np.array(map(lambda _: _[0], data)).astype(\"float32\")\n y_data = np.array(map(lambda _: _[1], data)).astype(\"float32\")\n\n avg_loss_value, = exe.run(fluid.default_main_program(),\n feed={'x': x_data,\n 'y': y_data},\n fetch_list=[avg_cost])\n\n train_fw.write(\"cost\", float(avg_loss_value[0]), batch_id)\n\n if avg_loss_value[0] < 10.0:\n exit(0) # if avg cost less than 10.0, we think our code is good.\n\n batch_id += 1\nexit(1)\n" }, { "alpha_fraction": 0.7916980981826782, "alphanum_fraction": 0.7916980981826782, "avg_line_length": 41.74193572998047, "blob_id": "46d5b1fb7eb708bd283215325bf3bc6998a93d26", "content_id": "1d6c269fc067df7dc58f6f3a9dfc79a84f79f7d3", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1325, "license_type": "permissive", "max_line_length": 197, "num_lines": 31, "path": "/prototype/README.md", "repo_name": "philippslang/board", "src_encoding": "UTF-8", "text": "# PaddleBoard\n\n## Purpose\n\nA deep learning model is complex and can be difficult to understand or debug. It can feel like a black box to PaddlePaddle users. 
PaddleBoard is the flashlight that shines a light on what is under the hood!\n\nIn order to help users understand, optimize, and debug their PaddlePaddle program, we want to build a visualization tool that can give insight into the computational graph architecture and data.\n\n\n## Ideas / Proposals\n\n### Key Features\n\n- A diagram that displays the entire model including operators, variables and layers\n- A graph that keeps track of metrics/data over time \n\n\n### Improvement from Tensorboard / Long term proposals \n\n- Side by side code and graph relationship (pointing to a certain op that can highlight the corresponding code)\n- Make the board a lightweight IDE that is able to run the program with custom parameters and see the visual graphs change dynamically\n- Simply drag and drop to draw the visual diagram and auto-generate the code for non-programmers, providing a UI to upload all the training data\n- Bring something like TensorFlow Playground concepts into board\n\n\n## Project Info and Progress\nThis wiki page contains all the info and keeps track of progress \nhttps://github.com/PaddlePaddle/board/wiki/Paddle-Board-Project-Info\n\n## Wiki and Technical Doc\nPlease go to the Wiki tab!\n" } ]
14
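The board record above revolves around PaddleFileWriter: its READMEs describe logging named scalar values per step and then pointing TensorBoard at the log directory. Below is a usage sketch under those assumptions; the metric values are made up, and the import path mirrors the fluid test scripts in this record.

```python
# Usage sketch for PaddleFileWriter, mirroring the fluid test scripts above.
import datetime

from PaddleFileWriter.paddle_file_writer import PaddleFileWriter

timestamp_dir = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
fw = PaddleFileWriter('./logs/%s/train' % timestamp_dir)

for step, cost in enumerate([2.5, 1.8, 1.1, 0.7]):  # made-up training costs
    fw.write("cost", cost, step)  # the "cost" tag groups points into one chart
# afterwards: tensorboard --logdir=./logs
```

Per the writer's own comments, every write is framed on disk as length, masked CRC of the length, payload, masked CRC of the payload, which is the record format TensorBoard validates before plotting.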
nanococo/2DRaytracer
https://github.com/nanococo/2DRaytracer
dc5fa415fd58e70eacadd8dfb63b10ae024fbd6a
9c8bd4c89bd2d7a6cc5710648f922a7620ff4051
976a78ff574de9f96e1198fcc9a707e02f027349
refs/heads/master
2022-11-11T23:45:38.816590
2020-06-10T02:53:47
2020-06-10T02:53:47
270,852,245
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5, "alphanum_fraction": 0.6037735939025879, "avg_line_length": 14.285714149475098, "blob_id": "91fb64b105a37f0f27f001fa0ee1f9ffad1694c0", "content_id": "2e9f1868b1b814c2680dd91b2bcc14b6581e84a5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 106, "license_type": "no_license", "max_line_length": 31, "num_lines": 7, "path": "/test.py", "repo_name": "nanococo/2DRaytracer", "src_encoding": "UTF-8", "text": "import numpy as np\n\nx = np.arange(10)\nx2 = np.reshape(x, (2,5))\n\nprint(x2)\nprint(np.roll(x2, (1,2),(0,1)))" } ]
1
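The 2DRaytracer record above is a single NumPy scratch file; for reference, here is what its np.roll call evaluates to. This is standard NumPy semantics applied to the same array, not anything specific to the repo.

```python
# Worked expansion of the np.roll call in test.py above.
import numpy as np

x2 = np.arange(10).reshape(2, 5)
# x2:
# [[0 1 2 3 4]
#  [5 6 7 8 9]]
print(np.roll(x2, (1, 2), (0, 1)))
# Rolling by 1 on axis 0 swaps the rows; rolling by 2 on axis 1 shifts each
# row right by two positions (with wraparound):
# [[8 9 5 6 7]
#  [3 4 0 1 2]]
```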
MeikoZhang/light_wechat
https://github.com/MeikoZhang/light_wechat
8789dc05b4951c94df5ac9879511e34e841b3505
d2189ae9270e6d6d1e0c4dd4530af09a85ceb9d3
d1d6d66ff42922628560b909050cf125e090c313
refs/heads/master
2020-03-25T04:39:41.106510
2018-08-08T13:02:14
2018-08-08T13:02:14
143,406,764
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6170180439949036, "alphanum_fraction": 0.6193094849586487, "avg_line_length": 30.623188018798828, "blob_id": "4796ec13bf444b20991b6d62652a3118d180655f", "content_id": "24193386cb0b8756ca9a44f912152590169481fb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6818, "license_type": "no_license", "max_line_length": 112, "num_lines": 207, "path": "/django_web/login_console.py", "repo_name": "MeikoZhang/light_wechat", "src_encoding": "UTF-8", "text": "# Create your tests here.\nimport itchat\nimport time\nimport sys\nfrom itchat.content import *\nimport os\nimport json\nfrom django_web.Logger import logger\n\n\nBASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\n# 验证码存储路径\nqrCode_dir = os.path.join(BASE_DIR, 'static\\wx_login\\qrcode.jpg')\n# 登陆信息存储目录\nlogin_status_dir = os.path.join(BASE_DIR, 'static\\wx_login\\itchat.pkl')\n# 微信图片/文件存放目录\nwx_files_dir = os.path.join(BASE_DIR, 'static\\wx_files')\n\n\nif_login = False\nif_run = False\n\n\ndef login_callback():\n global if_login\n if_login = True\n logger.info(\"登陆成功 ...\")\n load_user = itchat.search_friends()\n logger.info(json.dumps(load_user))\n\n\ndef exit_callback():\n global if_login\n if_login = False\n logger.info(\"程序已登出 ...\")\n\n\nuuid_last_received = None\n\n\ndef qr_callback(uuid=None, status=None, qrcode=None):\n logger.info(\"qr_callback uuid:%s\" % (uuid))\n global uuid_last_received\n if uuid_last_received != uuid:\n logger.info(\"二维码获取及存储 ...uuid:%s status:%s\" % (uuid, status))\n with open(qrCode_dir, 'wb') as f:\n f.write(qrcode)\n uuid_last_received = uuid\n\n\ndef output_info(msg):\n print('[INFO] %s' % msg)\n\n\ndef open_QR():\n for get_count in range(10):\n output_info('Getting uuid')\n uuid = itchat.get_QRuuid()\n while uuid is None:\n uuid = itchat.get_QRuuid()\n time.sleep(1)\n output_info('Getting QR Code')\n if itchat.get_QR(uuid):\n break\n elif get_count >= 9:\n output_info('Failed to get QR Code, please restart the program')\n sys.exit()\n output_info('Please scan the QR Code')\n return uuid\n\n\ndef login():\n uuid = open_QR()\n print('-------get qrcode')\n waitForConfirm = False\n while 1:\n status = itchat.check_login(uuid)\n if status == '200':\n break\n elif status == '201':\n if waitForConfirm:\n output_info('Please press confirm')\n waitForConfirm = True\n elif status == '408':\n output_info('Reloading QR Code')\n uuid = open_QR()\n waitForConfirm = False\n\n print('-------get login success')\n\n itchat.login()\n\n # 保存登陆状态\n itchat.dump_login_status(fileDir=login_status_dir)\n\n # 获取登陆人信息\n userInfo = itchat.web_init()\n print('Login successfully as %s' % userInfo['User']['NickName'])\n\n # 手机web微信登陆状态显示\n itchat.show_mobile_login()\n print('-------show mobile login')\n\n # 获取最新近聊列表\n itchat.get_contact(update=True)\n print('-------get contact complete')\n\n # 获取最新好友列表\n itchat.get_friends(update=True)\n print('-------get friends complete')\n\n # 获取最新群聊列表\n chatrooms = itchat.get_chatrooms(update=True)\n print('-------get chatrooms complete')\n\n # 更新群聊详细信息(人员列表)\n for chatroom in chatrooms:\n # print(json.dumps(chatroom))\n itchat.update_chatroom(userName=chatroom['UserName'])\n print('-------update chatrooms members complete')\n\n # 启动心跳连接\n itchat.start_receiving()\n print('-------start receiving,itchat class:'+str(itchat))\n\n # 消息注册 好友消息\n @itchat.msg_register(TEXT)\n def text_reply(msg):\n # print(json.dumps(msg))\n fromuser = itchat.search_friends(userName=msg['FromUserName'])['NickName']\n 
print(itchat.search_friends(userName=msg['ToUserName']))\n touser = itchat.search_friends(userName=msg['ToUserName'])['NickName']\n msgtime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(msg.createTime))\n msgtext = msg['Text']\n print('time:%s from:%s to: %s content:%s' % (msgtime, fromuser, touser, msgtext))\n\n # Message registration: group-chat messages\n @itchat.msg_register(TEXT, isGroupChat=True)\n def text_reply(msg):\n print(json.dumps(msg))\n # chatgroupname = msg['User']['NickName']\n print(itchat.search_chatrooms(userName=msg['ToUserName']))\n chatgroupname = itchat.search_chatrooms(userName=msg['ToUserName'])['NickName']\n chatusername = msg['ActualNickName']\n msgtext = msg['Text']\n msgtime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(msg.createTime))\n print('time:%s from:%s group:%s content:%s' % (msgtime, chatusername, chatgroupname, msgtext))\n\n @itchat.msg_register([PICTURE, RECORDING, ATTACHMENT, VIDEO])\n def download_files(msg):\n file = msg.download(os.path.join(BASE_DIR,'static\\wx_files', msg.fileName))\n typeSymbol = {\n PICTURE: 'img',\n VIDEO: 'vid', }.get(msg.type, 'fil')\n return '@%s@%s' % (typeSymbol, msg.fileName)\n\n # itchat.run(blockThread=False)\n itchat.run()\n\n # def newThread():\n # itchat.run()\n # threading.Thread(target=newThread).start()\n # print(\"Monitoring ... \")\n # while True:\n # print(\"---------- get msg from queue ...\")\n # queuemsg = q.get()\n # fromuser = itchat.search_friends(userName=queuemsg['FromUserName'])['NickName']\n # print(itchat.search_friends(userName=queuemsg['ToUserName']))\n # touser = itchat.search_friends(userName=queuemsg['ToUserName'])['NickName']\n # msgtime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(queuemsg.createTime))\n # msgtext = queuemsg['Text']\n # print('msg from queue ... time:%s from:%s to: %s content:%s' % (msgtime, fromuser, touser, msgtext))\n\n\n# Message registration: friend messages\[email protected]_register(TEXT)\ndef text_reply(msg):\n print(json.dumps(msg))\n fromuser = itchat.search_friends(userName=msg['FromUserName'])['NickName']\n print(itchat.search_friends(userName=msg['ToUserName']))\n touser = itchat.search_friends(userName=msg['ToUserName'])['NickName']\n msgtime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(msg.createTime))\n msgtext = msg['Text']\n print('time:%s from:%s to: %s content:%s' % (msgtime, fromuser, touser, msgtext))\n\n\n# hotReload=False, statusStorageDir='itchat.pkl',\n# enableCmdQR=False, picDir=None, qrCallback=None,\n# loginCallback=None, exitCallback=None\n\n\nitchat.auto_login(hotReload=True, statusStorageDir=login_status_dir, picDir=qrCode_dir,\n # qrCallback=qr_callback,\n loginCallback=login_callback, exitCallback=exit_callback)\nprint('over')\n\n# time.sleep(5)\n# itchat.logout()\n# print('login out')\n#\n# itchat.auto_login(hotReload=True, statusStorageDir=login_status_dir,picDir=qrCode_dir,\n# # qrCallback=qr_callback,\n# loginCallback=login_callback, exitCallback=exit_callback)\n# print('re login over')\n\nitchat.run()\nprint('run over')\n" }, { "alpha_fraction": 0.6780303120613098, "alphanum_fraction": 0.6840909123420715, "avg_line_length": 35.66666793823242, "blob_id": "35c92c95bd630c79991d3f74fa0550039e247f63", "content_id": "064d1fb213b3cf6dcf9eb528ddd858e0b42bcf47", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1378, "license_type": "no_license", "max_line_length": 77, "num_lines": 36, "path": "/wechat/urls.py", "repo_name": "MeikoZhang/light_wechat", "src_encoding": "UTF-8", "text": "\"\"\"wechat URL Configuration\n\nThe 
`urlpatterns` list routes URLs to views. For more information please see:\n https://docs.djangoproject.com/en/2.0/topics/http/urls/\nExamples:\nFunction views\n 1. Add an import: from my_app import views\n 2. Add a URL to urlpatterns: path('', views.home, name='home')\nClass-based views\n 1. Add an import: from other_app.views import Home\n 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')\nIncluding another URLconf\n 1. Import the include() function: from django.urls import include, path\n 2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))\n\"\"\"\nfrom django.contrib import admin\nfrom django.urls import path\n\nfrom django.conf.urls import url\nfrom django.contrib import admin\n#from django_web.views import index # import the index function from views.py\nimport django_web.views as view\n\nurlpatterns = [\n url(r'^admin/', admin.site.urls),\n url(r'^index/', view.index), # requests matching this pattern are handled by the index function\n # url(r'^test/', view.test),\n # url(r'^qrcode/', view.qrcode),\n # url(r'^loadlogin/', view.load_login),\n # url(r'^checklogin/', view.check_login),\n # url(r'^login/', view.login),\n # url(r'^getmsg/', view.get_msg)\n url(r'^login/', view.login),\n url(r'^login_status/', view.login_status),\n url(r'^logout/', view.logout)\n]\n" }, { "alpha_fraction": 0.6969990134239197, "alphanum_fraction": 0.6979671120643616, "avg_line_length": 20.081632614135742, "blob_id": "8ca30a93241ff917b457f4a6d94d2ee9b102b426", "content_id": "c0a10281134a41fa87372e2b75cb4807849d7ce1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1033, "license_type": "no_license", "max_line_length": 71, "num_lines": 49, "path": "/django_web/views.py", "repo_name": "MeikoZhang/light_wechat", "src_encoding": "UTF-8", "text": "from django.shortcuts import render\nimport django_web.login as weblogin\nfrom django.http import HttpResponse\nimport json\n\n\n# Create your views here.\ndef index(request):\n # weblogin.auto_login()\n print('request index over')\n return render(request, 'index.html')\n\n\ndef test(request):\n return render(request, 'test.html')\n\n\ndef qrcode(request):\n weblogin.get_qr()\n return render(request, 'login.html')\n\n\ndef load_login(request):\n status = weblogin.load_login()\n return HttpResponse(status)\n\n\ndef check_login(request):\n status = weblogin.check_login()\n return HttpResponse(status)\n\n\ndef login(request):\n status = weblogin.auto_login()\n return HttpResponse(status)\n\n\ndef get_msg(request):\n msg = weblogin.get_msg()\n return HttpResponse(json.dumps(msg) if msg is not None else \"\",\n content_type=\"application/json; charset=utf-8\")\n\n\ndef login_status(request):\n return HttpResponse(weblogin.login_status())\n\n\ndef logout(request):\n return HttpResponse(weblogin.logout())\n" }, { "alpha_fraction": 0.5755903720855713, "alphanum_fraction": 0.5755903720855713, "avg_line_length": 26.890207290649414, "blob_id": "d9278a2d74850cf8aac15d9661f13ef5b3f7e8be", "content_id": "05f9fda9afd55c79cded881e3a26c51500325822", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1328, "license_type": "no_license", "max_line_length": 91, "num_lines": 29, "path": "/django_web/tests.py", "repo_name": "MeikoZhang/light_wechat", "src_encoding": "UTF-8", "text": "# Create your tests here.\nfrom django.test import TestCase\nfrom django_web.models import WxRecord\n\n\n# After writing the test cases, run them. This also differs from how unittest runs tests.\n# Django provides the \"test\" command to run tests from the command line; inserted test data goes into a temporary test database.\n# python manage.py 
test\nclass ModelTest(TestCase):\n\n # Setup: create one record\n def setUp(self):\n # WxRecord.objects.create(msg_type='1', msg_time='2018-08-04 14:22:42',\n # msg_from='凉城', msg_to='六岁就微信', msg_text='感觉')\n wxRecord = WxRecord(is_group='0', msg_type='TEXT', msg_time='2018-08-04 14:22:42',\n msg_from='凉城', msg_to='六岁就微信', msg_text='感觉')\n wxRecord.save()\n\n # The test cases start below\n # Query the inserted record via get and assert on the result\n def test_event_models(self):\n result = WxRecord.objects.get(msg_type='TEXT')\n self.assertEqual(result.msg_type, \"TEXT\")\n\n def test_all_models(self):\n result = WxRecord.objects.all()\n for r in result:\n print(\"group:%s type:%s time:%s from:%-15s to: %-15s content:%s\"\n % (r.is_group, r.msg_type, r.msg_time, r.msg_from, r.msg_to, r.msg_text))\n" }, { "alpha_fraction": 0.739130437374115, "alphanum_fraction": 0.739130437374115, "avg_line_length": 14, "blob_id": "d8b86bce578721e220f50e49cbdb777c0100de4b", "content_id": "574eba99d200bda82e451b43abb45471d2c973fc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 46, "license_type": "no_license", "max_line_length": 30, "num_lines": 3, "path": "/django_web/logout.py", "repo_name": "MeikoZhang/light_wechat", "src_encoding": "UTF-8", "text": "import django_web.tests as dwt\n\ndwt.logout()\n\n" }, { "alpha_fraction": 0.5161290168762207, "alphanum_fraction": 0.5537634491920471, "avg_line_length": 23.230770111083984, "blob_id": "dc9ffd748d44c7b4bf276a67f994ebfa59784c09", "content_id": "99f595263c0883965570867ec61d3a1e352c959d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 744, "license_type": "no_license", "max_line_length": 114, "num_lines": 25, "path": "/django_web/migrations/0001_initial.py", "repo_name": "MeikoZhang/light_wechat", "src_encoding": "UTF-8", "text": "# Generated by Django 2.0.7 on 2018-08-04 05:41\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='WxRecord',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('msg_type', models.CharField(max_length=2)),\n ('msg_time', models.CharField(max_length=50)),\n ('msg_from', models.CharField(max_length=100)),\n ('msg_to', models.CharField(max_length=100)),\n ('msg_text', models.CharField(max_length=2048)),\n ],\n ),\n ]\n" }, { "alpha_fraction": 0.5967933535575867, "alphanum_fraction": 0.6051660776138306, "avg_line_length": 33.56478500366211, "blob_id": "754adfcd73e535a9817003956af38774441af21c", "content_id": "a4d8d4f134b250de4617cddf576792c5e4c2fa51", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10542, "license_type": "no_license", "max_line_length": 119, "num_lines": 301, "path": "/django_web/login.py", "repo_name": "MeikoZhang/light_wechat", "src_encoding": "UTF-8", "text": "import os, time, json, sys, threading\nfrom queue import Queue\nimport itchat\nfrom itchat.content import *\nfrom itchat.utils import test_connect\nfrom django_web.models import WxRecord\nfrom django_web.Logger import logger\n\n\nBASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\n# Path where the login QR code is stored\nqrCode_dir = os.path.join(BASE_DIR, 'static\\wx_login\\qrcode.jpg')\nif os.path.exists(qrCode_dir):\n os.remove(qrCode_dir)\n# Path where the login status is stored\nlogin_status_dir = os.path.join(BASE_DIR, 'static\\wx_login\\itchat.pkl')\n# Directory for WeChat images/files\nwx_files_dir = 
os.path.join(BASE_DIR, 'static\\wx_files')\n\nq = Queue(maxsize=100)\n\nif_login = False\nqruuid = None\nhead_img = None\n\nload_user = None\n\n\ndef login_callback():\n global if_login\n if_login = True\n logger.info(\"登陆成功 ...\")\n global load_user\n load_user = itchat.search_friends()\n\n\ndef exit_callback():\n global if_login\n if_login = False\n logger.info(\"程序已登出 ...\")\n\n\ndef qr_callback(uuid=None, status=None, qrcode=None):\n logger.info(\"二维码获取及存储 ...uuid:%s status:%s\" % (uuid, status))\n with open(qrCode_dir, 'wb') as f:\n f.write(qrcode)\n # qruuid = uuid\n # logger.info(\"qr_callback uuid:%s\" % (uuid))\n # global qruuid\n # if qruuid != uuid:\n\n\ndef get_qr():\n if not test_connect():\n logger.info(\"You can't get access to internet or wechat domain, so exit.\")\n return None\n\n global qruuid\n qruuid = itchat.get_QRuuid()\n itchat.uuid = qruuid\n if os.path.exists(qrCode_dir):\n os.remove(qrCode_dir)\n itchat.get_QR(uuid=qruuid, picDir=qrCode_dir, qrCallback=qr_callback)\n return qruuid\n\n\ndef load_login():\n return itchat.load_login_status(fileDir=login_status_dir, loginCallback=login_callback, exitCallback=exit_callback)\n\n\ndef check_login():\n return itchat.check_login(qruuid)\n\n\ndef login():\n if load_login():\n global if_login\n if_login = True\n logger.info('loan login status success')\n return '200'\n\n logger.info('begin to login ...')\n status = itchat.check_login(qruuid)\n logger.info('check login status'+status)\n\n if status == '200':\n if_login = True\n logger.info('check login, status success')\n elif status == '201':\n logger.info('check login, need confirm')\n return status\n elif status == '408':\n logger.info('check login, qrCode timeout')\n get_qr()\n return status\n\n # 获取登陆人信息\n user_info = itchat.web_init()\n logger.info('Login successfully as %s' % user_info['User']['NickName'])\n\n # 手机web微信登陆状态显示\n itchat.show_mobile_login()\n logger.info('show mobile login')\n\n # 获取最新近聊列表\n itchat.get_contact(update=True)\n logger.info('get contact complete')\n\n # 获取最新好友列表\n itchat.get_friends(update=True)\n logger.info('get friends complete')\n\n # 获取最新群聊列表\n chat_rooms = itchat.get_chatrooms(update=True)\n logger.info('get chatRooms complete')\n\n # 更新群聊详细信息(人员列表)\n for chat_room in chat_rooms:\n logger.debug(json.dumps(chat_room))\n itchat.update_chatroom(userName=chat_room['UserName'])\n logger.info('update chatRooms members complete')\n\n # 保存登陆状态\n itchat.dump_login_status(fileDir=login_status_dir)\n logger.info('save the login success to %s' % login_status_dir)\n\n # 启动心跳连接\n itchat.start_receiving()\n logger.info('start receiving and heartbeat')\n\n class WebMessage(object):\n def __init__(self, _msg):\n self._msg = _msg\n\n def get_msg(self):\n return self._msg\n\n # 消息注册,好友文本消息\n @itchat.msg_register(TEXT)\n def text_reply(msg):\n logger.debug(json.dumps(msg))\n\n # q_msg = WebMessage('text', msg)\n # q.put(q_msg)\n\n msg_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(msg.createTime))\n msg_from = itchat.search_friends(userName=msg['FromUserName'])['NickName']\n msg_to = itchat.search_friends(userName=msg['ToUserName'])['NickName']\n msg_text = msg['Text']\n\n wx_record = WxRecord(is_group='0', msg_type=msg.type,\n msg_time=msg_time, msg_from=msg_from, msg_to=msg_to, msg_text=msg_text)\n wx_record.save()\n\n q.put(WebMessage({'is_group': '0', 'msg_type': msg.type, 'msg_time': msg_time,\n 'msg_from': msg_from, 'msg_to': msg_to, 'msg_text': msg_text}))\n\n logger.debug(\"save to db type:%s time:%s from:%-15s to: %-15s content:%s\"\n % 
(msg.type, msg_time, msg_from, msg_to, msg_text))\n\n # 消息注册,好友图片/音频/视频/文件消息\n @itchat.msg_register([PICTURE, RECORDING, ATTACHMENT, VIDEO])\n def download_files(msg):\n msg.download(os.path.join(wx_files_dir, msg.fileName))\n\n msg_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(msg.createTime))\n msg_from = itchat.search_friends(userName=msg['FromUserName'])['NickName']\n msg_to = itchat.search_friends(userName=msg['ToUserName'])['NickName']\n msg_text = os.path.join(wx_files_dir, msg.fileName)\n\n wx_record = WxRecord(is_group='0', msg_type=msg.type,\n msg_time=msg_time, msg_from=msg_from, msg_to=msg_to, msg_text=msg_text)\n wx_record.save()\n q.put(WebMessage({'is_group': '0', 'msg_type': msg.type, 'msg_time': msg_time,\n 'msg_from': msg_from, 'msg_to': msg_to, 'msg_text': msg_text}))\n logger.debug(\"save to db type:%s time:%s from:%-15s to: %-15s content:%s\"\n % (msg.type, msg_time, msg_from, msg_to, msg_text))\n\n # 消息注册,群文本消息\n @itchat.msg_register(TEXT, isGroupChat=True)\n def text_reply(msg):\n logger.debug(json.dumps(msg))\n\n msg_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(msg.createTime))\n msg_from = msg['ActualNickName']\n msg_to = msg['User']['NickName']\n msg_text = msg['Text']\n\n wx_record = WxRecord(is_group='1', msg_type=msg.type,\n msg_time=msg_time, msg_from=msg_from, msg_to=msg_to, msg_text=msg_text)\n wx_record.save()\n q.put(WebMessage({'is_group': '1', 'msg_type': msg.type, 'msg_time': msg_time,\n 'msg_from': msg_from, 'msg_to': msg_to, 'msg_text': msg_text}))\n logger.debug(\"save to db type:%s time:%s from:%-15s to: %-15s content:%s\"\n % ('2', msg_time, msg_from, msg_to, msg_text))\n\n # 消息注册,群图片/音频/视频/文件消息\n @itchat.msg_register([PICTURE, RECORDING, ATTACHMENT, VIDEO], isGroupChat=True)\n def download_files(msg):\n msg.download(os.path.join(wx_files_dir, msg.fileName))\n\n msg_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(msg.createTime))\n msg_from = itchat.search_friends(userName=msg['FromUserName'])['NickName']\n msg_to = itchat.search_friends(userName=msg['ToUserName'])['NickName']\n msg_text = os.path.join(wx_files_dir, msg.fileName)\n\n wx_record = WxRecord(is_group='1', msg_type=msg.type,\n msg_time=msg_time, msg_from=msg_from, msg_to=msg_to, msg_text=msg_text)\n wx_record.save()\n q.put(WebMessage({'is_group': '1', 'msg_type': msg.type, 'msg_time': msg_time,\n 'msg_from': msg_from, 'msg_to': msg_to, 'msg_text': msg_text}))\n logger.debug(\"save to db type:%s time:%s from:%-15s to: %-15s content:%s\"\n % (msg.type, msg_time, msg_from, msg_to, msg_text))\n\n # 新建线程跑任务\n def new_thread():\n itchat.run()\n\n threading.Thread(target=new_thread).start()\n logger.info(\"聊天记录同步中 ... 
\")\n return status\n\n\n# 获取消息\ndef get_msg():\n if not if_login:\n logger.info(\"status not login, reloading...\")\n reload_status = login()\n if reload_status == '200':\n logger.info(\"status not login, reloading success\")\n else:\n return {'status': 'not login, reloading failed'}\n\n logger.info(\"getting msg from webQueue ...\")\n try:\n q_msg = q.get(timeout=25)\n except Exception:\n logger.error(\"getting from queue error\")\n q_msg = None\n\n if q_msg is None:\n return None\n\n msg = q_msg.get_msg()\n\n msg_type_list = {TEXT: '文本消息', PICTURE: '图片消息',\n RECORDING: '语音消息', ATTACHMENT: '附件消息', VIDEO: '视频消息'}\n msg_type = msg_type_list.get(msg.get('msg_type'))\n msg_text = msg.get('msg_text') if msg_type == '文本消息' else msg_type\n msg_time = msg.get('msg_time')\n msg_from = msg.get('msg_from')\n msg_to = msg.get('msg_to')\n\n msg_group = msg.get('is_group')\n if msg_group == '0':\n logger.info('好友消息 ... time:%s from:%-15s to: %-15s content:%s' %\n (msg_time, msg_from, msg_to, msg_text))\n else:\n logger.info('群内消息 ... time:%s from:%-15s to:%-15s content:%s' %\n (msg_time, msg_from, msg_to, msg_text))\n return {'status': 'ok', 'msg_type': msg_type, 'msg_time': msg_time,\n 'msg_from': msg_from, 'msg_to': msg_to, 'msg_text': msg_text}\n\n\ndef thread_auto_login():\n\n # 消息注册 好友消息\n @itchat.msg_register(TEXT)\n def text_reply(msg):\n print(json.dumps(msg))\n fromuser = itchat.search_friends(userName=msg['FromUserName'])['NickName']\n print(itchat.search_friends(userName=msg['ToUserName']))\n touser = itchat.search_friends(userName=msg['ToUserName'])['NickName']\n msgtime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(msg.createTime))\n msgtext = msg['Text']\n print('time:%s from:%s to: %s content:%s' % (msgtime, fromuser, touser, msgtext))\n\n itchat.auto_login(hotReload=True, statusStorageDir=login_status_dir, picDir=qrCode_dir,\n qrCallback=qr_callback,\n loginCallback=login_callback, exitCallback=exit_callback)\n itchat.run(blockThread=False)\n global if_login\n if_login = True\n print('auto_login over')\n\n\ndef auto_login():\n print('thread_auto_login start')\n threading.Thread(target=thread_auto_login).start()\n print('thread_auto_login over')\n itchat.web_init\n\n\ndef login_status():\n return if_login;\n\n\ndef logout():\n global if_login\n if_login = False\n itchat.logout();\n\n" }, { "alpha_fraction": 0.7156549692153931, "alphanum_fraction": 0.7763578295707703, "avg_line_length": 21.214284896850586, "blob_id": "4109b353f413d6fe5a41b68e0790e6c9a86ccbc9", "content_id": "6dc29c3c5bf0ccd84f7c935c66289f0b5c4fe7fd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 347, "license_type": "no_license", "max_line_length": 53, "num_lines": 14, "path": "/readme.txt", "repo_name": "MeikoZhang/light_wechat", "src_encoding": "UTF-8", "text": "\n\n\nPython3.4+Django1.9+Bootstrap3\n\thttps://www.cnblogs.com/alan-babyblog/p/5843773.html\n\n用pycharm开发django项目示例\n\thttps://www.cnblogs.com/kylinlin/p/5184592.html\n\n\nlykchat信息发送系统\n\thttps://github.com/lykops/lykchat/\n\n\nwxpy: 用 Python 玩微信\n\thttps://github.com/youfou/wxpy\n\thttp://wxpy.readthedocs.io/zh/latest/itchat.html" }, { "alpha_fraction": 0.6368821263313293, "alphanum_fraction": 0.6692014932632446, "avg_line_length": 28.22222137451172, "blob_id": "a61c38829b6b84dc6ba0eb6e68f6c4cbe5e1ebd2", "content_id": "962655a12ed4ec40a653115495bae07482add458", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 526, "license_type": 
"no_license", "max_line_length": 48, "num_lines": 18, "path": "/django_web/models.py", "repo_name": "MeikoZhang/light_wechat", "src_encoding": "UTF-8", "text": "from django.db import models\n\n# Create your models here.\n\n\nclass WxRecord(models.Model):\n # group :1 not group:0\n is_group = models.CharField(max_length=2)\n # text,img,vid,fil\n msg_type = models.CharField(max_length=50)\n # %Y-%m-%d %H:%M:%S\n msg_time = models.CharField(max_length=50)\n # who send the message\n msg_from = models.CharField(max_length=100)\n # who receive the message\n msg_to = models.CharField(max_length=100)\n # message content\n msg_text = models.CharField(max_length=2048)\n" }, { "alpha_fraction": 0.6093514561653137, "alphanum_fraction": 0.61689293384552, "avg_line_length": 17.38888931274414, "blob_id": "09f6850e3d94be37891504fd3ffdd69aae522b3a", "content_id": "3b3fa5c797cf888736d790dc976a1ae033a28f5a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 723, "license_type": "no_license", "max_line_length": 82, "num_lines": 36, "path": "/django_web/wxdb.py", "repo_name": "MeikoZhang/light_wechat", "src_encoding": "UTF-8", "text": "from django_web.models import WxRecord\n\n\ndef get_all():\n\n # 1.查询出所有图书的信息\n all = WxRecord.objects.all()\n return all\n\n\ndef get_index(msg_type=None):\n re = WxRecord.objects.get(msg_type=msg_type)\n return re\n\n\ndef save(msg_type=None, msg_time=None, msg_from=None, msg_to=None, msg_text=None):\n # 1.创建BookInfo对象\n r = WxRecord()\n r.msg_type = msg_type\n r.msg_time = msg_time\n r.msg_from = msg_from\n r.msg_to = msg_to\n r.msg_text = msg_text\n\n # 2.保存进数据库\n res = r.save()\n return res\n\n\ndef delete(msg_type):\n # 1.通过bid获取图书对象\n rd = WxRecord.objects.get(msg_type=msg_type)\n\n # 2.删除\n res = rd.delete()\n return res\n\n" } ]
10
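The login.py captured in this record decouples itchat's receiver thread from the Django request thread with a shared bounded queue: each @itchat.msg_register handler put()s a message dict, and get_msg() blocks on get(timeout=25). A minimal, self-contained sketch of that producer/consumer pattern, standard library only; the handler and receiver names below are hypothetical stand-ins, not part of the repository:

    import queue
    import threading
    import time

    q = queue.Queue(maxsize=100)  # bounded, as in login.py

    def handler(i):
        # stand-in for an @itchat.msg_register callback: pack the fields
        # of one message into a dict and hand it to the web layer
        q.put({'msg_text': 'hello %d' % i,
               'msg_time': time.strftime('%Y-%m-%d %H:%M:%S')})

    def receiver():
        # stand-in for the thread that runs itchat.run()
        for i in range(3):
            handler(i)

    threading.Thread(target=receiver).start()
    for _ in range(3):
        try:
            msg = q.get(timeout=25)  # same timeout get_msg() uses
        except queue.Empty:
            break                    # login.py returns None in this case
        print(msg['msg_text'], msg['msg_time'])

Blocking with a timeout instead of polling is what lets the long-poll in get_msg() return promptly when a message arrives while still timing out cleanly when nothing does.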
SuavisLiu/StochasticChemicalKinetics
https://github.com/SuavisLiu/StochasticChemicalKinetics
f551f96f741e423fd762aa8344a27590a4b79cb4
17ab3ca65a99d8607198f075980917a8eae81604
41ac6c26128c43687b1557ed501f730cc8b0b216
refs/heads/main
2023-04-23T03:05:44.938871
2021-06-01T22:05:07
2021-06-01T22:05:07
362,586,347
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.42604929208755493, "alphanum_fraction": 0.47701531648635864, "avg_line_length": 29.93814468383789, "blob_id": "87da118e8c3c2c5968dd9c03d7bb5080bce4c0eb", "content_id": "7b6a9163fef216e554db45e18a5ead59c5efafab", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3002, "license_type": "no_license", "max_line_length": 91, "num_lines": 97, "path": "/nextReact.py", "repo_name": "SuavisLiu/StochasticChemicalKinetics", "src_encoding": "UTF-8", "text": "from numpy.random import Generator, PCG64 # numpy randon number generator routines\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport random \nfrom itertools import accumulate\n\n#------------------------ This is the function for Next Reaction -----------------------#\n\ndef nextRact( x0, jps, kap, T, r):\n \"\"\"\n This is the funciton of Next Reaction algorithm. \n It will simulate Xt, the number of compounds in the reactions. \n Arguments\n x0: vector of size of number of different types of compounds. Intial status.\n jps: set of vectors of same size of x0. Jumps of the reactions. \n kap: vector of size of number of reactions. Stores the reaction rate. \n T: (type = float) final time of the reaction \n return\n x: the list that contains all steps for the reactions\n t: a list of jump time \n \"\"\"\n\n t = [0]\n x = [x0]\n xt = x0\n tau = np.array([0, 0, 0, 0, 0, 0, 0 ,0])\n\n\n while (t[-1] < 10):\n print(t[-1])\n G = invG( t[-1], T, tau, xt, kap, r)\n m = G.argmin()\n xt = np.add(xt, jps[m])\n x.append(xt.tolist())\n T[m] = T[m] - np.log(np.random.uniform(0,1))\n tau = tau + ( G[m] - t[-1] ) * reactRate( xt, kap, r)\n dt = G[m]\n t.append(dt)\n\n return x, t\n#------------------------- Helper Function Computing Reaction Rate ---------------------#\n\ndef reactRate( xt, kap, r):\n rate = np.zeros(r)\n rate[0] = kap[0] * xt[0]\n rate[1] = kap[1] * xt[1]\n rate[2] = kap[2] * xt[1]\n rate[3] = kap[3] * xt[2]\n rate[4] = kap[4] * xt[2] * (xt[2] - 1)\n rate[5] = kap[5] * xt[3]\n rate[6] = kap[6] * xt[0] * xt[3]\n rate[7] = kap[7] * xt[4]\n return np.array(rate)\n\n\ndef invG( t, T, tau, xt, kap, r):\n return(t + np.divide((T - tau), reactRate(xt, kap, r)))\n\n################################### Main Program #########################################\n\n#x0 = [1, 10, 50, 10, 0]\nx0 = [20, 200, 1000, 200, 0]\n\n#kap = [200, 10, 25, 1, 0.01, 1, 0, 0]\nkap = [200, 10, 25, 1, 0.01, 1, 2, 0.1]\n\njps = [[0,1,0,0,0],[0,0,1,0,0],[0,-1,0,0,0],[0,0,-1,0,0],[0,0,-2,1,0],[0,0,0,-1,0],\n [-1,0,0,-1,1],[1,0,0,1,-1]]\nr = 8\nT = - np.log(np.random.uniform(0,1,r))\n\nx, t = nextRact( x0, jps, kap, T, r)\n\nx = np.array(x)\nnumG = x[:,0]\nnumM = x[:,1]\nnumP = x[:,2]\nnumD = x[:,3]\nnumB = x[:,4]\nxt = x[-1]\n\n################################# Meaningless Stuff #######################################\n\nprint('Final State:', xt)\n\nfig, ax = plt.subplots() \n#ax.plot(time, numG, '-', label = 'numG') \nax.plot(t, numM, '-', label = 'numM') \nax.plot(t, numP, '-', label = 'numP') \nax.plot(t, numD, '-', label = 'numD')\n#ax.plot(time, numB, '-', label = 'numB') \nax.legend() \nax.set_ylabel('num of compounds') \nax.set_xlabel('Time')\ntitle = \" Trajectory of Compounds with k =\", kap\nax.set_title(title) \nplt.show() \n" }, { "alpha_fraction": 0.6927212476730347, "alphanum_fraction": 0.695202648639679, "avg_line_length": 24.09375, "blob_id": "4a1f15f0a6fbf36fb74cd75c7435e32b8678c8e9", "content_id": "cb21fb8ccb3abfa5fe33b3cbab1a963c22dd1f74", "detected_licenses": [], 
"is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2418, "license_type": "no_license", "max_line_length": 129, "num_lines": 96, "path": "/README.md", "repo_name": "SuavisLiu/StochasticChemicalKinetics", "src_encoding": "UTF-8", "text": "# Stochastic Chemical Kinetics\n\n## Introduction \nThis project contains three algorithms modeling the chemical reactions over time with intial conditions and given reaction rates.\n\n## Next Reaction\n\n** nextReaction.py contains the following main function. **\n\nInspired by Bill\n\nThis is the funciton of Next Reaction algorithm. \nIt will simulate Xt, the number of compounds in the reactions. \n\n* Arguments \n\n x0: vector of size of number of different types of compounds. Intial status.\n\n jps: set of vectors of same size of x0. Jumps of the reactions. \n\n kap: vector of size of number of reactions. Stores the reaction rate. \n\n T: (type = float) final time of the reaction \n\n* Returns \n\n x: the list that contains all steps for the reactions\n \n t: a list of jump time \n \n \n## Gillespie Algorithm\n\n** gillespie.py contains the following main function. **\n\nThis is the funciton of Gillespie' algorithm. \nIt will simulate Xt, the number of compounds in the reactions. \n \n \n* Arguments \n\n x0: vector of size of number of different types of compounds. Intial status.\n\n jps: set of vectors of same size of x0. Jumps of the reactions. \n\n kap: vector of size of number of reactions. Stores the reaction rate. \n\n T: (type = float) final time of the reaction \n\n r: number of possible reactions\n\n n: numebr of different kinds of compounds\n \n \n* Returns \n\n x: the list that contains all steps for the reactions\n \n t: a list of jump time \n \n\n\n\n## Tau Leaping algorithm \n\n** tau_leap.py contains the following main function. **\n\n This is the funciton of tau leaping algorithm. \n It will simulate Xt, the number of compounds in the reactions. \n \n* Arguments\n \n x0: vector of size of number of different types of compounds. Intial status.\n \n jps: set of vectors of same size of x0. Jumps of the reactions. \n \n kap: vector of size of number of reactions. Stores the reaction rate. \n \n T: (type = float) final time of the reaction \n \n r: number of possible reactions\n \n h: step size\n \n \n* Returns\n \n x: the list that contains all steps for the reactions\n \n t: a list of jump time\n\n## Order of Accurarcy \n\n orderAcc.py contains the major code for computing the weak order of accurarcy to tau leaping.\n \n Here, we compare the approximate solution --- tau leaping, with the true solution --- gillespie. 
\n \n \n" }, { "alpha_fraction": 0.44240602850914, "alphanum_fraction": 0.4875187873840332, "avg_line_length": 29.227272033691406, "blob_id": "a9a4e3cdb3d24f7d7ce9adb1df7acdaafa0c417d", "content_id": "47f2419e16cedf13a27fd137a6ba082d2c3dbd45", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3325, "license_type": "no_license", "max_line_length": 91, "num_lines": 110, "path": "/tau_leap.py", "repo_name": "SuavisLiu/StochasticChemicalKinetics", "src_encoding": "UTF-8", "text": "from numpy.random import Generator, PCG64 # numpy randon number generator routines\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport random \nfrom array import *\nfrom itertools import accumulate\n\n#------------------------ This is the function for Tau Leaping ------------------------#\ndef tauleap( x0, kap, jps, T, h, r):\n \"\"\"This is the funciton of tau leaping algorithm. \n It will simulate Xt, the number of compounds in the reactions. \n Arguments\n x0: vector of size of number of different types of compounds. Intial status.\n jps: set of vectors of same size of x0. Jumps of the reactions. \n kap: vector of size of number of reactions. Stores the reaction rate. \n T: (type = float) final time of the reaction \n r: number of possible reactions\n h: step size\n return\n x: the list that contains all steps for the reactions\n t: a list of jump time \"\"\"\n\n # initial status\n xt = x0\n x = [x0]\n t = [0]\n ks = np.array(jps).transpose()\n while (t[-1] < T):\n # generate tau \n tau = np.array(reactRate( xt, kap, r))\n #tau = (t[-1] - h * (len(t)-1)) * np.array(xt)\n tau[tau < 0] = 0\n #print(tau)\n # geenrate Y\n Y = np.random.poisson(tau * h)\n \n #print(Y)\n\n if (t[-1] + h > T):\n h = T - t[-1]\n\n # the change over the current interval\n dx = np.matmul(ks, Y) # return an array \n #print(dx)\n xt = np.add(xt, dx)\n x.append(xt.tolist())\n t.append(t[-1] + h)\n\n\n return x, t\n\n#------------------------- Helper Function Computing Reaction Rate ---------------------#\ndef reactRate( xt, kap, r):\n\n \"\"\" This function returns a r-vector for the reaction rate \"\"\"\n xt = np.array(xt)\n xt[xt < 0] = 0\n xt.tolist()\n rate = np.zeros(r)\n rate[0] = kap[0] * xt[0]\n rate[1] = kap[1] * xt[1]\n rate[2] = kap[2] * xt[1]\n rate[3] = kap[3] * xt[2]\n rate[4] = kap[4] * xt[2] * (xt[2] - 1)\n rate[5] = kap[5] * xt[3]\n rate[6] = kap[6] * xt[0] * xt[3]\n rate[7] = kap[7] * xt[4]\n return rate\n\n################################### Main Program #########################################\n\n# initialize the parameters\n#x0 = [1, 10, 50, 10, 0]\nx0 = [20, 200, 1000, 200, 0]\njps = [[0,1,0,0,0],[0,0,1,0,0],[0,-1,0,0,0],[0,0,-1,0,0],[0,0,-2,1,0],[0,0,0,-1,0],\n [-1,0,0,-1,1],[1,0,0,1,-1]]\nkap = [200, 10, 25, 1, 0.01, 1, 2, 0.1]\n#kap = [200, 10, 25, 1, 0.01, 1, 0, 0]\nT = 10\nr = 8\nh = 0.01\n\n# run the algorithm \nx, t = tauleap(x0, kap, jps, T, h, r)\n\n\nx = np.array(x)\nnumG = x[:,0]\nnumM = x[:,1]\nnumP = x[:,2]\nnumD = x[:,3]\nnumB = x[:,4]\nxt = x[-1]\n\n################################# Meaningless Stuff #######################################\n\nprint('Final State:', xt)\n\nfig, ax = plt.subplots() \n#ax.plot(time, numG, '-', label = 'numG') \nax.plot(t, numM, '-', label = 'numM') \nax.plot(t, numP, '-', label = 'numP') \nax.plot(t, numD, '-', label = 'numD')\n#ax.plot(time, numB, '-', label = 'numB') \nax.legend() \nax.set_ylabel('num of compounds') \nax.set_xlabel('Time')\ntitle = \"Tau Trajectory of Compounds with k =\", 
kap\nax.set_title(title) \nplt.show() " }, { "alpha_fraction": 0.41511499881744385, "alphanum_fraction": 0.4737130403518677, "avg_line_length": 24.02739715576172, "blob_id": "400b0be1c92c6f5484e5c4df3bb9987caf6e80bd", "content_id": "17a137ae930b1e8ced655325253ecf13da71f725", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1826, "license_type": "no_license", "max_line_length": 90, "num_lines": 73, "path": "/orderAcc.py", "repo_name": "SuavisLiu/StochasticChemicalKinetics", "src_encoding": "UTF-8", "text": "from numpy.random import Generator, PCG64 # numpy randon number generator routines\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport random \nfrom itertools import accumulate\n\nimport gillespie\nimport tau_leap\n\n\n#------------------------- Helper Function Finding the index -----------------------#\ndef compare(lst, num):\n for i in range(len(lst) - 1):\n if num <= lst[i]: return i \n return 0\n\n################################### Main Program #########################################\n# initialize the parameters\nx0 = [1, 10, 50, 10, 0]\njps = [[0,1,0,0,0],[0,0,1,0,0],[0,-1,0,0,0],[0,0,-1,0,0],[0,0,-2,1,0],[0,0,0,-1,0],\n [-1,0,0,-1,1],[1,0,0,1,-1]]\n#kap = [200, 10, 25, 1, 0.01, 1, 2, 0.1]\nkap = [200, 10, 25, 1, 0.01, 1, 0, 0]\nT = 10\nr = 8\nerror = []\nhlist = []\nh = 0.0001\n\n\nfor i in range (3):\n\n h = h/2\n maxE = 0\n\n x_gillespie, t_gillespie = gillespie.gillespie(x0, jps, kap, T, r)\n x_tau, t_tau = tau_leap.tauleap( x0, kap, jps, T, h, r)\n\n t = 0\n while (t < T):\n\n idx1 = compare(t_gillespie, t)\n idx2 = compare(t_tau, t)\n\n x = np.array(x_gillespie[idx1])\n y = np.array(x_tau[idx2])\n\n z = sum(np.absolute(x - y))\n if maxE < z:\n maxE = z\n \n t = t + 0.25\n\n error = error + [maxE]\n hlist = hlist + [h]\n\np = np.log(error[0]/error[1]) / np.log(hlist[0]/hlist[1])\n\nprint(error)\nprint(hlist)\nprint(p)\n\nhlist = np.log(hlist)\nerror = np.log(error)\n\nfig, ax = plt.subplots() \nax.plot(hlist , error , '*-') \nax.legend() \nax.set_ylabel('log error') \nax.set_xlabel('log h')\ntitle = \"Error vs. h in loglog Plot\"\nax.set_title(title) \nplt.show()" } ]
4
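orderAcc.py in this record imports a gillespie module that is not among the four files listed, and the README describes Gillespie's algorithm only in prose: draw an exponential waiting time from the total propensity, then choose which reaction fires in proportion to its rate. A minimal direct-method sketch under those assumptions; rate_fn here stands in for the module-level reactRate(xt, kap, r) defined above, and this is an illustration, not the repository's implementation:

    import numpy as np

    def gillespie(x0, jps, kap, T, r, rate_fn):
        # direct-method SSA: exponential holding time, categorical reaction choice
        t, x = [0.0], [list(x0)]
        xt = np.array(x0, dtype=float)
        while t[-1] < T:
            a = rate_fn(xt, kap, r)   # propensity vector of length r
            a0 = a.sum()
            if a0 <= 0:               # no reaction can fire; trajectory is absorbed
                break
            t.append(t[-1] + np.random.exponential(1.0 / a0))
            m = np.random.choice(r, p=a / a0)
            xt = xt + np.array(jps[m])
            x.append(xt.tolist())
        return x, t

Returning the (x, t) pair matches the tuple shape that orderAcc.py unpacks from gillespie.gillespie(...); the extra rate_fn argument is an assumption of this sketch.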
niuxinzan/minelab
https://github.com/niuxinzan/minelab
6d2f68bbfbc63d97680cccc74c3448886d6c6f0f
6b75daeb6ddf3d2abe9797938c7fb105e60a323e
9a638b447b6ff1fa54c0965d43b640f3b787c5a1
refs/heads/master
2020-03-23T06:56:50.264871
2018-07-31T05:45:20
2018-07-31T05:45:20
141,238,667
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6264117956161499, "alphanum_fraction": 0.635389506816864, "avg_line_length": 30.10360336303711, "blob_id": "dc6552630a8dca71c74e242b43c5537fbe85898e", "content_id": "b17bddd059a90e1a0ab08d13a7beb71d853e0e59", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6906, "license_type": "no_license", "max_line_length": 86, "num_lines": 222, "path": "/minelab/datasets/TRAJECTORY.py", "repo_name": "niuxinzan/minelab", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# @Time : 2018/7/17 14:12\n# @Author : buf\n# @Email : [email protected]\n# @File : TRAJECTORY.py\n# @Software: PyCharm\nclass Trajectory:\n def __init__(self,line):\n lineArr=str(line).split(\",\")\n self.__rowkey = lineArr[0]\n self.__datasource = lineArr[1]\n self.__carID = lineArr[2]\n self.__carType = int(lineArr[3])\n self.__linkSequence = lineArr[4]\n self.__gridSequence = lineArr[5]\n self.__meshSequence = lineArr[6]\n self.__citySequence = lineArr[7]\n self.__enterTimeSequence = lineArr[8]\n self.__speedSequence = lineArr[9]\n self.__travelLengthSequence = lineArr[10]\n self.__linkNo = int(lineArr[11])\n self.__status = int(lineArr[12])\n self.__event = int(lineArr[13])\n self.__travelDistance = int(lineArr[14])\n self.__travelTime = int(lineArr[15])\n self.__averageSpeed = int(lineArr[16])\n self.__oLinkID = int(lineArr[17])\n self.__dLinkID = int(lineArr[18])\n self.__splitConfidence = float(lineArr[19])\n self.__integrityConfidence = float(lineArr[20])\n self.__confidence = float(lineArr[21])\n self.__oLongitude = float(lineArr[22])\n self.__oLatitude =float(lineArr[23])\n self.__dLongitude = float(lineArr[24])\n self.__dLatitude = float(lineArr[25])\n self.__oTime = int(lineArr[26])\n self.__dTime = int(lineArr[27])\n self.__orgGPSSequence = lineArr[28]\n self.__prejSequence = lineArr[29]\n\n @property\n def rowkey(self):\n return self.__rowkey\n @rowkey.setter\n def rowkey(self, rowkey):\n self.__rowkey = rowkey\n @property\n def datasource(self):\n return self.__datasource\n @datasource.setter\n def datasource(self, datasource):\n self.__datasource = datasource\n @property\n def carID(self):\n return self.__carID\n @carID.setter\n def carID(self, carID):\n self.__carID = carID\n @property\n def carType(self):\n return self.__carType\n @carType.setter\n def carType(self, carType):\n self.__carType = carType\n @property\n def linkSequence(self):\n return self.__linkSequence\n @linkSequence.setter\n def linkSequence(self, linkSequence):\n self.__linkSequence = linkSequence\n @property\n def gridSequence(self):\n return self.__gridSequence\n @gridSequence.setter\n def gridSequence(self, gridSequence):\n self.__gridSequence = gridSequence\n @property\n def meshSequence(self):\n return self.__meshSequence\n @meshSequence.setter\n def meshSequence(self, meshSequence):\n self.__meshSequence = meshSequence\n @property\n def citySequence(self):\n return self.__citySequence\n @citySequence.setter\n def citySequence(self, citySequence):\n self.__citySequence = citySequence\n @property\n def enterTimeSequence(self):\n return self.__enterTimeSequence\n @enterTimeSequence.setter\n def enterTimeSequence(self, enterTimeSequence):\n self.__enterTimeSequence = enterTimeSequence\n @property\n def speedSequence(self):\n return self.__speedSequence\n @speedSequence.setter\n def speedSequence(self, speedSequence):\n self.__speedSequence = speedSequence\n @property\n def travelLengthSequence(self):\n return self.__travelLengthSequence\n 
@travelLengthSequence.setter\n def travelLengthSequence(self, travelLengthSequence):\n self.__travelLengthSequence = travelLengthSequence\n @property\n def linkNo(self):\n return self.__linkNo\n @linkNo.setter\n def linkNo(self, linkNo):\n self.__linkNo = linkNo\n @property\n def status(self):\n return self.__status\n @status.setter\n def status(self, status):\n self.__status = status\n @property\n def event(self):\n return self.__event\n @event.setter\n def event(self, event):\n self.__event = event\n @property\n def travelDistance(self):\n return self.__travelDistance\n @travelDistance.setter\n def travelDistance(self, travelDistance):\n self.__travelDistance = travelDistance\n @property\n def travelTime(self):\n return self.__travelTime\n @travelTime.setter\n def travelTime(self, travelTime):\n self.__travelTime = travelTime\n @property\n def averageSpeed(self):\n return self.__averageSpeed\n @averageSpeed.setter\n def averageSpeed(self, averageSpeed):\n self.__averageSpeed = averageSpeed\n @property\n def oLinkID(self):\n return self.__oLinkID\n @oLinkID.setter\n def oLinkID(self, oLinkID):\n self.__oLinkID = oLinkID\n @property\n def dLinkID(self):\n return self.__dLinkID\n @dLinkID.setter\n def dLinkID(self, dLinkID):\n self.__dLinkID = dLinkID\n @property\n def splitConfidence(self):\n return self.__splitConfidence\n @splitConfidence.setter\n def splitConfidence(self, splitConfidence):\n self.__splitConfidence = splitConfidence\n @property\n def integrityConfidence(self):\n return self.__integrityConfidence\n @integrityConfidence.setter\n def integrityConfidence(self, integrityConfidence):\n self.__integrityConfidence = integrityConfidence\n @property\n def confidence(self):\n return self.__confidence\n @confidence.setter\n def confidence(self, confidence):\n self.__confidence = confidence\n @property\n def oLongitude(self):\n return self.__oLongitude\n @oLongitude.setter\n def oLongitude(self, oLongitude):\n self.__oLongitude = oLongitude\n @property\n def oLatitude(self):\n return self.__oLatitude\n @oLatitude.setter\n def oLatitude(self, oLatitude):\n self.__oLatitude = oLatitude\n @property\n def dLongitude(self):\n return self.__dLongitude\n @dLongitude.setter\n def dLongitude(self, dLongitude):\n self.__dLongitude = dLongitude\n @property\n def dLatitude(self):\n return self.__dLatitude\n @dLatitude.setter\n def dLatitude(self, dLatitude):\n self.__dLatitude = dLatitude\n @property\n def oTime(self):\n return self.__oTime\n @oTime.setter\n def oTime(self, oTime):\n self.__oTime = oTime\n @property\n def dTime(self):\n return self.__dTime\n @dTime.setter\n def dTime(self, dTime):\n self.__dTime = dTime\n @property\n def orgGPSSequence(self):\n return self.__orgGPSSequence\n @orgGPSSequence.setter\n def orgGPSSequence(self, orgGPSSequence):\n self.__orgGPSSequence = orgGPSSequence\n @property\n def prejSequence(self):\n return self.__prejSequence\n @prejSequence.setter\n def prejSequence(self, prejSequence):\n self.__prejSequence = prejSequence\n def __str__(self):\n return str(self.carID)+\",\"+str(self.travelDistance)+\",\"+str(self.averageSpeed)\n\n" }, { "alpha_fraction": 0.6269117593765259, "alphanum_fraction": 0.6361810564994812, "avg_line_length": 29.102325439453125, "blob_id": "13741eeef3cb68c6208092d72a773ad00b51a298", "content_id": "45283a51cf1f3901a9355b11ba05bdc3ddff472e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6473, "license_type": "no_license", "max_line_length": 64, "num_lines": 215, "path": 
"/minelab/datasets/CNTF.py", "repo_name": "niuxinzan/minelab", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# @Time : 2018/7/17 14:11\n# @Author : buf\n# @Email : [email protected]\n# @File : CNTF.py\n# @Software: PyCharm\nclass Cntf(object):\n def __init__(self,line):\n lineArr = str(line).split(\",\")\n self.__UNIQUEID=lineArr[0]\n self.__trafficIDType = lineArr[1]\n self.__directionFlag = lineArr[2]\n self.__regionType = lineArr[3]\n self.__objectID_type = lineArr[4]\n self.__regionID = lineArr[5]\n self.__objectID = lineArr[6]\n self.__roadLength = lineArr[7]\n self.__roadClass = lineArr[8]\n self.__linkType = lineArr[9]\n self.__timeStamp = lineArr[10]\n self.__locationFlag = lineArr[11]\n self.__flowFlag = lineArr[12]\n self.__trafficFlag = lineArr[13]\n self.__incidentFlag = lineArr[14]\n self.__fillFlag = lineArr[15]\n self.__hfillFlag = lineArr[16]\n self.__tfillFlag = lineArr[17]\n self.__lfillFlag = lineArr[18]\n self.__dfillFlag = lineArr[19]\n self.__precidentFlag = lineArr[20]\n self.__sprecidentFlag = lineArr[21]\n self.__mprecidentFlag = lineArr[22]\n self.__lprecidentFlag = lineArr[23]\n self.__status = lineArr[24]\n self.__travelTime = lineArr[25]\n self.__watingTime = lineArr[26]\n self.__linkCarCnt = lineArr[27]\n self.__linkSeqID = lineArr[28]\n @property\n def UNIQUEID(self):\n return self.__UNIQUEID\n @UNIQUEID.setter\n def UNIQUEID(self,UNIQUEID):\n self.__UNIQUEID=UNIQUEID\n @property\n def trafficIDType(self):\n return self.__trafficIDType\n @trafficIDType.setter\n def trafficIDType(self,trafficIDType):\n self.__trafficIDType=trafficIDType\n @property\n def directionFlag(self):\n return self.__directionFlag\n @directionFlag.setter\n def directionFlag(self,directionFlag):\n self.__directionFlag=directionFlag\n @property\n def regionType(self):\n return self.__regionType\n @regionType.setter\n def regionType(self,regionType):\n self.__regionType=regionType\n @property\n def objectID_type(self):\n return self.__objectID_type\n @objectID_type.setter\n def objectID_type(self,objectID_type):\n self.__objectID_type=objectID_type\n @property\n def regionID(self):\n return self.__regionID\n @regionID.setter\n def regionID(self,regionID):\n self.__regionID=regionID\n @property\n def objectID(self):\n return self.__objectID\n @objectID.setter\n def objectID(self,objectID):\n self.__objectID=objectID\n @property\n def roadLength(self):\n return self.__roadLength\n @roadLength.setter\n def roadLength(self,roadLength):\n self.__roadLength=roadLength\n @property\n def roadClass(self):\n return self.__roadClass\n @roadClass.setter\n def roadClass(self,roadClass):\n self.__roadClass=roadClass\n @property\n def linkType(self):\n return self.__linkType\n @linkType.setter\n def linkType(self,linkType):\n self.__linkType=linkType\n @property\n def timeStamp(self):\n return self.__timeStamp\n @timeStamp.setter\n def timeStamp(self,timeStamp):\n self.__timeStamp=timeStamp\n @property\n def locationFlag(self):\n return self.__locationFlag\n @locationFlag.setter\n def locationFlag(self,locationFlag):\n self.__locationFlag=locationFlag\n @property\n def flowFlag(self):\n return self.__flowFlag\n @flowFlag.setter\n def flowFlag(self,flowFlag):\n self.__flowFlag=flowFlag\n @property\n def trafficFlag(self):\n return self.__trafficFlag\n @trafficFlag.setter\n def trafficFlag(self,trafficFlag):\n self.__trafficFlag=trafficFlag\n @property\n def incidentFlag(self):\n return self.__incidentFlag\n @incidentFlag.setter\n def incidentFlag(self,incidentFlag):\n 
self.__incidentFlag=incidentFlag\n @property\n def fillFlag(self):\n return self.__fillFlag\n @fillFlag.setter\n def fillFlag(self,fillFlag):\n self.__fillFlag=fillFlag\n @property\n def hfillFlag(self):\n return self.__hfillFlag\n @hfillFlag.setter\n def hfillFlag(self,hfillFlag):\n self.__hfillFlag=hfillFlag\n @property\n def tfillFlag(self):\n return self.__tfillFlag\n @tfillFlag.setter\n def tfillFlag(self,tfillFlag):\n self.__tfillFlag=tfillFlag\n @property\n def lfillFlag(self):\n return self.__lfillFlag\n @lfillFlag.setter\n def lfillFlag(self,lfillFlag):\n self.__lfillFlag=lfillFlag\n @property\n def dfillFlag(self):\n return self.__dfillFlag\n @dfillFlag.setter\n def dfillFlag(self,dfillFlag):\n self.__dfillFlag=dfillFlag\n @property\n def precidentFlag(self):\n return self.__precidentFlag\n @precidentFlag.setter\n def precidentFlag(self,precidentFlag):\n self.__precidentFlag=precidentFlag\n @property\n def sprecidentFlag(self):\n return self.__sprecidentFlag\n @sprecidentFlag.setter\n def sprecidentFlag(self,sprecidentFlag):\n self.__sprecidentFlag=sprecidentFlag\n @property\n def mprecidentFlag(self):\n return self.__mprecidentFlag\n @mprecidentFlag.setter\n def mprecidentFlag(self,mprecidentFlag):\n self.__mprecidentFlag=mprecidentFlag\n @property\n def lprecidentFlag(self):\n return self.__lprecidentFlag\n @lprecidentFlag.setter\n def lprecidentFlag(self,lprecidentFlag):\n self.__lprecidentFlag=lprecidentFlag\n @property\n def status(self):\n return self.__status\n @status.setter\n def status(self,status):\n self.__status=status\n @property\n def travelTime(self):\n return self.__travelTime\n @travelTime.setter\n def travelTime(self,travelTime):\n self.__travelTime=travelTime\n @property\n def watingTime(self):\n return self.__watingTime\n @watingTime.setter\n def watingTime(self,watingTime):\n self.__watingTime=watingTime\n @property\n def linkCarCnt(self):\n return self.__linkCarCnt\n @linkCarCnt.setter\n def linkCarCnt(self,linkCarCnt):\n self.__linkCarCnt=linkCarCnt\n @property\n def linkSeqID(self):\n return self.__linkSeqID\n @linkSeqID.setter\n def linkSeqID(self,linkSeqID):\n self.__linkSeqID=linkSeqID\n\n def __str__(self) -> str:\n return self.objectID+\",\"+self.status+\",\"+self.travelTime\n\n" }, { "alpha_fraction": 0.5918367505073547, "alphanum_fraction": 0.6015971899032593, "avg_line_length": 26.487804412841797, "blob_id": "2fe3674ffb16805d1e7b7e676b295124bf956459", "content_id": "10bcea604e359c711f94f4a3e6dac0dac611f82a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2254, "license_type": "no_license", "max_line_length": 76, "num_lines": 82, "path": "/minelab/datasets/RTIC.py", "repo_name": "niuxinzan/minelab", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# @Time : 2018/7/17 14:11\n# @Author : buf\n# @Email : [email protected]\n# @File : RTIC.py\n# @Software: PyCharm\nclass Rtic(object):\n def __init__(self,data_line):\n dataArr=str(data_line).split(\",\")\n self.__areaId=dataArr[0]\n self.__mapVersion=dataArr[1]\n self.__timeStamp=dataArr[2]\n self.__meshID=dataArr[3]\n self.__number = dataArr[4]\n self.__layer = dataArr[5]\n self.__rticKind = dataArr[6]\n self.__rricTravelTime = dataArr[7]\n self.__LOS = dataArr[8]\n self.__sectionCount = dataArr[9]\n @property\n def areaId(self):\n return self.__areaId\n @areaId.setter\n def areaId(self,areaId):\n self.__areaId=areaId\n\n @property\n def mapVersion(self):\n return self.__mapVersion\n @mapVersion.setter\n def 
mapVersion(self,mapVersion):\n self.__mapVersion=mapVersion\n @property\n def timeStamp(self):\n return self.__timeStamp\n @timeStamp.setter\n def timeStamp(self,timeStamp):\n self.__timeStamp=timeStamp\n @property\n def meshID(self):\n return self.__meshID\n @meshID.setter\n def meshID(self,meshID):\n self.__meshID=meshID\n @property\n def number(self):\n return self.__number\n @number.setter\n def number(self,number):\n self.__number=number\n @property\n def layer(self):\n return self.__layer\n @layer.setter\n def layer(self,layer):\n self.__layer=layer\n @property\n def rticKind(self):\n return self.__rticKind\n @rticKind.setter\n def rticKind(self,rticKind):\n self.__rticKind=rticKind\n @property\n def rricTravelTime(self):\n return self.__rricTravelTime\n @rricTravelTime.setter\n def rricTravelTime(self,rricTravelTime):\n self.__rricTravelTime=rricTravelTime\n @property\n def LOS(self):\n return self.__LOS\n @LOS.setter\n def LOS(self,LOS):\n self.__LOS=LOS\n @property\n def sectionCount(self):\n return self.__sectionCount\n @sectionCount.setter\n def sectionCount(self,sectionCount):\n self.__sectionCount=sectionCount\n def __str__(self):\n return self.areaId+','+self.rticKind+','+self.meshID+\",\"+self.number\n" }, { "alpha_fraction": 0.6369165778160095, "alphanum_fraction": 0.6785666942596436, "avg_line_length": 48.75, "blob_id": "f72af001075ded1f796b9c86afb65360371559ff", "content_id": "0a2287dabd12c2dfc62289dc26a9aacf6c4f0caf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7839, "license_type": "no_license", "max_line_length": 139, "num_lines": 152, "path": "/minelab/nn/VGG.py", "repo_name": "niuxinzan/minelab", "src_encoding": "UTF-8", "text": "import tensorflow as tf\n\nimport numpy as np\nfrom functools import reduce\n\nVGG_MEAN = [103.939, 116.779, 123.68]\ndef inference(RGBData,label_size,train_mode=True,trainable=True, dropout=0.5,rgb_channel_size=3,image_width = 224,\\\n image_high = 224,conv1_deep= 64,conv2_deep= 128,conv3_deep= 256,conv4_deep= 512,conv5_deep= 512,\\\n fc6_size=4096,fc7_size=4096,kernal_size=3,max_pool_size=[1, 2, 2, 1],\\\n max_pool_strides=[1,2,2,1],conv_strides=[1,1,1,1]):\n '''\n :param RGBData:RGB图片数据\n :param label_size:\n :param train_mode:训练模式,如果为true,开启dropout\n :param trainable:是否是训练模式,默认是\n :param dropout:dropout值\n :param rgb_channel_size:图片通道数\n :param image_width:图片宽度\n :param image_high:图片长度\n :param conv1_deep:第一层卷积核通道数\n :param conv2_deep:第二层卷积核通道数\n :param conv3_deep:第三层卷积核通道数\n :param conv4_deep:第四层卷积核通道数\n :param conv5_deep:第五层卷积核通道数\n :param fc6_size:第六层全连接层输出神经元个数\n :param fc7_size:第七层全连接层输出神经元个数\n :param kernal_size:卷积核大小\n :param max_pool_size:池化层大小\n :param max_pool_strides:池化层步长\n :param conv_strides:卷积核步长\n :return:predictValue,预测分类值\n '''\n # rgb_scaled = RGBData * 255.0\n # # Convert RGB to BGR\n # red, green, blue = tf.split(axis=rgb_channel_size, num_or_size_splits=rgb_channel_size, value=rgb_scaled)\n # assert red.get_shape().as_list()[1:] == [image_width, image_high, 1]\n # assert green.get_shape().as_list()[1:] == [image_width, image_high, 1]\n # assert blue.get_shape().as_list()[1:] == [image_width, image_high, 1]\n # bgr = tf.concat(axis=rgb_channel_size, values=[\n # blue - VGG_MEAN[0],\n # green - VGG_MEAN[1],\n # red - VGG_MEAN[2],\n # ])\n # assert RGBData.get_shape().as_list()[1:] == [image_width, image_high, rgb_channel_size]\n\n conv1_1 = conv_layer(RGBData, rgb_channel_size, conv1_deep, 
\"conv1_1\",trainable=trainable,kernal_size=kernal_size,strides=conv_strides)\n conv1_2 = conv_layer(conv1_1, conv1_deep, conv1_deep, \"conv1_2\",trainable=trainable,kernal_size=kernal_size,strides=conv_strides)\n pool1 = max_pool(conv1_2, 'pool1')\n\n conv2_1 = conv_layer(pool1, conv1_deep, conv2_deep, \"conv2_1\",trainable=trainable,kernal_size=kernal_size,strides=conv_strides)\n conv2_2 = conv_layer(conv2_1, conv2_deep, conv2_deep, \"conv2_2\",trainable=trainable,kernal_size=kernal_size,strides=conv_strides)\n pool2 = max_pool(conv2_2, 'pool2')\n\n conv3_1 = conv_layer(pool2, conv2_deep, conv3_deep, \"conv3_1\",trainable=trainable,kernal_size=kernal_size,strides=conv_strides)\n conv3_2 = conv_layer(conv3_1, conv3_deep, conv3_deep, \"conv3_2\",trainable=trainable,kernal_size=kernal_size,strides=conv_strides)\n conv3_3 = conv_layer(conv3_2, conv3_deep, conv3_deep, \"conv3_3\",trainable=trainable,kernal_size=kernal_size,strides=conv_strides)\n conv3_4 = conv_layer(conv3_3, conv3_deep, conv3_deep, \"conv3_4\",trainable=trainable,kernal_size=kernal_size,strides=conv_strides)\n pool3 = max_pool(conv3_4, 'pool3')\n\n conv4_1 = conv_layer(pool3, conv3_deep, conv4_deep, \"conv4_1\",trainable=trainable,kernal_size=kernal_size,strides=conv_strides)\n conv4_2 = conv_layer(conv4_1, conv4_deep, conv4_deep, \"conv4_2\",trainable=trainable,kernal_size=kernal_size,strides=conv_strides)\n conv4_3 = conv_layer(conv4_2, conv4_deep, conv4_deep, \"conv4_3\",trainable=trainable,kernal_size=kernal_size,strides=conv_strides)\n conv4_4 = conv_layer(conv4_3, conv4_deep, conv4_deep, \"conv4_4\",trainable=trainable,kernal_size=kernal_size,strides=conv_strides)\n pool4 = max_pool(conv4_4, 'pool4',max_pool_size=max_pool_size)\n\n conv5_1 = conv_layer(pool4, conv4_deep, conv5_deep, \"conv5_1\",trainable=trainable,kernal_size=kernal_size,strides=conv_strides)\n conv5_2 = conv_layer(conv5_1, conv5_deep, conv5_deep, \"conv5_2\",trainable=trainable,kernal_size=kernal_size,strides=conv_strides)\n conv5_3 = conv_layer(conv5_2, conv5_deep, conv5_deep, \"conv5_3\",trainable=trainable,kernal_size=kernal_size,strides=conv_strides)\n conv5_4 = conv_layer(conv5_3, conv5_deep, conv5_deep, \"conv5_4\",trainable=trainable,kernal_size=kernal_size,strides=conv_strides)\n pool5 = max_pool(conv5_4, 'pool5',max_pool_size=max_pool_size,strides=max_pool_strides)\n\n pool_shape = pool5.get_shape().as_list()\n nodes = pool_shape[1] * pool_shape[2] * pool_shape[3]\n fc6 = fc_layer(pool5, nodes, fc6_size, \"fc6\",trainable=trainable) # 25088 = ((224 // (2 ** 5)) ** 2) * 512\n relu6 = tf.nn.relu(fc6)\n if train_mode is not None:\n relu6 = tf.cond(train_mode, lambda: tf.nn.dropout(relu6, dropout), lambda: relu6)\n elif trainable:\n relu6 = tf.nn.dropout(relu6, dropout)\n fc7 = fc_layer(relu6, fc6_size, fc7_size, \"fc7\",trainable=trainable)\n relu7 = tf.nn.relu(fc7)\n if train_mode is not None:\n relu7 = tf.cond(train_mode, lambda: tf.nn.dropout(relu7, dropout), lambda: relu7)\n elif trainable:\n relu7 = tf.nn.dropout(relu7, dropout)\n\n fc8 = fc_layer(relu7, fc7_size, label_size, \"fc8\",trainable=trainable)\n predictValue = tf.nn.softmax(fc8, name=\"prob\")\n return predictValue\n\ndef max_pool(bottom, name,max_pool_size=[1,2,2,1],strides=[1,2,2,1]):\n return tf.nn.max_pool(bottom, ksize=max_pool_size, strides=strides, padding='SAME', name=name)\n\ndef conv_layer(bottom, in_channels, out_channels, name,trainable=True,kernal_size=3,strides=[1, 1, 1, 1]):\n with tf.variable_scope(name):\n filt, conv_biases = get_conv_var(kernal_size, 
in_channels, out_channels, name,trainable=trainable)\n conv = tf.nn.conv2d(bottom, filt, strides, padding='SAME')\n bias = tf.nn.bias_add(conv, conv_biases)\n relu = tf.nn.relu(bias)\n\n return relu\n\ndef fc_layer(bottom, in_size, out_size, name,trainable=True):\n with tf.variable_scope(name):\n weights, biases = get_fc_var(in_size, out_size, name,trainable=trainable)\n x = tf.reshape(bottom, [-1, in_size])\n fc = tf.nn.bias_add(tf.matmul(x, weights), biases)\n\n return fc\ndef get_conv_var(filter_size, in_channels, out_channels, name,trainable=True):\n initial_value = tf.truncated_normal([filter_size, filter_size, in_channels, out_channels], 0.0, 0.001)\n filters = get_var(initial_value, name + \"_filters\",trainable=trainable)\n initial_value = tf.truncated_normal([out_channels], .0, .001)\n biases = get_var(initial_value, name + \"_biases\",trainable=trainable)\n\n return filters, biases\n\ndef get_fc_var(in_size, out_size, name,trainable=True):\n initial_value = tf.truncated_normal([in_size, out_size], 0.0, 0.001)\n weights = get_var(initial_value,name + \"_weights\",trainable=trainable)\n\n initial_value = tf.truncated_normal([out_size], .0, .001)\n biases = get_var(initial_value,name + \"_biases\",trainable=trainable)\n\n return weights, biases\n\ndef get_var(initial_value, var_name,trainable=True):\n value = initial_value\n var = tf.Variable(value, name=var_name)\n # if trainable:\n # var = tf.Variable(value, name=var_name)\n # else:\n # var = tf.constant(value, dtype=tf.float32, name=var_name)\n\n # print var_name, var.get_shape().as_list()\n assert var.get_shape() == initial_value.get_shape()\n\n return var\ndef save_npy(sess,var_dict,npy_path=\"./vgg19-save.npy\"):\n assert isinstance(sess, tf.Session)\n\n data_dict = {}\n\n for (name, idx), var in list(var_dict.items()):\n var_out = sess.run(var)\n if name not in data_dict:\n data_dict[name] = {}\n data_dict[name][idx] = var_out\n\n np.save(npy_path, data_dict)\n print((\"file saved\", npy_path))\n return npy_path\n\n" }, { "alpha_fraction": 0.5201900005340576, "alphanum_fraction": 0.54038006067276, "avg_line_length": 21.1842098236084, "blob_id": "c3fdb2e30f80ce2f6fa4a68444db72f1bef3c3c1", "content_id": "c641503edc675ce4ff98d9810a80611830d51b4d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 882, "license_type": "no_license", "max_line_length": 56, "num_lines": 38, "path": "/minelab/test/test_get_set.py", "repo_name": "niuxinzan/minelab", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# @Time : 2018/7/30 14:08\n# @Author : buf\n# @Email : [email protected]\n# @File : test_get_set.py\n# @Software: PyCharm\n#-*-encoding:utf-8-*-\nclass Person():\n # 只允许拥有私有的name和age属性\n __slots__ = ('__name', '__age')\n def __init__(self,name,age):\n self.__name=name\n self.__age=age\n\n @property\n def name(self):\n return self.__name\n @name.setter\n def name(self,name):\n self.__name=name\n\n @property\n def age(self):\n return self.__age\n\n @age.setter\n def age(self, age):\n self.__age = age\n def __str__(self):\n return '姓名 '+self.__name+' \\n年龄'+str(self.__age)\nif __name__=='__main__':\n zhangsan=Person('张三',20)\n print(zhangsan)\n print(zhangsan.name)\n print(zhangsan.age)\n zhangsan.age=30\n zhangsan.name='张三三'\n print(zhangsan)" }, { "alpha_fraction": 0.6070398092269897, "alphanum_fraction": 0.6687824726104736, "avg_line_length": 30.509090423583984, "blob_id": "8c7f07d8c6720dc334d25302b5b6db893d2c4fcd", "content_id": "aee12b0d130cc90a6cb8970c8c7eb62e9417199f", 
"detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1739, "license_type": "no_license", "max_line_length": 111, "num_lines": 55, "path": "/minelab/test/test_nn.py", "repo_name": "niuxinzan/minelab", "src_encoding": "UTF-8", "text": "\"\"\"\n\n\"\"\"\n\nimport tensorflow as tf\n\nimport minelab.test.utils as utils\nimport minelab.nn.LeNet5 as lenet5\n#define X\nimg = utils.load_image(\"./img-guitar-224x224.jpg\")\n#define Y\nimg_true_result = [1 if i == 1 else 0 for i in range(2)]\n\nbatch = img.reshape((1, 224, 224, 3))\n\n# 初始化tensorflow\nimages = tf.placeholder(tf.float32, [1, 224, 224, 3])\ntrue_out = tf.placeholder(tf.float32, [1, 2])\ntrain_mode = tf.placeholder(tf.bool)\n\nrgb_scaled = images * 255.0\n# Convert RGB to BGR\nred, green, blue = tf.split(axis=3, num_or_size_splits=3, value=rgb_scaled)\nassert red.get_shape().as_list()[1:] == [224, 224, 1]\nassert green.get_shape().as_list()[1:] == [224, 224, 1]\nassert blue.get_shape().as_list()[1:] == [224, 224, 1]\nVGG_MEAN = [103.939, 116.779, 123.68]\nbgr = tf.concat(axis=3, values=[\n blue - VGG_MEAN[0],\n green - VGG_MEAN[1],\n red - VGG_MEAN[2],\n])\n\n# prob=vgg19.inference(images,2,train_mode=train_mode)\nprob=lenet5.inference(images,2,num_channels=3)\nprob = tf.nn.softmax(prob, name=\"prob\")\n\ncost = tf.reduce_sum((prob - true_out) ** 2)\ntrain = tf.train.GradientDescentOptimizer(0.0001).minimize(cost)\n\npred_max =tf.argmax(prob, 1)\ny_max =tf.argmax([img_true_result], 1)\ncorrect_pred = tf.equal(pred_max, y_max)\naccuracy=tf.reduce_mean(tf.cast(correct_pred, tf.float32))\n\nsaver = tf.train.Saver()\nwith tf.Session() as sess:\n sess.run(tf.global_variables_initializer())\n step = 1\n print(\"Start training!\")\n while step < 2:\n sess.run(train,feed_dict={images: batch, true_out: [img_true_result], train_mode: True})\n accuracy1 =sess.run(accuracy,feed_dict={images: batch, true_out: [img_true_result], train_mode: False})\n print(\"accuracy1\",accuracy1)\n saver.save(sess, \"./accuracy1\")\n" }, { "alpha_fraction": 0.6005171537399292, "alphanum_fraction": 0.6444731950759888, "avg_line_length": 48.91935348510742, "blob_id": "5661089e295b332ebc268491e0d564be52ae69c0", "content_id": "330416dcfa6c9060bb438c5be1a6cf32cf26776f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3280, "license_type": "no_license", "max_line_length": 149, "num_lines": 62, "path": "/minelab/nn/LeNet5.py", "repo_name": "niuxinzan/minelab", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# @Time : 2018/7/17 14:13\n# @Author : buf\n# @Email : [email protected]\n# @File : LeNet5.py\n# @Software: PyCharm\n# 三个卷积层,两个下采样层,一个全连接层\nimport tensorflow as tf\n\n# LeNet5网路结构\ndef inference(input_tensor,num_labels,regularizer=None,num_channels=1, trainFlag=True, conv1_deep= 32,conv1_size= 5,\n conv2_deep= 64,conv2_size= 5,fc_size= 512,dropout= 0.7):\n '''\n :param input_tensor:输入图片\n :param trainFlag:是不是训练过程,是为True\n :param regularizer:正则化\n :param num_channels:图片通道数\n :param num_labels:标签类型数\n :param conv1_deep:第一层卷积核个数\n :param conv1_size:第一层卷积核大小\n :param conv2_deep:第二层卷积核个数\n :param conv2_size:第二层卷积核大小\n :param fc_size:全连接层大小\n :param dropout:dropout大小\n :return:logit:预测值\n '''\n with tf.variable_scope(\"layer1-conv1\"):\n conv1_weights = tf.get_variable(\n \"weight\", [conv1_size, conv1_size, num_channels, conv1_deep],\n initializer=tf.truncated_normal_initializer(stddev=0.1))\n conv1_biases = tf.get_variable(\"bias\", [conv1_deep], 
initializer=tf.constant_initializer(0.0))\n        conv1 = tf.nn.conv2d(input_tensor, conv1_weights, strides=[1, 1, 1, 1], padding='SAME')\n        relu1 = tf.nn.relu(tf.nn.bias_add(conv1, conv1_biases))\n    with tf.name_scope(\"layer2-pool1\"):\n        pool1 = tf.nn.max_pool(relu1,ksize=[1,2,2,1],strides=[1,2,2,1],padding=\"SAME\")\n    with tf.variable_scope(\"layer3-conv2\"):\n        conv2_weights=tf.get_variable(\"weight\",[conv2_size,conv2_size,conv1_deep,conv2_deep],initializer=tf.truncated_normal_initializer(stddev=0.1))\n        conv2_biases=tf.get_variable(\"bias\",[conv2_deep],initializer=tf.constant_initializer(0.0))\n        conv2=tf.nn.conv2d(pool1,conv2_weights,strides=[1,1,1,1],padding=\"SAME\")\n        relu2=tf.nn.relu(tf.nn.bias_add(conv2,conv2_biases))\n    with tf.name_scope(\"layer4-pool2\"):\n        pool2 = tf.nn.max_pool(relu2, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')\n    pool_shape = pool2.get_shape().as_list()\n    nodes = pool_shape[1] * pool_shape[2] * pool_shape[3]\n    reshaped = tf.reshape(pool2, [-1, nodes])\n    with tf.variable_scope('layer5-fc1'):\n        fc1_weights = tf.get_variable(\"weight\", [nodes, fc_size],initializer=tf.truncated_normal_initializer(stddev=0.1))\n        if regularizer is not None:\n            tf.add_to_collection('losses', regularizer(fc1_weights))\n        fc1_biases = tf.get_variable(\"bias\", [fc_size], initializer=tf.constant_initializer(0.1))\n        fc1 = tf.nn.relu(tf.matmul(reshaped, fc1_weights) + fc1_biases)\n        if trainFlag:\n            fc1 = tf.nn.dropout(fc1, dropout)\n    with tf.variable_scope('layer6-fc2'):\n        fc2_weights = tf.get_variable(\"weight\", [fc_size, num_labels],\n                                      initializer=tf.truncated_normal_initializer(stddev=0.1))\n        if regularizer is not None:\n            tf.add_to_collection('losses', regularizer(fc2_weights))\n        fc2_biases = tf.get_variable(\"bias\", [num_labels], initializer=tf.constant_initializer(0.1))\n        logit = tf.matmul(fc1, fc2_weights) + fc2_biases\n\n    return logit" } ]
7
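In LeNet5.inference above, the flatten step derives nodes from pool_shape[1] * pool_shape[2] * pool_shape[3]. Since both convolutions use stride 1 with SAME padding (spatial size preserved) and each of the two max pools halves height and width, the width fed into layer5-fc1 can be checked by hand. A quick check for the 224x224x3 input that test_nn.py feeds in, using the default conv2_deep of 64:

    image_side = 224                  # input height/width used in test_nn.py
    conv2_deep = 64                   # default depth of the second conv layer
    side = image_side // (2 ** 2)     # two 2x2, stride-2 pools halve the size twice
    nodes = side * side * conv2_deep
    print(side, nodes)                # 56 200704, the row count of fc1_weights

This is also why inference reshapes to [-1, nodes] rather than hard-coding the flattened width: the value is read back from the graph, so the same code adapts to other input resolutions.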
GordonLesti/SlidingWindowFilter-experiment
https://github.com/GordonLesti/SlidingWindowFilter-experiment
1a73c33a8eb440d16337245cfecab514576fccc8
c8a7ba233db1b664e89da4d798ac3760424f73f2
c3cd5038b5f644c9191818320529c442a03140b1
refs/heads/master
2020-01-23T21:45:45.971320
2017-02-23T09:30:19
2017-02-23T09:30:19
74,680,955
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.4719387888908386, "alphanum_fraction": 0.47820037603378296, "avg_line_length": 33.774192810058594, "blob_id": "dacd0de7101e08e50767bbdf825831b1e61e7ec5", "content_id": "bb9ddef1cb63703faea0373f80cf2d48d36e896c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4312, "license_type": "permissive", "max_line_length": 79, "num_lines": 124, "path": "/src/xwiimote_recorder.py", "repo_name": "GordonLesti/SlidingWindowFilter-experiment", "src_encoding": "UTF-8", "text": "\"\"\"A simple script that connects with a wii controller and starts experiment\"\"\"\n\nimport errno\nimport time\nfrom select import poll, POLLIN\nimport threading\nimport Queue\nfrom src.experiment import Experiment\nfrom src.tasks_gui import TasksGui\n\nclass App(threading.Thread):\n \"\"\"A application class puts accel data into a queue\"\"\"\n\n def __init__(self):\n try:\n self.xwiimote = __import__(\"xwiimote\")\n except ImportError:\n print \"No xwiimote found\"\n exit(1)\n else:\n self.fd_value = None\n self.dev = None\n self.ini_xwii()\n self.queue = Queue.Queue()\n self.poll = poll()\n self.poll.register(self.fd_value, POLLIN)\n self.loop_active = True\n gui = TasksGui()\n experiment = Experiment(gui)\n threading.Thread.__init__(self)\n self.start()\n output_file = None\n while self.loop_active:\n evt = self.queue.get()\n if evt[0] == 1:\n experiment.press_b_down(evt[1])\n elif evt[0] == 2:\n experiment.press_b_up(evt[1])\n if experiment.is_finished():\n output_file = open(\n \"data/record-\"+str(time.time())+\".txt\", \"w\"\n )\n output_file.write(experiment.get_output())\n output_file.close()\n self.loop_active = False\n elif evt[0] == 3:\n experiment.accel(evt[1], evt[2], evt[3], evt[4])\n elif evt[0] == 4:\n self.loop_active = False\n gui.quit()\n\n def ini_xwii(self):\n \"\"\"Find the WiiController\"\"\"\n\n # display a constant\n print \"=== \" + self.xwiimote.NAME_CORE + \" ===\"\n\n # list wiimotes and remember the first one\n try:\n mon = self.xwiimote.monitor(True, True)\n print \"mon fd\", mon.get_fd(False)\n ent = mon.poll()\n first_wiimote = ent\n while ent is not None:\n print \"Found device: \" + ent\n ent = mon.poll()\n except SystemError as ex:\n print \"ooops, cannot create monitor (\", ex, \")\"\n\n # continue only if there is a wiimote\n if first_wiimote is None:\n print \"No wiimote to read\"\n exit(0)\n\n # create a new iface\n try:\n self.dev = self.xwiimote.iface(first_wiimote)\n except IOError as ex:\n print \"ooops,\", ex\n exit(1)\n\n # display some information and open the iface\n try:\n print \"syspath:\" + self.dev.get_syspath()\n self.fd_value = self.dev.get_fd()\n print \"fd:\", self.fd_value\n print \"opened mask:\", self.dev.opened()\n self.dev.open(\n self.dev.available() | self.xwiimote.IFACE_WRITABLE\n )\n print \"opened mask:\", self.dev.opened()\n print \"capacity:\", self.dev.get_battery(), \"%\"\n except SystemError as ex:\n print \"ooops\", ex\n exit(1)\n\n def run(self):\n # read some values\n evt = self.xwiimote.event()\n local_loop_active = True\n while local_loop_active and self.loop_active:\n self.poll.poll()\n try:\n self.dev.dispatch(evt)\n if evt.type == self.xwiimote.EVENT_KEY:\n key, state = evt.get_key()\n if key == self.xwiimote.KEY_B:\n if state == 1:\n self.queue.put([1, time.time()])\n elif state == 0:\n self.queue.put([2, time.time()])\n elif evt.type == self.xwiimote.EVENT_ACCEL:\n x_value, y_value, z_value = evt.get_abs(0)\n self.queue.put([3, x_value, y_value, z_value, 
time.time()])\n if evt.type == self.xwiimote.EVENT_GONE:\n self.queue.put([4])\n local_loop_active = False\n except IOError as ex:\n if ex.errno != errno.EAGAIN:\n print \"Bad\"\n exit(0)\n\nApp()\nexit(0)\n" }, { "alpha_fraction": 0.549573540687561, "alphanum_fraction": 0.5554370880126953, "avg_line_length": 31.34482765197754, "blob_id": "70d705edac0a6e0ab068f790595e6c2f6610d020", "content_id": "67f13778ebbe6a9c7514ba8f8a5d5935f74274c3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1876, "license_type": "permissive", "max_line_length": 74, "num_lines": 58, "path": "/src/tasks_gui.py", "repo_name": "GordonLesti/SlidingWindowFilter-experiment", "src_encoding": "UTF-8", "text": "\"\"\"This script contains the Tasks Gui class\"\"\"\n\nfrom Tkinter import Tk, Label, TOP, BOTH, YES\nfrom PIL import ImageTk, Image\n\nclass TasksGui(object):\n \"\"\"A small Tkinter GUI that shows the tasks\"\"\"\n\n def __init__(self):\n self.root = Tk()\n self.size = (\n self.root.winfo_screenwidth(),\n self.root.winfo_screenheight()\n )\n self.root.overrideredirect(1)\n self.root.geometry(\"%dx%d+0+0\" % self.size)\n self.image_count = 19\n self.task_index = 0\n self.task_images = range(self.image_count)\n self.photo_images = range(self.image_count)\n for i in range(self.image_count):\n self.task_images[i] = Image.open(\n \"img/\" + str(i + 1) + \".png\"\n )\n self.task_images[i] = self.task_images[i].resize(\n self.size, Image.ANTIALIAS\n )\n self.photo_images[i] = ImageTk.PhotoImage(self.task_images[i])\n print \"Preload img/\" + str(i + 1) + \".png\"\n self.panel = Label(\n self.root,\n image=self.photo_images[0],\n background='white'\n )\n self.panel.pack(side=TOP, fill=BOTH, expand=YES)\n self.root.update()\n\n def next_task(self):\n \"\"\"Continues with the next task\"\"\"\n self.task_index = self.task_index + 1\n self.panel.configure(image=self.photo_images[self.task_index])\n self.panel.image = self.photo_images[self.task_index]\n self.root.update()\n\n def is_finished(self):\n \"\"\"Returns the status of the tasks\"\"\"\n if self.task_index >= self.image_count - 1:\n return True\n return False\n\n def get_task_index(self):\n \"\"\"Returns the task index\"\"\"\n return self.task_index\n\n def quit(self):\n \"\"\"Qit the GUI\"\"\"\n self.root.quit()\n self.root.update()\n" }, { "alpha_fraction": 0.699999988079071, "alphanum_fraction": 0.699999988079071, "avg_line_length": 39, "blob_id": "ddcd7b62651264edd7cd94a76b6af563d1cd39c9", "content_id": "797b48c3fdb4d7d00362108bcb683c0e1081994d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 40, "license_type": "permissive", "max_line_length": 39, "num_lines": 1, "path": "/src/__init__.py", "repo_name": "GordonLesti/SlidingWindowFilter-experiment", "src_encoding": "UTF-8", "text": "\"\"\"The source code of the experiment\"\"\"\n" }, { "alpha_fraction": 0.6774193644523621, "alphanum_fraction": 0.6774193644523621, "avg_line_length": 30, "blob_id": "9e65154a4efe6736a98acdc2e6f96a16a0b06194", "content_id": "dd843b9221b41f18e3b527e1709aeb2e241d30fc", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 31, "license_type": "permissive", "max_line_length": 30, "num_lines": 1, "path": "/test/__init__.py", "repo_name": "GordonLesti/SlidingWindowFilter-experiment", "src_encoding": "UTF-8", "text": "\"\"\"Tests for the experiment\"\"\"\n" }, { "alpha_fraction": 0.5465477108955383, 
"alphanum_fraction": 0.5868890881538391, "avg_line_length": 33.37333297729492, "blob_id": "6a58691a30bbd51a923b324fdef36bd27488b07c", "content_id": "0c80d54a81ef7978c03c0c2379d6ab90b3f8ce27", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2578, "license_type": "permissive", "max_line_length": 79, "num_lines": 75, "path": "/test/test_experiment.py", "repo_name": "GordonLesti/SlidingWindowFilter-experiment", "src_encoding": "UTF-8", "text": "\"\"\"This script contains tests for the Experiment class\"\"\"\n\nimport time\nimport unittest\nfrom src.experiment import Experiment\n\nclass TasksMock(object):\n \"\"\"A small test class that mocks the task GUI\"\"\"\n\n def __init__(self):\n self.image_count = 19\n self.task_index = 0\n\n def next_task(self):\n \"\"\"Continues with the next task\"\"\"\n self.task_index = self.task_index + 1\n\n def is_finished(self):\n \"\"\"Returns the status of the test tasks\"\"\"\n if self.task_index + 1 >= self.image_count:\n return True\n return False\n\n def get_task_index(self):\n \"\"\"Returns the tesk index\"\"\"\n return self.task_index\n\n\n# pylint: disable=R0904\nclass TestExperiment(unittest.TestCase):\n \"\"\"A class that tests the class Experiment\"\"\"\n def test_accel(self):\n \"\"\"Tests the method accel\"\"\"\n experiment = Experiment(TasksMock())\n experiment.accel(80, 48, 97, time.time())\n experiment.accel(3, 42, 79, time.time())\n self.assertRegexpMatches(\n experiment.get_output(),\n '^[0-9]* 80 48 97\\n[0-9]* 3 42 79\\n$'\n )\n\n def test_press_b_success(self):\n \"\"\"Tests the method press_b_down\"\"\"\n experiment = Experiment(TasksMock())\n experiment.accel(80, 48, 97, time.time())\n experiment.press_b_down(time.time())\n experiment.accel(3, 42, 79, time.time())\n experiment.press_b_up(time.time())\n experiment.accel(56, 21, 43, time.time())\n experiment.press_b_down(time.time())\n experiment.accel(62, 32, 28, time.time())\n experiment.press_b_up(time.time())\n self.assertRegexpMatches(\n experiment.get_output(),\n '^[0-9]* 80 48 97\\n[0-9]* START 0\\n[0-9]* 3 42 79\\n[0-9]* END 0' \\\n '\\n[0-9]* 56 21 43\\n[0-9]* START 1\\n[0-9]* 62 32 28\\n[0-9]* END ' \\\n '1\\n$'\n )\n\n def test_is_finished(self):\n \"\"\"Tests the method is_finished\"\"\"\n experiment = Experiment(TasksMock())\n self.assertEquals(False, experiment.is_finished())\n for _ in range(0, 17):\n experiment.press_b_down(time.time())\n self.assertEquals(False, experiment.is_finished())\n experiment.press_b_up(time.time())\n self.assertEquals(False, experiment.is_finished())\n experiment.press_b_down(time.time())\n self.assertEquals(False, experiment.is_finished())\n experiment.press_b_up(time.time())\n self.assertEquals(True, experiment.is_finished())\n\nif __name__ == \"__main__\":\n unittest.main()\n" }, { "alpha_fraction": 0.7626841068267822, "alphanum_fraction": 0.7635024785995483, "avg_line_length": 30.33333396911621, "blob_id": "10a2262d2c38bdf11875ac0d4a511bc723def175", "content_id": "a0e39229b7641cce544537043cb90a4a9fee8638", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1222, "license_type": "permissive", "max_line_length": 119, "num_lines": 39, "path": "/README.md", "repo_name": "GordonLesti/SlidingWindowFilter-experiment", "src_encoding": "UTF-8", "text": "# SlidingWindowFilter-experiment\n\n[![Software License][ico-license]](LICENSE.md)\n[![Build Status][ico-travis]][link-travis]\n\nA small python application that is part of 
the [final paper](https://github.com/GordonLesti/SlidingWindowFilter) of my\nbachelor degree.\n\n## Requirements\n\n* [python](https://www.python.org/) 2\n* [dvdhrm/xwiimote](https://github.com/dvdhrm/xwiimote)\n* [dvdhrm/xwiimote-bindings](https://github.com/dvdhrm/xwiimote-bindings)\n* A Wii Remote controller\n\n## Run\n\nPlease run the following command as root. The application will write the result of the experiment into the `data`\ndirectory.\n```bash\n python -m src.xwiimote_recorder\n```\n\n## Test\n\nHere the commands for [pylint](https://www.pylint.org/) and the unittests.\n```bash\n pylint src/ test/\n python -m unittest test.test_experiment\n```\n\n## License\n\nThe MIT License (MIT). Please see [License File](LICENSE.md) for more information.\n\n[ico-license]: https://img.shields.io/github/license/GordonLesti/SlidingWindowFilter-experiment.svg?style=flat-square\n[ico-travis]: https://img.shields.io/travis/GordonLesti/SlidingWindowFilter-experiment/master.svg?style=flat-square\n\n[link-travis]: https://travis-ci.org/GordonLesti/SlidingWindowFilter-experiment\n" }, { "alpha_fraction": 0.5786682367324829, "alphanum_fraction": 0.5839717388153076, "avg_line_length": 35.10638427734375, "blob_id": "64d8d8b4e6844b37a86a6d06093a137e348f85cf", "content_id": "747dccb8b6f62319920f62a9a69b13f4be6aaca0", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1697, "license_type": "permissive", "max_line_length": 78, "num_lines": 47, "path": "/src/experiment.py", "repo_name": "GordonLesti/SlidingWindowFilter-experiment", "src_encoding": "UTF-8", "text": "\"\"\"This script contains the Experiment class\"\"\"\n\nimport time\n\nclass Experiment(object):\n \"\"\"A class that represents a experiment with accel data\"\"\"\n\n def __init__(self, tasks):\n self.output = \"\"\n self.task_index = 0\n self.start_time = int(round(time.time() * 1000))\n self.tasks = tasks\n self.button_b_down = False\n\n def accel(self, x_value, y_value, z_value, rec_time):\n \"\"\"Store accel data\"\"\"\n self.output += self.__get_experiment_time_string(rec_time) + \" \" \\\n + str(x_value) + \" \" + str(y_value) + \" \" + str(z_value) + \"\\n\"\n\n def press_b_down(self, rec_time):\n \"\"\"Handle B button pressed down\"\"\"\n if self.button_b_down:\n raise Exception('Button B is already down.')\n self.button_b_down = True\n self.output += (self.__get_experiment_time_string(rec_time) \\\n + \" START \" + str(self.tasks.get_task_index()) + \"\\n\")\n\n def press_b_up(self, rec_time):\n \"\"\"Handle B button pressed up\"\"\"\n if not self.button_b_down:\n raise Exception('Button B is already up.')\n self.button_b_down = False\n self.output += self.__get_experiment_time_string(rec_time) + \" END \" \\\n + str(self.tasks.get_task_index()) + \"\\n\"\n self.tasks.next_task()\n\n def is_finished(self):\n \"\"\"Returns the status of the experiment\"\"\"\n return self.tasks.is_finished()\n\n def get_output(self):\n \"\"\"Returns the output of the experiment\"\"\"\n return self.output\n\n def __get_experiment_time_string(self, rec_time):\n \"\"\"Returns the current experiment time as string\"\"\"\n return str(int(round(rec_time * 1000)) - self.start_time)\n" } ]
7
piaorenjie/test
https://github.com/piaorenjie/test
bd983aafb91c487feb98695cd1eb3c145ee6036d
abe27389e3bf4ba1a0cf3f7eb18bd8099d9dad5f
235ffbc7ff8cce7e48a91db4a2d761a9eb45aaa4
refs/heads/master
2020-03-07T22:44:00.183749
2018-04-02T14:46:43
2018-04-02T14:46:43
80,529,170
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5384615659713745, "alphanum_fraction": 0.5641025900840759, "avg_line_length": 8.75, "blob_id": "42d838478832dbd656c4d6221733210b78eec0da", "content_id": "dbd1ba4b9afdd7b50273ba18ba26580388950ec2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 39, "license_type": "no_license", "max_line_length": 20, "num_lines": 4, "path": "/HelloWorld.py", "repo_name": "piaorenjie/test", "src_encoding": "UTF-8", "text": "a = 1\nprint(\"hello world\")\n\nprint(\"'\") " }, { "alpha_fraction": 0.7989864945411682, "alphanum_fraction": 0.8344594836235046, "avg_line_length": 19.44827651977539, "blob_id": "78bfffa68dcfd74b1f3f3f688b5f4b06aafca67e", "content_id": "8bbcd3f5e551e09ad3d823a204453851b5056b20", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 608, "license_type": "no_license", "max_line_length": 77, "num_lines": 29, "path": "/ReadMe.md", "repo_name": "piaorenjie/test", "src_encoding": "UTF-8", "text": "官方快捷键大全:https://code.visualstudio.com/docs/customization/keybindings\nhttp://blog.csdn.net/u010019717/article/details/50443970\n\nhttps://www.shopify.com/partners/blog/best-visual-studio-code-extensions-2017\n\ntoken:9e8159a767678c8b64c1e919bb5cc4e8dcd13b94\nAtom one Dark Theme \nBlank Line at the End of File\nCss Peek\nDebugger for chrome\nESLint\nGithub Build Status\nGitLens\nGo\nGuides\nHtml Snippets\nindent-rainbow\nInstant Markdown\nIntelliSense for CSS class names in HTML\nLiquid Languages Support\nopen in browser\nPath Intellisense\nPython \nRainbow Brackets\nShopify Liquid Template Snippets\nstylelint\nTrailing Spaces\nTSLint\nvscode-icons" } ]
2
motius/scs_core
https://github.com/motius/scs_core
9d2d6bfbbc05cc91e5b6f3be0ee386e392ea0df5
ceac0be169ab540fa478d6d053f3814f1ae6bfce
bff8128579f95f6641c23e2455263784a7a6b76a
refs/heads/master
2020-03-11T23:44:54.349124
2018-04-07T15:45:37
2018-04-07T15:45:37
130,330,631
0
0
null
2018-04-20T08:07:49
2018-04-19T09:43:51
2018-04-19T09:43:50
null
[ { "alpha_fraction": 0.38526254892349243, "alphanum_fraction": 0.4102117717266083, "avg_line_length": 24.34558868408203, "blob_id": "18b068410c93fe9dbb8ebbdaaee1a0c1fdb80483", "content_id": "a00d84b93ac95fd539df930044a84fa3407d48df", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3447, "license_type": "permissive", "max_line_length": 118, "num_lines": 136, "path": "/src/scs_core/position/gpgsa.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on 30 Dec 2016\n\n@author: Bruno Beloff ([email protected])\n\nGNSS DOP and Active Satellites\n$xxGSA,opMode,navMode{,sv},PDOP,HDOP,VDOP*cs\n\nexample sentence:\n$GPGSA,A,3,23,29,07,08,09,18,26,28,,,,,1.94,1.18,1.54*0D\n\nexample values:\nGPGSA:{op_mode:A, nav_mode:3, sv:[21, 02, 28, 13, 30, 05, None, None, None, None, None, None], pdop:4.61,\nhdop:3.10, vdop:3.41}\n\nGPGSA:{op_mode:A, nav_mode:1, sv:[None, None, None, None, None, None, None, None, None, None, None, None], pdop:99.99,\nhdop:99.99, vdop:99.99}\n\"\"\"\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass GPGSA(object):\n \"\"\"\n classdocs\n \"\"\"\n\n MESSAGE_ID = \"$GPGSA\"\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @classmethod\n def construct(cls, s):\n if s.str(0) != cls.MESSAGE_ID:\n raise TypeError(\"invalid sentence:%s\" % s)\n\n op_mode = s.str(1)\n nav_mode = s.int(2)\n\n sv = []\n for i in range(12):\n sv.append(s.int(3 + i))\n\n pdop = s.float(15, 2)\n hdop = s.float(16, 2)\n vdop = s.float(17, 2)\n\n return GPGSA(op_mode, nav_mode, sv, pdop, hdop, vdop)\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __init__(self, op_mode, nav_mode, sv, pdop, hdop, vdop):\n \"\"\"\n Constructor\n \"\"\"\n self.__op_mode = op_mode # string\n self.__nav_mode = nav_mode # int\n \n self.__sv = sv # list of int\n\n self.__pdop = pdop # float(2)\n self.__hdop = hdop # float(2)\n self.__vdop = vdop # float(2)\n\n\n def __eq__(self, other):\n if not isinstance(other, self.__class__):\n return NotImplemented\n\n return self.__dict__ == other.__dict__\n\n # if self.op_mode != other.op_mode:\n # return False\n #\n # if self.nav_mode != other.nav_mode:\n # return False\n #\n # if self.pdop != other.pdop:\n # return False\n #\n # if self.hdop != other.hdop:\n # return False\n #\n # if self.vdop != other.vdop:\n # return False\n #\n # return True\n\n\n def __ne__(self, other):\n if not isinstance(other, self.__class__):\n return NotImplemented\n\n return not self == other\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @property\n def op_mode(self):\n return self.__op_mode\n\n\n @property\n def nav_mode(self):\n return self.__nav_mode\n\n\n @property\n def sv(self):\n return self.__sv\n\n\n @property\n def pdop(self):\n return self.__pdop\n\n\n @property\n def hdop(self):\n return self.__hdop\n\n\n @property\n def vdop(self):\n return self.__vdop\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __str__(self, *args, **kwargs):\n svs = '[' + ', '.join(str(sv) for sv in self.__sv) + ']'\n\n return \"GPGSA:{op_mode:%s, nav_mode:%s, sv:%s, pdop:%s, hdop:%s, vdop:%s}\" % \\\n (self.op_mode, self.nav_mode, svs, 
self.pdop, self.hdop, self.vdop)\n" }, { "alpha_fraction": 0.39782822132110596, "alphanum_fraction": 0.40276408195495605, "avg_line_length": 24.9743595123291, "blob_id": "c0471138eb6ac2befd2d094d44fd6c36bae27f9e", "content_id": "d4bc642fcd58a364ceff226a8d2a84ca940171d3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3040, "license_type": "permissive", "max_line_length": 118, "num_lines": 117, "path": "/src/scs_core/gas/ndir_datum.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on 20 Jun 2017\n\n@author: Bruno Beloff ([email protected])\n\"\"\"\n\nfrom numbers import Number\nfrom collections import OrderedDict\n\nfrom scs_core.data.datum import Datum\nfrom scs_core.data.json import JSONable\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass NDIRDatum(JSONable):\n \"\"\"\n classdocs\n \"\"\"\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @classmethod\n def construct_from_jdict(cls, jdict):\n if not jdict:\n return None\n\n temp = jdict.get('tmp')\n cnc = jdict.get('cnc-raw')\n cnc_igl = jdict.get('cnc')\n\n return NDIRDatum(temp, cnc, cnc_igl)\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __init__(self, temp, cnc, cnc_igl):\n \"\"\"\n Constructor\n \"\"\"\n self.__temp = Datum.float(temp, 1) # temperature ºC\n self.__cnc = Datum.float(cnc, 1) # concentration ppm\n self.__cnc_igl = Datum.float(cnc_igl, 1) # concentration (ideal gas law corrected) ppm\n\n\n def __eq__(self, other):\n if not isinstance(other, self.__class__):\n return False\n\n if self.temp != other.temp:\n return False\n\n if self.cnc != other.cnc:\n return False\n\n if self.cnc_igl != other.cnc_igl:\n return False\n\n return True\n\n\n def __add__(self, other):\n if not isinstance(other, self.__class__):\n raise TypeError(other)\n\n temp = self.temp + other.temp\n cnc = self.cnc + other.cnc\n cnc_igl = self.cnc_igl + other.cnc_igl\n\n return NDIRDatum(temp, cnc, cnc_igl)\n\n\n def __truediv__(self, other):\n if not isinstance(other, Number):\n raise TypeError(other)\n\n temp = self.temp / other\n cnc = self.cnc / other\n cnc_igl = self.cnc_igl / other\n\n return NDIRDatum(temp, cnc, cnc_igl)\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def as_json(self):\n jdict = OrderedDict()\n\n jdict['tmp'] = self.temp\n jdict['cnc-raw'] = self.cnc\n jdict['cnc'] = self.cnc_igl\n\n return jdict\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @property\n def temp(self):\n return self.__temp\n\n\n @property\n def cnc(self):\n return self.__cnc\n\n\n @property\n def cnc_igl(self):\n return self.__cnc_igl\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __str__(self, *args, **kwargs):\n return \"NDIRDatum:{temp:%0.1f, cnc:%0.1f, cnc_igl:%0.1f}\" % \\\n (self.temp, self.cnc, self.cnc_igl)\n" }, { "alpha_fraction": 0.41226819157600403, "alphanum_fraction": 0.41963860392570496, "avg_line_length": 31.604650497436523, "blob_id": "1d941059cda9bc44be93942634f15b80aee14d9f", "content_id": "a10fdaf5e15751fb72f5143c8d34cab3530cf876", "detected_licenses": [ "MIT" ], 
"is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4206, "license_type": "permissive", "max_line_length": 118, "num_lines": 129, "path": "/src/scs_core/aws/client/mqtt_client.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on 6 Oct 2017\n\n@author: Bruno Beloff ([email protected])\n\nhttps://github.com/aws/aws-iot-device-sdk-python\n\"\"\"\n\nimport AWSIoTPythonSDK.MQTTLib as MQTTLib\n\nfrom AWSIoTPythonSDK.MQTTLib import AWSIoTMQTTClient\n\nfrom scs_core.data.json import JSONify\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass MQTTClient(object):\n \"\"\"\n classdocs\n \"\"\"\n\n __PORT = 8883\n\n __QUEUE_SIZE = -1 # recommended: infinite\n __QUEUE_DROP_BEHAVIOUR = MQTTLib.DROP_OLDEST # not required for infinite queue\n __QUEUE_DRAINING_FREQUENCY = 1 # recommended: 2 (Hz)\n\n __RECONN_BASE = 1 # recommended: 1 (sec)\n __RECONN_MAX = 32 # recommended: 32 (sec)\n __RECONN_STABLE = 20 # recommended: 20 (sec)\n\n __DISCONNECT_TIMEOUT = 30 # recommended: 10 (sec)\n __OPERATION_TIMEOUT = 30 # recommended: 5 (sec)\n\n __PUB_QOS = 1\n __SUB_QOS = 1\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __init__(self, *subscribers):\n \"\"\"\n Constructor\n \"\"\"\n self.__client = None\n self.__subscribers = subscribers\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def connect(self, auth):\n # client...\n self.__client = AWSIoTMQTTClient(auth.client_id)\n\n # configuration...\n self.__client.configureEndpoint(auth.endpoint, self.__PORT)\n\n self.__client.configureCredentials(auth.root_ca_file_path, auth.private_key_path, auth.certificate_path)\n\n self.__client.configureAutoReconnectBackoffTime(self.__RECONN_BASE, self.__RECONN_MAX, self.__RECONN_STABLE)\n\n self.__client.configureOfflinePublishQueueing(self.__QUEUE_SIZE)\n self.__client.configureDrainingFrequency(self.__QUEUE_DRAINING_FREQUENCY)\n\n self.__client.configureConnectDisconnectTimeout(self.__DISCONNECT_TIMEOUT)\n self.__client.configureMQTTOperationTimeout(self.__OPERATION_TIMEOUT)\n\n # subscriptions...\n for subscriber in self.__subscribers:\n self.__client.subscribe(subscriber.topic, self.__SUB_QOS, subscriber.handler)\n\n # connect...\n self.__client.connect()\n\n\n def disconnect(self):\n self.__client.disconnect()\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def publish(self, publication):\n payload = JSONify.dumps(publication.payload)\n\n self.__client.publish(publication.topic, payload, self.__PUB_QOS)\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __str__(self, *args, **kwargs):\n subscribers = '[' + ', '.join(str(subscriber) for subscriber in self.__subscribers) + ']'\n\n return \"MQTTClient:{subscribers:%s}\" % subscribers\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass MQTTSubscriber(object):\n \"\"\"\n classdocs\n \"\"\"\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __init__(self, topic, handler):\n \"\"\"\n Constructor\n \"\"\"\n self.__topic = topic\n self.__handler = handler\n\n\n # 
----------------------------------------------------------------------------------------------------------------\n\n @property\n def topic(self):\n return self.__topic\n\n\n @property\n def handler(self):\n return self.__handler\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __str__(self, *args, **kwargs):\n return \"MQTTSubscriber:{topic:%s, handler:%s}\" % (self.topic, self.handler)\n" }, { "alpha_fraction": 0.41937944293022156, "alphanum_fraction": 0.453204482793808, "avg_line_length": 27.08571434020996, "blob_id": "476518414b341c0f00ea68a52574b6ece4ff4032", "content_id": "e352432f0ad10e367e0c26b1d8bf4030975f8859", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3932, "license_type": "permissive", "max_line_length": 118, "num_lines": 140, "path": "/src/scs_core/position/gpgga.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on 30 Dec 2016\n\n@author: Bruno Beloff ([email protected])\n\nGlobal positioning system fix data\n$xxGGA,time,lat,NS,long,EW,quality,numSV,HDOP,alt,M,sep,M,diffAge,diffStation*cs\n\nexample sentence:\n$GPGGA,092725.00,4717.11399,N,00833.91590,E,1,08,1.01,499.6,M,48.0,M,,*5B\n\nexample values:\nGPGGA:{time:GPTime:{time:141058.00}, loc:GPLoc:{lat:5049.38432, ns:N, lng:00007.37801, ew:W}, quality:2, num_sv:06,\nhdop:3.10, alt:37.5, sep:45.4, diff_age:None, diff_station:0000}\n\nGPGGA:{time:GPTime:{time:140047.00}, loc:GPLoc:{lat:None, ns:None, lng:None, ew:None}, quality:0, num_sv:00,\nhdop:99.99, alt:None, sep:None, diff_age:None, diff_station:None}\n\"\"\"\n\nfrom scs_core.position.gploc import GPLoc\nfrom scs_core.position.gptime import GPTime\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass GPGGA(object):\n \"\"\"\n classdocs\n \"\"\"\n\n MESSAGE_ID = \"$GPGGA\"\n\n QUALITY_NO_FIX = 0\n QUALITY_AUTONOMOUS_GNSS = 1\n QUALITY_DIFFERENTIAL_GNSS = 2\n QUALITY_ESTIMATED_FIX = 6\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @classmethod\n def construct(cls, s):\n if s.str(0) != cls.MESSAGE_ID:\n raise TypeError(\"invalid sentence:%s\" % s)\n\n time = GPTime(s.str(1))\n\n lat = s.str(2)\n ns = s.str(3)\n\n lng = s.str(4)\n ew = s.str(5)\n\n loc = GPLoc(lat, ns, lng, ew)\n\n quality = s.int(6)\n num_sv = s.int(7)\n hdop = s.float(8, 3)\n alt = s.float(9, 2)\n sep = s.float(11, 2)\n\n diff_age = s.float(13, 3)\n diff_station = s.str(14)\n\n return GPGGA(time, loc, quality, num_sv, hdop, alt, sep, diff_age, diff_station)\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __init__(self, time, loc, quality, num_sv, hdop, alt, sep, diff_age, diff_station):\n \"\"\"\n Constructor\n \"\"\"\n self.__time = time # GPTime\n self.__loc = loc # GPLoc\n\n self.__quality = quality # int\n self.__num_sv = num_sv # int\n self.__hdop = hdop # float(2)\n self.__alt = alt # float(1) - altitude (metres)\n self.__sep = sep # float(1) - geoid separation (metres)\n\n self.__diff_age = diff_age # float(3) - age of differential corrections (seconds)\n self.__diff_station = diff_station # string - ID of station providing differential corrections\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @property\n 
def time(self):\n return self.__time\n\n\n @property\n def loc(self):\n return self.__loc\n\n\n @property\n def quality(self):\n return self.__quality\n\n\n @property\n def num_sv(self):\n return self.__num_sv\n\n\n @property\n def hdop(self):\n return self.__hdop\n\n\n @property\n def alt(self):\n return self.__alt\n\n\n @property\n def sep(self):\n return self.__sep\n\n\n @property\n def diff_age(self):\n return self.__diff_age\n\n\n @property\n def diff_station(self):\n return self.__diff_station\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __str__(self, *args, **kwargs):\n return \"GPGGA:{time:%s, loc:%s, quality:%s, num_sv:%s, hdop:%s, alt:%s, sep:%s, \" \\\n \"diff_age:%s, diff_station:%s}\" % \\\n (self.time, self.loc, self.quality, self.num_sv, self.hdop, self.alt, self.sep,\n self.diff_age, self.diff_station)\n" }, { "alpha_fraction": 0.3826339840888977, "alphanum_fraction": 0.41462400555610657, "avg_line_length": 25.163043975830078, "blob_id": "2cca77bd3ebf28dc7148e2c53a14d83167e73bec", "content_id": "fb64fe9b5e2352f9dac4c381f2fdf49fb741f2b5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2407, "license_type": "permissive", "max_line_length": 118, "num_lines": 92, "path": "/src/scs_core/position/gpgll.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on 30 Dec 2016\n\n@author: Bruno Beloff ([email protected])\n\nLatitude and longitude, with time of position fix and status\n$xxGLL,lat,NS,long,EW,time,status,posMode*cs\n\nexample sentence:\n$GPGLL,5049.37823,N,00007.37872,W,103228.00,A,D*7F\n\nexample values:\nGPGLL:{loc:GPLoc:{lat:5049.38432, ns:N, lng:00007.37801, ew:W}, time:GPTime:{time:141058.00}, status:A, pos_mode:D}\nGPGLL:{loc:GPLoc:{lat:None, ns:None, lng:None, ew:None}, time:GPTime:{time:140047.00}, status:V, pos_mode:N}\n\"\"\"\n\nfrom scs_core.position.gploc import GPLoc\nfrom scs_core.position.gptime import GPTime\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass GPGLL(object):\n \"\"\"\n classdocs\n \"\"\"\n\n MESSAGE_ID = \"$GPGLL\"\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @classmethod\n def construct(cls, s):\n if s.str(0) != cls.MESSAGE_ID:\n raise TypeError(\"invalid sentence:%s\" % s)\n\n lat = s.str(1)\n ns = s.str(2)\n\n lng = s.str(3)\n ew = s.str(4)\n\n loc = GPLoc(lat, ns, lng, ew)\n\n time = GPTime(s.str(5))\n\n status = s.str(6)\n pos_mode = s.str(7)\n\n return GPGLL(loc, time, status, pos_mode)\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __init__(self, loc, time, status, pos_mode):\n \"\"\"\n Constructor\n \"\"\"\n self.__loc = loc # GPLoc\n self.__time = time # GPTime\n\n self.__status = status # string\n self.__pos_mode = pos_mode # string\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @property\n def loc(self):\n return self.__loc\n\n\n @property\n def time(self):\n return self.__time\n\n\n @property\n def status(self):\n return self.__status\n\n\n @property\n def pos_mode(self):\n return self.__pos_mode\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __str__(self, 
*args, **kwargs):\n return \"GPGLL:{loc:%s, time:%s, status:%s, pos_mode:%s}\" % \\\n (self.loc, self.time, self.status, self.pos_mode)\n" }, { "alpha_fraction": 0.3786810338497162, "alphanum_fraction": 0.3961012065410614, "avg_line_length": 25.494504928588867, "blob_id": "fae5704486085116e623b5a9534bf5208f6146c0", "content_id": "715b1625142a3917a57e0d67d9be7d19381d2284", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2411, "license_type": "permissive", "max_line_length": 118, "num_lines": 91, "path": "/src/scs_core/position/gpvtg.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on 30 Dec 2016\n\n@author: Bruno Beloff ([email protected])\n\nCourse over ground and Ground speed\n$xxVTG,cogt,T,cogm,M,knots,N,kph,K,pos_mode*cs\n\nexample sentence:\n$GPVTG,77.52,T,,M,0.004,N,0.008,K,A*06\n\nexample values:\nGPVTG:{cogt:None, cogm:None, knots:0.005, kph:0.010, pos_mode:D}\nGPVTG:{cogt:None, cogm:None, knots:None, kph:None, pos_mode:N}\n\"\"\"\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass GPVTG(object):\n \"\"\"\n classdocs\n \"\"\"\n\n MESSAGE_ID = \"$GPVTG\"\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @classmethod\n def construct(cls, s):\n if s.str(0) != cls.MESSAGE_ID:\n raise TypeError(\"invalid sentence:%s\" % s)\n\n cogt = s.float(1, 2)\n cogm = s.float(3, 2)\n\n knots = s.float(5, 3)\n kph = s.float(7, 3)\n\n pos_mode = s.str(9)\n\n return GPVTG(cogt, cogm, knots, kph, pos_mode)\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __init__(self, cogt, cogm, knots, kph, pos_mode):\n \"\"\"\n Constructor\n \"\"\"\n self.__cogt = cogt # float(2) - degrees course over ground\n self.__cogm = cogm # float(2) - degrees course over ground (magnetic)\n\n self.__knots = knots # float(3) - speed over ground (knots)\n self.__kph = kph # float(3) - speed over ground (kph)\n\n self.__pos_mode = pos_mode # string\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @property\n def cogt(self):\n return self.__cogt\n\n\n @property\n def cogm(self):\n return self.__cogm\n\n\n @property\n def knots(self):\n return self.__knots\n\n\n @property\n def kph(self):\n return self.__kph\n\n\n @property\n def pos_mode(self):\n return self.__pos_mode\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __str__(self, *args, **kwargs):\n return \"GPVTG:{cogt:%s, cogm:%s, knots:%s, kph:%s, pos_mode:%s}\" % \\\n (self.cogt, self.cogm, self.knots, self.kph, self.pos_mode)\n" }, { "alpha_fraction": 0.48598435521125793, "alphanum_fraction": 0.5076597332954407, "avg_line_length": 26.89090919494629, "blob_id": "686dc7024de645312fa4cf2c11c10c9c1fe0d2d6", "content_id": "fe3c0bfaa30854d744cc5382293597bbcff70fae", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6136, "license_type": "permissive", "max_line_length": 118, "num_lines": 220, "path": "/src/scs_core/data/localized_datetime.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on 13 Aug 2016\n\n@author: Bruno Beloff ([email protected])\n\nNote that, for the ISO 8601 constructors, 
milliseconds are optional.\n\nhttp://www.saltycrane.com/blog/2009/05/converting-time-zones-datetime-objects-python/\n\"\"\"\n\nfrom datetime import datetime\nfrom datetime import timedelta\nfrom datetime import timezone\n\nimport pytz\nimport re\nimport tzlocal\n\nfrom scs_core.data.json import JSONable\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass LocalizedDatetime(JSONable):\n \"\"\"\n classdocs\n \"\"\"\n\n @classmethod\n def now(cls):\n zone = tzlocal.get_localzone()\n localized = datetime.now(zone)\n\n return LocalizedDatetime(localized)\n\n\n @classmethod\n def construct_from_date(cls, date):\n zone = tzlocal.get_localzone()\n localized = zone.localize(datetime(date.year, date.month, date.day))\n\n return LocalizedDatetime(localized)\n\n\n @classmethod\n def construct_from_timestamp(cls, t, tz=None):\n zone = tzlocal.get_localzone() if tz is None else tz\n localized = datetime.fromtimestamp(t, zone)\n\n return LocalizedDatetime(localized)\n\n\n @classmethod\n def construct_from_iso8601(cls, datetime_str):\n if datetime_str is None:\n return None\n\n # Z timezone offset...\n localized = cls.__construct_from_iso8601_z(datetime_str)\n\n if localized:\n return localized\n\n # numeric timezone offset...\n return cls.__construct_from_iso8601_numeric(datetime_str)\n\n\n @classmethod\n def construct_from_jdict(cls, jdict):\n return cls.construct_from_iso8601(jdict)\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @classmethod\n def __construct_from_iso8601_z(cls, datetime_str):\n # match...\n match = re.match('(\\d{4})-(\\d{2})-(\\d{2})T(\\d{2}):(\\d{2}):(\\d{2})(?:.(\\d{3}))?Z', datetime_str)\n\n if match is None:\n return None\n\n fields = match.groups()\n\n # fields...\n year = int(fields[0])\n month = int(fields[1])\n day = int(fields[2])\n\n hour = int(fields[3])\n minute = int(fields[4])\n second = int(fields[5])\n micros = int(fields[6]) * 1000 if fields[6] else 0\n\n # construct...\n zone_offset = timedelta(hours=0, minutes=0)\n zone = timezone(zone_offset)\n\n localized = datetime(year, month, day, hour, minute, second, micros, tzinfo=zone)\n\n return LocalizedDatetime(localized)\n\n\n @classmethod\n def __construct_from_iso8601_numeric(cls, datetime_str):\n # match...\n match = re.match('(\\d{4})-(\\d{2})-(\\d{2})T(\\d{2}):(\\d{2}):(\\d{2})(?:.(\\d{3}))?([ +\\-]?)(\\d{2}):(\\d{2})',\n datetime_str)\n\n if match is None:\n return None\n\n fields = match.groups()\n\n # fields...\n year = int(fields[0])\n month = int(fields[1])\n day = int(fields[2])\n\n hour = int(fields[3])\n minute = int(fields[4])\n second = int(fields[5])\n micros = int(fields[6]) * 1000 if fields[6] else 0\n\n zone_sign = -1 if fields[7] == '-' else 1\n zone_hours = int(fields[8])\n zone_mins = int(fields[9])\n\n # construct...\n zone_offset = zone_sign * timedelta(hours=zone_hours, minutes=zone_mins)\n zone = timezone(zone_offset)\n\n localized = datetime(year, month, day, hour, minute, second, micros, tzinfo=zone)\n\n return LocalizedDatetime(localized)\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __init__(self, localized):\n \"\"\"\n Constructor\n \"\"\"\n self.__datetime = localized # datetime\n\n\n def __add__(self, other: datetime):\n return LocalizedDatetime(self.__datetime + other)\n\n\n def __sub__(self, other):\n other_datetime = other.__datetime if 
type(other) == LocalizedDatetime else other\n\n return self.__datetime - other_datetime\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def utc(self):\n localized = self.datetime.astimezone(pytz.timezone('Etc/UTC'))\n\n return LocalizedDatetime(localized)\n\n\n def localize(self, zone): # zone may be datetime.timezone or pytz.timezone\n localized = self.datetime.astimezone(zone)\n\n return LocalizedDatetime(localized)\n\n\n def timedelta(self, days=0, seconds=0, microseconds=0, milliseconds=0, minutes=0, hours=0, weeks=0):\n td = timedelta(days=days, seconds=seconds, microseconds=microseconds, milliseconds=milliseconds,\n minutes=minutes, hours=hours, weeks=weeks)\n\n return LocalizedDatetime(self.__datetime + td)\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def as_iso8601(self):\n \"\"\"\n example: 2016-08-13T00:38:05.210+00:00\n \"\"\"\n date = self.__datetime.strftime(\"%Y-%m-%d\")\n time = self.__datetime.strftime(\"%H:%M:%S\")\n\n micros = float(self.__datetime.strftime(\"%f\"))\n millis = \"%03d\" % (micros // 1000)\n\n zone = self.__datetime.strftime(\"%z\")\n zone_hours = zone[:3]\n zone_mins = zone[3:]\n\n return \"%sT%s.%s%s:%s\" % (date, time, millis, zone_hours, zone_mins)\n\n\n def as_json(self):\n return self.as_iso8601()\n\n\n def timestamp(self):\n return self.__datetime.timestamp()\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @property\n def datetime(self):\n return self.__datetime\n\n\n @property\n def tzinfo(self):\n return self.__datetime.tzinfo\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __str__(self, *args, **kwargs):\n return \"LocalizedDatetime:{datetime:%s}\" % self.datetime\n" }, { "alpha_fraction": 0.4138983190059662, "alphanum_fraction": 0.4335593283176422, "avg_line_length": 29.72916603088379, "blob_id": "cdbea5419f6d866858fca19b72e5d752aeb81b8f", "content_id": "8e592e73a63db19fa775ea081b0fc11a288e8a19", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2950, "license_type": "permissive", "max_line_length": 118, "num_lines": 96, "path": "/src/scs_core/gas/afe_baseline.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on 1 Mar 2017\n\n@author: Bruno Beloff ([email protected])\n\nexample JSON:\n{\"sn1\": {\"calibrated_on\": \"2017-03-01\", \"offset\": 111}, \"sn2\": {\"calibrated_on\": \"2017-03-01\", \"offset\": 222},\n\"sn3\": {\"calibrated_on\": \"2017-03-01\", \"offset\": 333}, \"sn4\": {\"calibrated_on\": \"2017-03-01\", \"offset\": 444}}\n\"\"\"\n\nimport os\n\nfrom collections import OrderedDict\n\nfrom scs_core.data.json import PersistentJSONable\nfrom scs_core.gas.sensor_baseline import SensorBaseline\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass AFEBaseline(PersistentJSONable):\n \"\"\"\n classdocs\n \"\"\"\n\n __SENSORS = 4 # TODO: better to find out how long the AFECalib is than to use a constant\n\n # ----------------------------------------------------------------------------------------------------------------\n\n __FILENAME = \"afe_baseline.json\"\n\n @classmethod\n def filename(cls, host):\n return os.path.join(host.conf_dir(), 
cls.__FILENAME)\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @classmethod\n def construct_from_jdict(cls, jdict):\n if not jdict:\n return AFEBaseline([SensorBaseline(None, 0)] * cls.__SENSORS)\n\n sensor_baselines = []\n\n for i in range(len(jdict)):\n key = 'sn' + str(i + 1)\n\n baseline = SensorBaseline.construct_from_jdict(jdict[key]) if key in jdict else SensorBaseline(None, 0)\n sensor_baselines.append(baseline)\n\n return AFEBaseline(sensor_baselines)\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __init__(self, sensor_baselines):\n \"\"\"\n Constructor\n \"\"\"\n super().__init__()\n\n self.__sensor_baselines = sensor_baselines # array of SensorBaseline\n\n\n def __len__(self):\n return len(self.__sensor_baselines)\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def as_json(self):\n jdict = OrderedDict()\n\n for i in range(len(self.__sensor_baselines)):\n jdict['sn' + str(i + 1)] = self.__sensor_baselines[i]\n\n return jdict\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def sensor_baseline(self, i):\n return self.__sensor_baselines[i]\n\n\n def set_sensor_baseline(self, i, sensor_baseline):\n self.__sensor_baselines[i] = sensor_baseline\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __str__(self, *args, **kwargs):\n sensor_baselines = '[' + ', '.join(str(baseline) for baseline in self.__sensor_baselines) + ']'\n\n return \"AFEBaseline:{sensor_baselines:%s}\" % sensor_baselines\n" }, { "alpha_fraction": 0.3926829397678375, "alphanum_fraction": 0.4548780620098114, "avg_line_length": 30.538461685180664, "blob_id": "684e7b605543f07f0049c736f2eb79b5532f7536", "content_id": "2fea3a85d9ca9b8058081f0eb0860fac62384827", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1640, "license_type": "permissive", "max_line_length": 118, "num_lines": 52, "path": "/src/scs_core/sample/status_sample.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on 20 Oct 2016\n\n@author: Bruno Beloff ([email protected])\n\nexample document:\n{\"tag\": \"scs-bgx-401\",\n \"rec\": \"2017-09-24T07:56:27.304+00:00\",\n \"val\": {\n \"tz\": {\"name\": \"Europe/London\", \"utc-offset\": \"+01:00\"},\n \"pos\": {\"lat\": 50.8229247, \"lng\": -0.1229787, \"qual\": 2},\n \"sch\": {\"scs-climate\": {\"interval\": 60.0, \"tally\": 1}, \"scs-gases\": {\"interval\": 10.0, \"tally\": 1},\n \"scs-particulates\": {\"interval\": 10.0, \"tally\": 1}, \"scs-status\": {\"interval\": 60.0, \"tally\": 1}},\n \"tmp\": {\"brd\": 35.8, \"hst\": null},\n \"up\": {\"period\": \"01-21:52:00.000\", \"users\": 0, \"load\": {\"av1\": 0.0, \"av5\": 0.0, \"av15\": 0.0}},\n \"psu\": {\"p-rst\": false, \"w-rst\": false, \"batt-flt\": false, \"host-3v3\": 3.4, \"pwr-in\": 12.4, \"prot-batt\": 6.7}}}\n\"\"\"\n\nfrom scs_core.sample.sample import Sample\n\n\n# TODO: reporting GPS / PSU should depend on conf, not value\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass StatusSample(Sample):\n \"\"\"\n classdocs\n \"\"\"\n\n # 
----------------------------------------------------------------------------------------------------------------\n\n def __init__(self, tag, rec, timezone, position, temperature, schedule, uptime, psu_status):\n \"\"\"\n Constructor\n \"\"\"\n val = []\n\n if timezone:\n val.append(('tz', timezone))\n\n if position:\n val.append(('pos', position))\n\n val.append(('sch', schedule))\n val.append(('tmp', temperature))\n val.append(('up', uptime))\n\n if psu_status:\n val.append(('psu', psu_status))\n\n super().__init__(tag, rec, *val)\n" }, { "alpha_fraction": 0.30174925923347473, "alphanum_fraction": 0.352769672870636, "avg_line_length": 23.5, "blob_id": "645a592b6ad6bd5087bbc1e12fc0bb8dd02f4664", "content_id": "6f5ca391db2fb5e560b57fe9fc7708a6d9b7f002", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 686, "license_type": "permissive", "max_line_length": 118, "num_lines": 28, "path": "/src/scs_core/sample/climate_sample.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on 17 Feb 2017\n\n@author: Bruno Beloff ([email protected])\n\nexample document:\n{\"tag\": \"scs-be2-2\",\n \"rec\": \"2017-09-24T07:52:40.489+00:00\",\n \"val\": {\"hmd\": 56.2, \"tmp\": 22.2}}\n\"\"\"\n\nfrom scs_core.sample.sample import Sample\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass ClimateSample(Sample):\n \"\"\"\n classdocs\n \"\"\"\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __init__(self, tag, rec, sample):\n \"\"\"\n Constructor\n \"\"\"\n super().__init__(tag, rec, ('hmd', sample.humid), ('tmp', sample.temp))\n" }, { "alpha_fraction": 0.43572986125946045, "alphanum_fraction": 0.46078431606292725, "avg_line_length": 31.785715103149414, "blob_id": "03c73450b8ac7232cebaa34f2ad0197d3f93776f", "content_id": "2f87cbdf4b368d19e81ec79dc769e41d74e6ab35", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1836, "license_type": "permissive", "max_line_length": 118, "num_lines": 56, "path": "/src/scs_core/aws/manager/message_manager.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on 6 Nov 2017\n\n@author: Bruno Beloff ([email protected])\n\nhttps://xy1eszuu23.execute-api.us-west-2.amazonaws.com/staging/topicMessages?\ntopic=south-coast-science-dev/production-test/loc/1/gases&startTime=2018-03-31T10:45:50Z&endTime=2018-03-31T10:46:50Z\n\"\"\"\n\nfrom scs_core.aws.client.rest_client import RESTClient\nfrom scs_core.aws.data.message import MessageCollection\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass MessageManager(object):\n \"\"\"\n classdocs\n \"\"\"\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __init__(self, http_client, api_key, verbose=False):\n \"\"\"\n Constructor\n \"\"\"\n self.__rest_client = RESTClient(http_client, api_key)\n self.__verbose = verbose\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def find_for_topic(self, topic, start_date, end_date):\n request_path = '/staging/topicMessages'\n params = {'topic': topic, 'startTime': start_date.utc().as_iso8601(), 'endTime': 
end_date.utc().as_iso8601()}\n\n # request...\n self.__rest_client.connect()\n\n try:\n jdict = self.__rest_client.get(request_path, params)\n\n # messages...\n collection = MessageCollection.construct_from_jdict(jdict)\n\n messages = collection.items\n finally:\n self.__rest_client.close()\n\n return messages\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __str__(self, *args, **kwargs):\n return \"MessageManager:{rest_client:%s, verbose:%s}\" % (self.__rest_client, self.__verbose)\n" }, { "alpha_fraction": 0.39801639318466187, "alphanum_fraction": 0.40534713864326477, "avg_line_length": 28.35443115234375, "blob_id": "b75a713d961914f2bece7cbb2dfdca2ad653ce18", "content_id": "a51b27f6650d06a5aafe8d8190be512e5a01132a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2319, "license_type": "permissive", "max_line_length": 118, "num_lines": 79, "path": "/src/scs_core/gas/sensor_baseline.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on 1 Mar 2017\n\n@author: Bruno Beloff ([email protected])\n\nexample JSON:\n{\"calibrated_on\": \"2017-03-01\", \"offset\": 444}\n\"\"\"\n\nfrom collections import OrderedDict\n\nfrom scs_core.data.datum import Datum\nfrom scs_core.data.localized_datetime import LocalizedDatetime\nfrom scs_core.data.json import JSONable\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass SensorBaseline(JSONable):\n \"\"\"\n classdocs\n \"\"\"\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @classmethod\n def construct_from_jdict(cls, jdict):\n if not jdict:\n return SensorBaseline(None, 0)\n\n if 'calibrated_on' in jdict: # TODO: deprecated\n date = Datum.date(jdict.get('calibrated_on'))\n calibrated_on = LocalizedDatetime.construct_from_date(date)\n\n else:\n calibrated_on = Datum.datetime(jdict.get('calibrated-on'))\n\n offset = Datum.int(jdict.get('offset'))\n\n return SensorBaseline(calibrated_on, offset)\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __init__(self, calibrated_on, offset):\n \"\"\"\n Constructor\n \"\"\"\n self.__calibrated_on = calibrated_on # LocalizedDatetime\n self.__offset = offset # int ppb\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def as_json(self):\n jdict = OrderedDict()\n\n jdict['calibrated-on'] = self.calibrated_on\n jdict['offset'] = self.offset\n\n return jdict\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @property\n def calibrated_on(self):\n return self.__calibrated_on\n\n\n @property\n def offset(self):\n return self.__offset\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __str__(self, *args, **kwargs):\n return \"SensorBaseline:{calibrated_on:%s, offset:%d}\" % (self.calibrated_on, self.offset)\n" }, { "alpha_fraction": 0.2911512851715088, "alphanum_fraction": 0.29686012864112854, "avg_line_length": 25.274999618530273, "blob_id": "ae60be1375f227b36fff3287fdd09083b95c4f36", "content_id": "304cdcfd5d9f57e1bda37642bf151f0e65a17a87", "detected_licenses": [ "MIT" ], "is_generated": false, 
"is_vendor": false, "language": "Python", "length_bytes": 1051, "license_type": "permissive", "max_line_length": 118, "num_lines": 40, "path": "/src/scs_core/csv/csv_logger.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on 10 Jul 2016\n\n@author: Bruno Beloff ([email protected])\n\"\"\"\n\nfrom _csv import writer\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass CSVLogger(object):\n \"\"\"\n heap memory storage of CSV data\n \"\"\"\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __init__(self, filename):\n self.__filename = filename\n self.__log = []\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def writerow(self, row):\n self.__log.append(tuple(row))\n\n\n def flush(self):\n csv = writer(self.__filename)\n\n for row in self.__log:\n csv.writerow(row)\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __str__(self, *args, **kwargs):\n return \"CSVLogger:{filename:%s, log:%s}\" % (self.__filename, self.__log)\n" }, { "alpha_fraction": 0.3998144567012787, "alphanum_fraction": 0.4730983376502991, "avg_line_length": 20.959182739257812, "blob_id": "f8023b52d7980ccf3ba4c2f7df319a9a01dc9e42", "content_id": "f85f2a05c5d4f3cfe3c37e22d0b3250f25f53e31", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1078, "license_type": "permissive", "max_line_length": 118, "num_lines": 49, "path": "/tests/csv/csv_dict_test.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n\n\"\"\"\nCreated on 21 Sep 2016\n\n@author: Bruno Beloff ([email protected])\n\"\"\"\n\nimport json\nfrom collections import OrderedDict\n\nfrom scs_core.csv.csv_dict import CSVDict\nfrom scs_core.data.json import JSONify\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\njstr = '{\"rec\": \"2016-09-27T13:29:52.947+01:00\", \"val\": {\"opc\": {\"pm1\": 5.1, \"pm2p5\": 22.7, \"pm10\": 195.1, ' \\\n '\"per\": 5.0, \"bin1\": [77, 38, 52, 39, 9, 15, 9, 2], \"bin2\": [4, 1, 2, 4, 0, 0, 0, 0], ' \\\n '\"mtf1\": 30, \"mtf3\": 34, \"mtf5\": 34, \"mtf7\": 39}}}'\nprint(jstr)\nprint(\"-\")\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\njdict = json.loads(jstr, object_pairs_hook=OrderedDict)\nprint(jdict)\nprint(\"-\")\n\ndatum = CSVDict(jdict)\nprint(datum)\nprint(\"-\")\n\nheader = datum.header\nprint(header)\nprint(\"-\")\n\nrow = datum.row\nprint(row)\nprint(\"-\")\n\njdict = CSVDict.as_dict(header, row)\nprint(jdict)\nprint(\"-\")\n\njstr = JSONify.dumps(jdict)\nprint(jstr)\nprint(\"=\")\n\n\n" }, { "alpha_fraction": 0.37920621037483215, "alphanum_fraction": 0.3805004358291626, "avg_line_length": 27.096969604492188, "blob_id": "8d9e3b94297104c29e0fbc38f0fdc4597cb2e7a9", "content_id": "882099d40c95fd981b9e1272d761ac4aff45be9d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4636, "license_type": "permissive", "max_line_length": 118, "num_lines": 165, "path": "/src/scs_core/data/json.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on 13 
Aug 2016\n\n@author: Bruno Beloff ([email protected])\n\"\"\"\n\nimport json\nimport os\n\nfrom abc import abstractmethod\n\nfrom collections import OrderedDict\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass JSONable(object):\n \"\"\"\n classdocs\n \"\"\"\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def as_list(self, jlist):\n del jlist[:] # empty the list\n\n for key, value in self.as_json().items():\n try:\n value = value.as_json() # TODO: recurse to construct a list\n except AttributeError:\n pass\n\n jlist.append((key, value)) # append the key-value pairs of the dictionary\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @abstractmethod\n def as_json(self): # TODO: handle named parameters of JSONify.dumps(..)\n pass\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass PersistentJSONable(JSONable):\n \"\"\"\n classdocs\n \"\"\"\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @classmethod\n def load(cls, host):\n filename = cls.filename(host)\n\n instance = None if filename is None else cls.load_from_file(filename)\n\n if instance is not None:\n instance.__host = host # TODO: remove host management from PersistentJSONable subclasses\n\n return instance\n\n\n @classmethod\n def load_from_file(cls, filename):\n try:\n f = open(filename, \"r\")\n except FileNotFoundError:\n return cls.construct_from_jdict(None)\n\n jstr = f.read().strip()\n f.close()\n\n jdict = json.loads(jstr, object_pairs_hook=OrderedDict)\n\n return cls.construct_from_jdict(jdict)\n\n\n @classmethod\n def delete(cls, host):\n os.remove(cls.filename(host))\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @classmethod\n @abstractmethod\n def filename(cls, _):\n return ''\n\n\n @classmethod\n @abstractmethod\n def construct_from_jdict(cls, _):\n return PersistentJSONable()\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __init__(self):\n self.__host = None\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def save(self, host):\n self.__host = host\n\n self.save_to_file(self.filename(host))\n\n\n def save_to_file(self, filename):\n jstr = JSONify.dumps(self)\n\n f = open(filename, \"w\")\n f.write(jstr + '\\n')\n f.close()\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @abstractmethod\n def as_json(self):\n pass\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @property\n def host(self):\n return self.__host\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __str__(self, *args, **kwargs):\n return \"PersistentJSONable:{host:%s}\" % self.host\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass JSONify(json.JSONEncoder):\n \"\"\"\n classdocs\n \"\"\"\n\n @staticmethod\n def dumps(obj, skipkeys=False, 
ensure_ascii=True, check_circular=True,\n allow_nan=True, cls=None, indent=None, separators=None,\n default=None, sort_keys=False, **kw):\n\n handler = JSONify if cls is None else cls\n\n return json.dumps(obj, skipkeys=skipkeys, ensure_ascii=ensure_ascii, check_circular=check_circular,\n allow_nan=allow_nan, cls=handler, indent=indent, separators=separators,\n default=default, sort_keys=sort_keys, **kw)\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def default(self, obj):\n if isinstance(obj, JSONable):\n return obj.as_json()\n\n return json.JSONEncoder.default(self, obj)\n" }, { "alpha_fraction": 0.4195979833602905, "alphanum_fraction": 0.4271356761455536, "avg_line_length": 16.688888549804688, "blob_id": "aa0cbf2256799799586a3fa65d070f70f1e8e791", "content_id": "d3181962509b5d17cb5de310209315287f06eda6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 796, "license_type": "permissive", "max_line_length": 118, "num_lines": 45, "path": "/src/scs_core/gas/ndir.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on 15 Mar 2018\n\n@author: Bruno Beloff ([email protected])\n\nThe abstract definition of an NDIR required by NDIRMonitor.\nImplementations are elsewhere.\n\"\"\"\n\nfrom abc import abstractmethod\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass NDIR(object):\n \"\"\"\n classdocs\n \"\"\"\n\n # ----------------------------------------------------------------------------------------------------------------\n # abstract NDIR...\n\n @abstractmethod\n def power_on(self):\n pass\n\n\n @abstractmethod\n def power_off(self):\n pass\n\n\n @abstractmethod\n def sample(self):\n pass\n\n\n @abstractmethod\n def version(self):\n pass\n\n\n @abstractmethod\n def sample_interval(self):\n pass\n" }, { "alpha_fraction": 0.3977777659893036, "alphanum_fraction": 0.44055554270744324, "avg_line_length": 31.14285659790039, "blob_id": "d3604efa527243b111107a40eec1392216b4b925", "content_id": "44b844a09fec9d6cb8f0ec9514213edd4ae71d3d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3600, "license_type": "permissive", "max_line_length": 118, "num_lines": 112, "path": "/src/scs_core/osio/data/device_topic.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on 2 Jul 2017\n\n@author: Bruno Beloff ([email protected])\n\nexample:\n {\n \"client_id\": \"5926\",\n \"owner\": \"southcoastscience-dev\",\n \"topic\": \"\\/orgs\\/south-coast-science-dev\\/development\\/loc\\/3\\/gases\",\n \"date\": \"2017-03-19T08:00:18.414Z\",\n \"payload\": {\n \"encoding\": \"utf-8\",\n \"content-type\": \"application\\/json\",\n \"text\": \"{\\\"tag\\\": \\\"scs-ap1-6\\\", \\\"rec\\\": \\\"2017-03-19T08:00:17.601+00:00\\\", \\\"val\\\": \n {\\\"NO2\\\": {\\\"weV\\\": 0.286379, \\\"aeV\\\": 0.288317, \\\"weC\\\": -0.014025, \\\"cnc\\\": -76.2}, \n \\\"Ox\\\": {\\\"weV\\\": 0.416506, \\\"aeV\\\": 0.407381, \\\"weC\\\": null, \\\"cnc\\\": null}, \n \\\"NO\\\": {\\\"weV\\\": 0.296255, \\\"aeV\\\": 0.31138, \\\"weC\\\": -0.036358, \\\"cnc\\\": -98.5}, \n \\\"CO\\\": {\\\"weV\\\": 0.33413, \\\"aeV\\\": 0.566696, \\\"weC\\\": 0.34805, \\\"cnc\\\": 1456.3}, \n \\\"pt1\\\": {\\\"v\\\": 0.324005, \\\"tmp\\\": 23.3}, \n \\\"sht\\\": {\\\"hmd\\\": 45.4, \\\"tmp\\\": 21.9}}}\"\n 
},\n\"\"\"\n\nfrom collections import OrderedDict\n\nfrom scs_core.data.json import JSONable\nfrom scs_core.data.localized_datetime import LocalizedDatetime\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass DeviceTopic(JSONable):\n \"\"\"\n classdocs\n \"\"\"\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @classmethod\n def construct_from_message_jdict(cls, jdict):\n if not jdict:\n return None\n\n client_id = jdict.get('device')\n path = jdict.get('topic')\n earliest_publication = LocalizedDatetime.construct_from_jdict(jdict.get('date'))\n\n client_id = DeviceTopic(client_id, path, earliest_publication)\n\n return client_id\n\n\n @classmethod\n def construct_from_jdict(cls, jdict):\n if not jdict:\n return None\n\n client_id = jdict.get('client-id')\n path = jdict.get('path')\n earliest_publication = LocalizedDatetime.construct_from_jdict(jdict.get('earliest-pub'))\n\n client_id = DeviceTopic(client_id, path, earliest_publication)\n\n return client_id\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __init__(self, client_id, path, earliest_publication):\n \"\"\"\n Constructor\n \"\"\"\n self.__client_id = client_id # string (int by convention)\n self.__path = path # string\n self.__earliest_publication = earliest_publication # LocalisedDatetime\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def as_json(self):\n jdict = OrderedDict()\n\n jdict['client-id'] = self.client_id\n jdict['path'] = self.path\n jdict['earliest-pub'] = self.earliest_publication\n\n return jdict\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @property\n def client_id(self):\n return self.__client_id\n\n\n @property\n def path(self):\n return self.__path\n\n\n @property\n def earliest_publication(self):\n return self.__earliest_publication\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __str__(self, *args, **kwargs):\n return \"DeviceTopic:{client_id:%s, path:%s, earliest_publication:%s}\" % \\\n (self.client_id, self.path, self.earliest_publication)\n" }, { "alpha_fraction": 0.2808510661125183, "alphanum_fraction": 0.40303951501846313, "avg_line_length": 33.27083206176758, "blob_id": "45a91cb290976bd335723e3bfb8f3ab5b5c95d7a", "content_id": "13acfec54f7cf0f40d73de24ac610a68741c22d4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1645, "license_type": "permissive", "max_line_length": 118, "num_lines": 48, "path": "/src/scs_core/sample/particulates_sample.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on 20 Oct 2016\n\n@author: Bruno Beloff ([email protected])\n\nexample document:\n{\"tag\": \"scs-bgb-113\",\n \"rec\": \"2017-09-24T07:54:28.687+00:00\",\n \"val\": {\n \"pm1\": 14.0, \"pm2p5\": 16.4, \"pm10\": 22.2,\n \"bins\": {\"0\": 1482, \"1\": 230, \"2\": 72, \"3\": 14, \"4\": 7, \"5\": 6, \"6\": 7, \"7\": 2, \"8\": 5, \"9\": 3, \"10\": 0,\n \"11\": 0, \"12\": 0, \"13\": 0, \"14\": 0, \"15\": 0},\n \"mtf1\": 14, \"mtf3\": 18, \"mtf5\": 22, \"mtf7\": 31}}\n\n\n{\"tag\": \"scs-be2-3\",\n \"rec\": \"2017-10-11T18:16:15.615+00:00\",\n \"val\": 
{\n \"per\": null, \"pm1\": null, \"pm2p5\": null, \"pm10\": null,\n \"bins\": {\"0\": -1, \"1\": -1, \"2\": -1, \"3\": -1, \"4\": -1, \"5\": -1, \"6\": -1, \"7\": -1, \"8\": -1, \"9\": -1, \"10\": -1,\n \"11\": -1, \"12\": -1, \"13\": -1, \"14\": -1, \"15\": -1},\n \"mtf1\": 255, \"mtf3\": 255, \"mtf5\": 255, \"mtf7\": 255}}\n\"\"\"\n\nfrom collections import OrderedDict\n\nfrom scs_core.sample.sample import Sample\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass ParticulatesSample(Sample):\n \"\"\"\n classdocs\n \"\"\"\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __init__(self, tag, rec, sample):\n \"\"\"\n Constructor\n \"\"\"\n bins = OrderedDict([(i, sample.bins[i]) for i in range(len(sample.bins))])\n\n super().__init__(tag, rec, ('per', sample.period),\n ('pm1', sample.pm1), ('pm2p5', sample.pm2p5), ('pm10', sample.pm10), ('bins', bins),\n ('mtf1', sample.bin_1_mtof), ('mtf3', sample.bin_3_mtof), ('mtf5', sample.bin_5_mtof),\n ('mtf7', sample.bin_7_mtof))\n" }, { "alpha_fraction": 0.3908371031284332, "alphanum_fraction": 0.393947958946228, "avg_line_length": 25.19259262084961, "blob_id": "cda25ddc213dff9c38c95d3e2010984d092451b2", "content_id": "51136a463d68b37097969d44c6bbe63f4bf2bd25", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3536, "license_type": "permissive", "max_line_length": 118, "num_lines": 135, "path": "/src/scs_core/csv/csv_dict.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on 21 Sep 2016\n\n@author: Bruno Beloff ([email protected])\n\"\"\"\n\nfrom collections import OrderedDict\n\n\n# TODO: deal with numeric index dictionaries\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass CSVDict(object):\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @classmethod\n def as_dict(cls, header, row):\n dictionary = OrderedDict()\n\n for i in range(len(header)):\n cls.__as_dict(header[i].strip().split(\".\"), row[i], dictionary)\n\n return dictionary\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @classmethod\n def __as_dict(cls, nodes, cell, dictionary):\n key = nodes[0]\n\n # scalar...\n if len(nodes) == 1:\n dictionary[key] = cell\n return\n\n # list...\n if cls.__is_list_path(nodes):\n if key not in dictionary:\n dictionary[key] = []\n\n dictionary[key].append(cell)\n return\n\n # object...\n if key not in dictionary:\n dictionary[key] = OrderedDict()\n\n cls.__as_dict(nodes[1:], cell, dictionary[key])\n\n\n @classmethod\n def __is_list_path(cls, nodes):\n if len(nodes) != 2:\n return False\n\n try:\n leaf_node = float(nodes[1])\n return leaf_node.is_integer()\n except ValueError:\n return False\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __init__(self, dictionary):\n \"\"\"\n Constructor\n \"\"\"\n self.__dictionary = dictionary\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @property\n def dictionary(self):\n return self.__dictionary\n\n\n @property\n def header(self):\n return self.__header(self.__dictionary)\n\n\n 
@property\n def row(self):\n return self.__row(self.__dictionary)\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __header(self, dictionary, prefix=None):\n dot_prefix = prefix + '.' if prefix else ''\n\n header = []\n for key in dictionary:\n # object...\n if isinstance(dictionary[key], dict):\n header.extend(self.__header(dictionary[key], dot_prefix + key))\n\n # list...\n elif isinstance(dictionary[key], list):\n header.extend([dot_prefix + key + '.' + str(i) for i in range(len(dictionary[key]))])\n\n # scalar...\n else:\n header.append(dot_prefix + key)\n\n return header\n\n\n def __row(self, dictionary):\n row = []\n for key in dictionary:\n # object...\n if isinstance(dictionary[key], dict):\n row.extend(self.__row(dictionary[key]))\n\n # list...\n elif isinstance(dictionary[key], list):\n row.extend(dictionary[key])\n\n # scalar...\n else:\n row.append(dictionary[key])\n\n return row\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __str__(self, *args, **kwargs):\n return \"CSVDict:{dictionary:%s}\" % self.dictionary\n" }, { "alpha_fraction": 0.4217686951160431, "alphanum_fraction": 0.44671201705932617, "avg_line_length": 37.34782791137695, "blob_id": "8e815a08b400e12b9124c88c8a55d59d7ff53bf3", "content_id": "d77f283717f8192ade0292a62270cf6557c1797a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2646, "license_type": "permissive", "max_line_length": 118, "num_lines": 69, "path": "/src/scs_core/gas/a4.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on 30 Sep 2016\n\n@author: Bruno Beloff ([email protected])\n\"\"\"\n\nfrom scs_core.gas.a4_datum import A4Datum\nfrom scs_core.gas.sensor import Sensor\nfrom scs_core.gas.a4_temp_comp import A4TempComp\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass A4(Sensor):\n \"\"\"\n classdocs\n \"\"\"\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @classmethod\n def init(cls):\n cls.SENSORS[cls.CODE_CO] = A4(cls.CODE_CO, 'CO', 3)\n cls.SENSORS[cls.CODE_H2S] = A4(cls.CODE_H2S, 'H2S', 3)\n cls.SENSORS[cls.CODE_NO] = A4(cls.CODE_NO, 'NO', 3)\n cls.SENSORS[cls.CODE_NO2] = A4(cls.CODE_NO2, 'NO2', 3)\n cls.SENSORS[cls.CODE_OX] = A4(cls.CODE_OX, 'Ox', 3)\n cls.SENSORS[cls.CODE_SO2] = A4(cls.CODE_SO2, 'SO2', 3)\n\n cls.SENSORS[cls.CODE_TEST_1] = A4(cls.CODE_TEST_1, 'SN1', 3)\n cls.SENSORS[cls.CODE_TEST_2] = A4(cls.CODE_TEST_2, 'SN2', 3)\n cls.SENSORS[cls.CODE_TEST_3] = A4(cls.CODE_TEST_3, 'SN3', 3)\n cls.SENSORS[cls.CODE_TEST_4] = A4(cls.CODE_TEST_4, 'SN4', 3)\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __init__(self, sensor_code, gas_name, adc_gain_index):\n \"\"\"\n Constructor\n \"\"\"\n Sensor.__init__(self, sensor_code, gas_name, adc_gain_index)\n\n self.__tc = A4TempComp.find(sensor_code)\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def sample(self, afe, temp, sensor_index, no2_sample=None):\n we_v, ae_v = afe.sample_raw_wrk_aux(sensor_index, self.adc_gain_index)\n\n if self.has_no2_cross_sensitivity():\n if no2_sample is None:\n raise ValueError(\"A4.sample: 
no2_sample required, but none given.\")\n\n return A4Datum.construct(self.calib, self.baseline, self.__tc, temp, we_v, ae_v, no2_sample.cnc)\n\n return A4Datum.construct(self.calib, self.baseline, self.__tc, temp, we_v, ae_v)\n\n\n def null_datum(self):\n return A4Datum(None, None)\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __str__(self, *args, **kwargs):\n return \"A4:{sensor_code:%s, gas_name:%s, adc_gain_index:0x%04x, calib:%s, baseline:%s, tc:%s}\" % \\\n (self.sensor_code, self.gas_name, self.adc_gain_index, self.calib, self.baseline, self.__tc)\n" }, { "alpha_fraction": 0.3231036365032196, "alphanum_fraction": 0.3265778720378876, "avg_line_length": 25.16666603088379, "blob_id": "99188142a739e52489cdfb7f3abd607f731f1d2b", "content_id": "f8a118041bd89ead2869df895d930217b711eb23", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1727, "license_type": "permissive", "max_line_length": 118, "num_lines": 66, "path": "/src/scs_core/sample/sample.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on 22 Sep 2016\n\n@author: Bruno Beloff ([email protected])\n\"\"\"\n\nfrom collections import OrderedDict\n\nfrom scs_core.data.json import JSONable\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass Sample(JSONable):\n \"\"\"\n classdocs\n \"\"\"\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __init__(self, tag, rec, *values):\n \"\"\"\n Constructor\n \"\"\"\n self.__tag = tag # string\n self.__rec = rec # LocalizedDatetime\n self.__val = OrderedDict(values) # OrderedDict of (src, JSONable)\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def as_json(self):\n jdict = OrderedDict()\n\n if self.tag is not None:\n jdict['tag'] = self.tag\n\n jdict['rec'] = self.rec.as_json()\n jdict['val'] = self.val\n\n return jdict\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @property\n def tag(self):\n return self.__tag\n\n\n @property\n def rec(self):\n return self.__rec\n\n\n @property\n def val(self):\n return self.__val\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __str__(self, *args, **kwargs):\n vals = '[' + ', '.join(str(key) + ': ' + str(self.val[key]) for key in self.val) + ']'\n\n return self.__class__.__name__ + \":{tag:%s, rec:%s, val:%s}\" % (self.tag, self.rec, vals)\n" }, { "alpha_fraction": 0.3347131907939911, "alphanum_fraction": 0.33885276317596436, "avg_line_length": 25, "blob_id": "1ef0d6d37e7ed239220bc4bec676068621199b5e", "content_id": "55501a378a1beb59f75abf8e9d7fae8163807f2c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1691, "license_type": "permissive", "max_line_length": 118, "num_lines": 65, "path": "/src/scs_core/gas/sensor_calib.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on 30 Sep 2016\n\n@author: Bruno Beloff ([email protected])\n\"\"\"\n\nfrom abc import abstractmethod\n\nfrom scs_core.data.json import JSONable\n\nfrom scs_core.gas.sensor import Sensor\n\n\n# 
--------------------------------------------------------------------------------------------------------------------\n\nclass SensorCalib(JSONable):\n \"\"\"\n classdocs\n \"\"\"\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @classmethod\n def reports_no2_cross_sensitivity(cls): # the default - override as necessary\n return False\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __init__(self, serial_number, sensor_type):\n \"\"\"\n Constructor\n \"\"\"\n self.__serial_number = serial_number # int\n self.__sensor_type = sensor_type # string\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def sensor(self, baseline):\n sensor = Sensor.find(self.__serial_number)\n\n sensor.calib = self\n sensor.baseline = baseline\n\n return sensor\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @abstractmethod\n def as_json(self):\n pass\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @property\n def serial_number(self):\n return self.__serial_number\n\n\n @property\n def sensor_type(self):\n return self.__sensor_type\n\n" }, { "alpha_fraction": 0.304824560880661, "alphanum_fraction": 0.4159356653690338, "avg_line_length": 27.5, "blob_id": "051a422e5367ca30a3812f5f0892cf5467071121", "content_id": "9aacd0402cec4c154e7d3e2b6b898a484e986909", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1368, "license_type": "permissive", "max_line_length": 118, "num_lines": 48, "path": "/src/scs_core/sample/gases_sample.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on 20 Oct 2016\n\n@author: Bruno Beloff ([email protected])\n\nexample document:\n{\"tag\": \"scs-be2-2\",\n \"rec\": \"2017-09-24T07:51:21.510+00:00\",\n \"val\": {\n \"NO2\": {\"weV\": 0.312317, \"aeV\": 0.31038, \"weC\": -0.001, \"cnc\": 14.8},\n \"CO\": {\"weV\": 0.325005, \"aeV\": 0.254254, \"weC\": 0.077239, \"cnc\": 323.2},\n \"SO2\": {\"weV\": 0.277942, \"aeV\": 0.267754, \"weC\": 0.004136, \"cnc\": 27.6},\n \"H2S\": {\"weV\": 0.221816, \"aeV\": 0.269817, \"weC\": -0.006301, \"cnc\": 29.6},\n \"pt1\": {\"v\": 0.321411, \"tmp\": 21.9},\n \"sht\": {\"hmd\": 73.0, \"tmp\": 21.4}}}\n\"\"\"\n\nfrom scs_core.sample.sample import Sample\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass GasesSample(Sample):\n \"\"\"\n classdocs\n \"\"\"\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __init__(self, tag, rec, ndir_datum, afe_datum, sht_datum):\n \"\"\"\n Constructor\n \"\"\"\n val = []\n\n if ndir_datum:\n val.append(('CO2', ndir_datum))\n\n if afe_datum:\n val.extend([(key, afe_datum.sns[key]) for key in afe_datum.sns])\n\n if afe_datum.pt1000:\n val.append(('pt1', afe_datum.pt1000))\n\n if sht_datum:\n val.append(('sht', sht_datum))\n\n super().__init__(tag, rec, *val)\n" }, { "alpha_fraction": 0.37936702370643616, "alphanum_fraction": 0.3908754587173462, "avg_line_length": 23.82653045654297, "blob_id": "485c45376d6b4eb2bf7f734c8ba855d4de69d048", "content_id": "f0a03d353b6b433901b929aac9df663685234303", 
"detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2433, "license_type": "permissive", "max_line_length": 119, "num_lines": 98, "path": "/src/scs_core/position/gps_location.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on 10 Jan 2017\n\n@author: Bruno Beloff ([email protected])\n\nexample use:\n./socket_receiver.py | ./csv_writer.py status.csv -e | ./histo_chart.py val.loc.lat -v -e -o lat.csv -x 50.8228 50.8232\n./socket_receiver.py | ./csv_writer.py status.csv -e | ./histo_chart.py val.loc.lng -v -e -o lng.csv -x -0.1233 -0.1227\n\"\"\"\n\nfrom collections import OrderedDict\n\nfrom scs_core.data.json import JSONable\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass GPSLocation(JSONable):\n \"\"\"\n classdocs\n \"\"\"\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @classmethod\n def construct_from_jdict(cls, jdict):\n if not jdict:\n return None\n\n lat = jdict.get('lat')\n lng = jdict.get('lng')\n quality = jdict.get('qual')\n\n return GPSLocation(lat, lng, quality)\n\n\n @classmethod\n def construct(cls, gga):\n if gga is None:\n return None\n\n quality = gga.quality\n\n loc = gga.loc\n\n if loc is None:\n return GPSLocation(None, None, quality)\n\n lat = loc.deg_lat()\n lng = loc.deg_lng()\n\n return GPSLocation(lat, lng, quality)\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __init__(self, lat, lng, quality):\n \"\"\"\n Constructor\n \"\"\"\n self.__lat = lat\n self.__lng = lng\n self.__quality = quality\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def as_json(self):\n jdict = OrderedDict()\n\n jdict['lat'] = self.lat\n jdict['lng'] = self.lng\n jdict['qual'] = self.quality\n\n return jdict\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @property\n def lat(self):\n return self.__lat\n\n\n @property\n def lng(self):\n return self.__lng\n\n\n @property\n def quality(self):\n return self.__quality\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __str__(self, *args, **kwargs):\n return \"GPSLocation:{lat:%s, lng:%s, quality:%s}\" % (self.lat, self.lng, self.quality)\n" }, { "alpha_fraction": 0.39004814624786377, "alphanum_fraction": 0.3946342468261719, "avg_line_length": 24.208091735839844, "blob_id": "84ac9c02b0dbb8929ad0f6f14259c0c83f729bba", "content_id": "fe5e244818b33b3a2d4bf297364495931ea321b4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4361, "license_type": "permissive", "max_line_length": 119, "num_lines": 173, "path": "/src/scs_core/data/path_dict.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on 27 Sep 2016\n\n@author: Bruno Beloff ([email protected])\n\"\"\"\n\nimport json\n\nfrom collections import OrderedDict\nfrom copy import deepcopy\n\nfrom scs_core.data.json import JSONable\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass PathDict(JSONable):\n \"\"\"\n classdocs\n \"\"\"\n\n # 
----------------------------------------------------------------------------------------------------------------\n\n @classmethod\n def construct_from_jstr(cls, jstr):\n try:\n jdict = json.loads(jstr, object_pairs_hook=OrderedDict)\n except ValueError:\n return None\n\n return PathDict(jdict)\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @classmethod\n def __is_list_item(cls, item, nodes):\n if not isinstance(item, list):\n return False\n\n return cls.__is_list_path(nodes)\n\n\n @classmethod\n def __is_list_path(cls, nodes):\n if len(nodes) != 2:\n return False\n\n try:\n leaf_node = float(nodes[1])\n return leaf_node.is_integer()\n except ValueError:\n return False\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __init__(self, dictionary=None):\n \"\"\"\n Constructor\n \"\"\"\n self.__dictionary = dictionary if dictionary else OrderedDict()\n\n\n # -----------------------------------------------------------------------------------------------------------------\n # source...\n\n def has_path(self, path):\n if path is None:\n return True\n\n try:\n return self.__has_path(self.__dictionary, path.split(\".\"))\n\n except TypeError:\n return False\n\n\n def node(self, path=None):\n if path is None:\n return self.__dictionary\n\n return self.__node(self.__dictionary, path.split(\".\"))\n\n\n # ----------------------------------------------------------------------------------------------------------------\n # target...\n\n def copy(self, other, path=None):\n if path is None:\n self.__dictionary = deepcopy(other.__dictionary)\n return\n\n self.__append(self.__dictionary, path.split(\".\"), other.node(path))\n\n\n def append(self, path, value):\n self.__append(self.__dictionary, path.split(\".\"), value)\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __has_path(self, dictionary, nodes):\n key = nodes[0]\n\n if key not in dictionary:\n return False\n\n # scalar...\n if len(nodes) == 1:\n return True\n\n item = dictionary[key]\n\n # list...\n if PathDict.__is_list_item(item, nodes):\n index = int(nodes[1])\n return 0 <= index < len(item)\n\n # object...\n return self.__has_path(item, nodes[1:])\n\n\n def __node(self, dictionary, nodes):\n key = nodes[0]\n\n item = dictionary[key]\n\n # scalar...\n if len(nodes) == 1:\n return item\n\n # list...\n if PathDict.__is_list_item(item, nodes):\n index = int(nodes[1])\n return item[index]\n\n # object...\n return self.__node(item, nodes[1:])\n\n\n def __append(self, dictionary, nodes, value):\n key = nodes[0]\n\n # scalar...\n if len(nodes) == 1:\n dictionary[key] = deepcopy(value)\n\n # list...\n elif PathDict.__is_list_path(nodes):\n if key not in dictionary:\n dictionary[key] = []\n\n dictionary[key].append(value)\n\n # object...\n else:\n if key not in dictionary:\n dictionary[key] = OrderedDict()\n\n self.__append(dictionary[key], nodes[1:], value)\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def as_json(self):\n return self.__dictionary\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __str__(self, *args, **kwargs):\n return \"PathDict:{dictionary:%s}\" % (self.node())\n" }, { "alpha_fraction": 0.3562772572040558, "alphanum_fraction": 0.3587009310722351, 
"avg_line_length": 23.85542106628418, "blob_id": "e533ac22de112d03329232ee3f0a48e14686cf02", "content_id": "24ad6d74ca6d7326d9f8539582c4b237f8b2c03f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2063, "license_type": "permissive", "max_line_length": 118, "num_lines": 83, "path": "/src/scs_core/csv/csv_reader.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on 4 Aug 2016\n\n@author: Bruno Beloff ([email protected])\n\"\"\"\n\nimport _csv\nimport sys\n\nfrom scs_core.csv.csv_dict import CSVDict\nfrom scs_core.data.json import JSONify\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass CSVReader(object):\n \"\"\"\n classdocs\n \"\"\"\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @staticmethod\n def __recast(value):\n try:\n return int(value)\n except ValueError:\n pass\n\n try:\n return float(value)\n except ValueError:\n pass\n\n return value\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __init__(self, filename=None):\n \"\"\"\n Constructor\n \"\"\"\n self.__filename = filename\n self.__file = sys.stdin if self.__filename is None else open(self.__filename, \"r\")\n\n self.__reader = _csv.reader(self.__file)\n self.__header = next(self.__reader)\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def close(self):\n if self.__filename is None:\n return\n\n self.__file.close()\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @property\n def rows(self):\n for row in self.__reader:\n datum = CSVDict.as_dict(self.__header, [CSVReader.__recast(cell) for cell in row])\n\n yield JSONify.dumps(datum)\n\n\n @property\n def filename(self):\n return self.__filename\n\n\n @property\n def header(self):\n return self.__header\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __str__(self, *args, **kwargs):\n return \"CSVReader:{filename:%s, header:%s}\" % (self.filename, self.header)\n" }, { "alpha_fraction": 0.42914438247680664, "alphanum_fraction": 0.5223930478096008, "avg_line_length": 20.517986297607422, "blob_id": "6442f4510f021425fa59e2084d9cebbde3008ada", "content_id": "71088a40ca278ec01300e1a4b4cff275924e4a70", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2992, "license_type": "permissive", "max_line_length": 118, "num_lines": 139, "path": "/tests/position/nmea_test.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n\n\"\"\"\nCreated on 31 Dec 2016\n\n@author: Bruno Beloff ([email protected])\n\"\"\"\n\nfrom scs_core.position.gpgga import GPGGA\nfrom scs_core.position.gpgll import GPGLL\nfrom scs_core.position.gpgsa import GPGSA\nfrom scs_core.position.gpgsv import GPGSV\nfrom scs_core.position.gprmc import GPRMC\nfrom scs_core.position.gpvtg import GPVTG\nfrom scs_core.position.nmea_sentence import NMEASentence\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nline = 
\"$GPGGA,103228.00,5049.37823,N,00007.37872,W,2,07,1.85,34.0,M,45.4,M,,0000*75\"\nprint(line)\n\ns = NMEASentence.construct(line)\nprint(s)\n\nmessage = GPGGA.construct(s)\nprint(message)\nprint(\"-\")\n\nlat = message.loc.deg_lat()\nlng = message.loc.deg_lng()\nprint(\"%f, %f\" % (lat, lng))\nprint(\"-\")\n\nprint(message.time.as_iso8601())\nprint(\"=\")\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nline = \"$GPGLL,5049.37823,N,00007.37872,W,103228.00,A,D*7F\"\nprint(line)\n\ns = NMEASentence.construct(line)\nprint(s)\n\nmessage = GPGLL.construct(s)\nprint(message)\nprint(\"-\")\n\nlat = message.loc.deg_lat()\nlng = message.loc.deg_lng()\nprint(\"%f, %f\" % (lat, lng))\nprint(\"-\")\n\nprint(message.time.as_iso8601())\nprint(\"=\")\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nline = \"$GPGSV,3,1,10,23,38,230,44,29,71,156,47,07,29,116,41,08,09,081,36*7F\"\nprint(line)\n\ns = NMEASentence.construct(line)\nprint(s)\n\nmessage = GPGSV.construct(s)\nprint(message)\nprint(\"-\")\n\n\nline = \"$GPGSV,3,2,10,10,07,189,,05,05,220,,09,34,274,42,18,25,309,44*72\"\nprint(line)\n\ns = NMEASentence.construct(line)\nprint(s)\n\nmessage = GPGSV.construct(s)\nprint(message)\nprint(\"-\")\n\n\nline = \"$GPGSV,3,3,10,26,82,187,47,28,43,056,46*77\"\nprint(line)\n\ns = NMEASentence.construct(line)\nprint(s)\n\nmessage = GPGSV.construct(s)\nprint(message)\nprint(\"=\")\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nline = \"$GPGSA,A,3,23,29,07,08,09,18,26,28,,,,,1.94,1.18,1.54*0D\"\nprint(line)\n\ns = NMEASentence.construct(line)\nprint(s)\n\nmessage = GPGSA.construct(s)\nprint(message)\nprint(\"=\")\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nline = \"$GPRMC,083559.00,A,4717.11437,N,00833.91522,E,0.004,77.52,091202,,,A*57\"\nprint(line)\n\ns = NMEASentence.construct(line)\nprint(s)\n\nmessage = GPRMC.construct(s)\nprint(message)\nprint(\"-\")\n\nlat = message.loc.deg_lat()\nlng = message.loc.deg_lng()\nprint(\"%f, %f\" % (lat, lng))\nprint(\"-\")\n\nprint(message.datetime.as_iso8601())\nprint(\"=\")\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nline = \"$GPVTG,77.52,T,,M,0.004,N,0.008,K,A*06\"\nprint(line)\n\ns = NMEASentence.construct(line)\nprint(s)\n\nmessage = GPVTG.construct(s)\nprint(message)\nprint(\"=\")\n\n" }, { "alpha_fraction": 0.43145328760147095, "alphanum_fraction": 0.43351173400878906, "avg_line_length": 26.29213523864746, "blob_id": "af5cd2b33c90787365eb4b3d1ae3f86bd0947182", "content_id": "f7db9a606c3ceca4b8071985eceab404eda9dd06", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2429, "license_type": "permissive", "max_line_length": 118, "num_lines": 89, "path": "/src/scs_core/csv/csv_writer.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on 2 Aug 2016\n\n@author: Bruno Beloff ([email protected])\n\"\"\"\n\nimport _csv\nimport json\nimport os\nimport sys\n\nfrom collections import OrderedDict\n\nfrom scs_core.csv.csv_dict import CSVDict\nfrom scs_core.csv.csv_logger import CSVLogger\n\n\n# TODO: batch mode, where all rows are scanned for header fields and the data not released until input 
is complete\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass CSVWriter(object):\n \"\"\"\n classdocs\n \"\"\"\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __init__(self, filename=None, cache=False, append=False):\n \"\"\"\n Constructor\n \"\"\"\n self.__filename = filename\n self.__cache = cache\n\n self.__has_header = False\n\n if self.__filename is None:\n self.__append = False\n\n self.__file = sys.stdout\n self.__writer = _csv.writer(self.__file)\n else:\n self.__append = append and os.path.exists(self.__filename)\n\n self.__file = open(self.__filename, \"a\" if self.__append else \"w\")\n self.__writer = CSVLogger(self.__file) if cache else _csv.writer(self.__file)\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def write(self, jstr):\n if not jstr:\n return\n\n jdict = json.loads(jstr, object_pairs_hook=OrderedDict)\n datum = CSVDict(jdict)\n\n if not self.__has_header and not self.__append:\n self.__writer.writerow(datum.header)\n self.__has_header = True\n\n self.__writer.writerow(datum.row)\n\n if not self.__cache:\n self.__file.flush()\n\n\n def close(self):\n if self.filename is None:\n return\n\n if self.__cache:\n self.__writer.flush()\n\n self.__file.close()\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @property\n def filename(self):\n return self.__filename\n\n\n # ----------------------------------------------------------------------------------------------------------------\n\n def __str__(self, *args, **kwargs):\n return \"CSVWriter:{filename:%s, cache:%s, append:%s}\" % (self.filename, self.__cache, self.__append)\n" }, { "alpha_fraction": 0.5324324369430542, "alphanum_fraction": 0.5986486673355103, "avg_line_length": 20.14285659790039, "blob_id": "5e063a2534f063b626db4d9a6bdf808357db8d2f", "content_id": "af051136513a60d2cf8cc35708a45856b615bc03", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 740, "license_type": "permissive", "max_line_length": 118, "num_lines": 35, "path": "/tests/position/gps_location_test.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n\n\"\"\"\nCreated on 10 Jan 2017\n\n@author: Bruno Beloff ([email protected])\n\ncommand line example:\n./test/exception_report_test.py 2>> ex.log\n\"\"\"\n\nfrom scs_core.data.json import JSONify\n\nfrom scs_core.position.gpgga import GPGGA\nfrom scs_core.position.gps_location import GPSLocation\nfrom scs_core.position.nmea_sentence import NMEASentence\n\n\n# --------------------------------------------------------------------------------------------------------------------\n# run...\n\ns = NMEASentence.construct(\"$GPGGA,092725.00,4717.11399,N,00833.91590,E,1,08,1.01,499.6,M,48.0,M,,*5B\")\nprint(s)\nprint(\"-\")\n\ngga = GPGGA.construct(s)\nprint(gga)\nprint(\"-\")\n\nloc = GPSLocation.construct(gga)\nprint(loc)\nprint(\"-\")\n\nprint(JSONify.dumps(loc))\nprint(\"-\")\n" }, { "alpha_fraction": 0.4890633821487427, "alphanum_fraction": 0.49495232105255127, "avg_line_length": 21.287500381469727, "blob_id": "e91b0804e6857ed5204fde01bf70019fdedd9ec0", "content_id": "f88cbc0ffa58a288b9d7f09cfc39a19861975f1f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": 
"Python", "length_bytes": 3566, "license_type": "permissive", "max_line_length": 118, "num_lines": 160, "path": "/src/scs_core/data/datum.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on 24 Sep 2016\n\n@author: Bruno Beloff ([email protected])\n\"\"\"\n\nimport math\nimport struct\n\nfrom datetime import date\n\nfrom scs_core.data.localized_datetime import LocalizedDatetime\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass Datum(object):\n \"\"\"\n classdocs\n \"\"\"\n\n # ----------------------------------------------------------------------------------------------------------------\n # encode byte array...\n\n @staticmethod\n def encode_int(value):\n unpacked = struct.unpack('BB', struct.pack('h', int(value)))\n\n return unpacked\n\n\n @staticmethod\n def encode_unsigned_int(value):\n unpacked = struct.unpack('BB', struct.pack('H', int(value)))\n\n return unpacked\n\n\n @staticmethod\n def encode_unsigned_long(value):\n unpacked = struct.unpack('BBBB', struct.pack('L', int(value)))\n\n return unpacked\n\n\n @staticmethod\n def encode_float(value):\n unpacked = struct.unpack('BBBB', struct.pack('f', float(value)))\n\n return unpacked\n\n\n # ----------------------------------------------------------------------------------------------------------------\n # decode byte array...\n\n @staticmethod\n def decode_int(byte_values):\n packed = struct.unpack('h', struct.pack('BB', *byte_values))\n return packed[0]\n\n\n @staticmethod\n def decode_unsigned_int(byte_values):\n packed = struct.unpack('H', struct.pack('BB', *byte_values))\n return packed[0]\n\n\n @staticmethod\n def decode_unsigned_long(byte_values):\n packed = struct.unpack('L', struct.pack('BBBB', *byte_values))\n return packed[0]\n\n\n @staticmethod\n def decode_float(byte_values):\n packed = struct.unpack('f', struct.pack('BBBB', *byte_values))\n\n return None if math.isnan(packed[0]) else packed[0]\n\n\n @staticmethod\n def decode_double(byte_values):\n packed = struct.unpack('d', struct.pack('BBBBBBBB', *byte_values))\n\n return None if math.isnan(packed[0]) else packed[0]\n\n\n # ----------------------------------------------------------------------------------------------------------------\n # cast or None...\n\n @staticmethod\n def bool(field):\n if field is None:\n return None\n\n try:\n value = bool(field)\n except ValueError:\n return None\n\n return value\n\n\n @staticmethod\n def float(number, ndigits):\n if number is None:\n return None\n\n try:\n value = float(number)\n except ValueError:\n return None\n\n return round(value, ndigits)\n\n\n @staticmethod\n def int(number):\n if number is None:\n return None\n\n try:\n value = float(number)\n except ValueError:\n return None\n\n return int(value)\n\n\n @staticmethod\n def date(iso_date):\n if iso_date is None:\n return None\n\n parts = iso_date.split(\"-\")\n\n if len(parts) != 3:\n return None\n\n try:\n year = int(float(parts[0]))\n month = int(float(parts[1]))\n day = int(float(parts[2]))\n except ValueError:\n return None\n\n return date(year, month, day)\n\n\n @staticmethod\n def datetime(iso_datetime):\n if iso_datetime is None:\n return None\n\n try:\n value = LocalizedDatetime.construct_from_iso8601(iso_datetime)\n except ValueError:\n return None\n\n return value\n" }, { "alpha_fraction": 0.3520140051841736, "alphanum_fraction": 0.360770583152771, "avg_line_length": 19.39285659790039, "blob_id": "8d74e38aaac8fb1ce9da1f624e7d9ee2f0fe9cdc", 
"content_id": "d36b90964bfdbe7bfb2d3c0a313cdcac5d5e79e0", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 571, "license_type": "permissive", "max_line_length": 118, "num_lines": 28, "path": "/src/scs_core/sync/runner.py", "repo_name": "motius/scs_core", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on 1 Jul 2017\n\n@author: Bruno Beloff ([email protected])\n\nA device able to control a sampling process, by whatever method.\n\"\"\"\n\nfrom abc import abstractmethod\n\n\n# --------------------------------------------------------------------------------------------------------------------\n\nclass Runner(object):\n \"\"\"\n classdocs\n \"\"\"\n\n # ----------------------------------------------------------------------------------------------------------------\n\n @abstractmethod\n def reset(self):\n pass\n\n\n @abstractmethod\n def samples(self, sampler):\n pass\n" } ]
31
astahlman/journal-app
https://github.com/astahlman/journal-app
3b74b5082f2088772ef461530c8a779f55afa3eb
9f6ee5ad4e4f228697ca5e62c1c4c0410a1dfb0b
65f790bdd22938c561c404f963d2ff2f8ace0829
refs/heads/master
2021-07-03T21:20:39.869099
2012-09-16T23:38:53
2012-09-16T23:38:53
5,374,064
0
0
null
2012-08-10T20:19:11
2014-06-10T01:43:14
2021-06-10T17:43:40
JavaScript
[ { "alpha_fraction": 0.6892856955528259, "alphanum_fraction": 0.6892856955528259, "avg_line_length": 34, "blob_id": "a721d5144728fdda138291f46bb41a3d650b2770", "content_id": "007801fac8369b2b74996a4457f26ba5ed5a4b68", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 280, "license_type": "no_license", "max_line_length": 60, "num_lines": 8, "path": "/journal/journal_app/custom_json.py", "repo_name": "astahlman/journal-app", "src_encoding": "UTF-8", "text": "from django.utils import simplejson\nimport datetime\nclass JSONEncoder(simplejson.JSONEncoder):\n def default(self, obj):\n if isinstance(obj, datetime.datetime):\n return obj.isoformat()\n else:\n return simplejson.JSONEncoder.default(self, obj)\n" }, { "alpha_fraction": 0.5784675478935242, "alphanum_fraction": 0.6153678894042969, "avg_line_length": 38.376068115234375, "blob_id": "cd2621afc9d7597742f1dcca8a0a4ea57367fbcc", "content_id": "cb6301e13ec43e21e4825c6b15c16859b8ada04d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 4607, "license_type": "no_license", "max_line_length": 83, "num_lines": 117, "path": "/assets/js/testing/NodeSearcherTests.js", "repo_name": "astahlman/journal-app", "src_encoding": "UTF-8", "text": "requirejs.config({\n //By default load any module IDs from js/lib\n baseUrl: '../static/js',\n //except, if the module ID starts with \"app\",\n //load it from the js/app directory. paths\n //config is relative to the baseUrl, and\n //never includes a \".js\" extension since\n //the paths config could be for a directory.\n paths: {\n app: './app',\n\t\tlib: './lib',\n }\n});\n\nrequirejs(['app/Models', 'app/UtilityFunctions', 'app/NodeSearcher'],\nfunction (Models, UtilityFunctions, NodeSearcher) {\n\n\ntest (\"NodeSearcher ParsePattern Tests\", function () {\n\tvar p = '#A | TestContent | [,8/27/2012]';\n\tvar r = NodeSearcher.parsePattern(p);\n\tok (r.tags.length === 1, \"Parsed correct number of tags\");\n\tok (r.tags[0] === 'A', \"Parsed tag ok\");\n\tok (r.keywords.length === 1, \"Parsed correct number of keywords\");\n\tok (r.keywords[0] === \"TestContent\", \"Parsed correct keyword\");\n\tok (r.before === \"8/27/2012\" && !r.after, \"Parsed date range ok.\");\n\t\t\n\tp = 'Some content | #B | [8/27/2012,]';\n\tr = NodeSearcher.parsePattern(p);\n\tok (r.tags.length === 1, \"Parsed correct number of tags\");\n\tok (r.tags[0] === 'B', \"Parsed tag ok\");\n\tok (r.keywords.length === 2, \"Parsed correct number of keywords\");\n\tok (r.keywords[0] === \"Some\", \"Parsed correct keyword\");\n\tok (r.keywords[1] === \"content\", \"Parsed correct keyword\");\n\tok (r.after === \"8/27/2012\" && !r.before, \"Parsed date range ok.\");\n\t\n\tp = 'Some content | #B #C | [8/27/2012,]';\n\tr = NodeSearcher.parsePattern(p);\n\tok (r.tags.length === 2, \"Parsed correct number of tags\");\n\tok (r.tags[0] === 'B', \"Parsed tag ok\");\n\tok (r.tags[1] === 'C', \"Parsed tag ok\");\n\tok (r.keywords.length === 2, \"Parsed correct number of keywords\");\n\tok (r.keywords[0] === \"Some\", \"Parsed correct keyword\");\n\tok (r.keywords[1] === \"content\", \"Parsed correct keyword\");\n\tok (r.after === \"8/27/2012\" && !r.before, \"Parsed date range ok.\");\n\t\n\tp = 'More content | [8/27/2012,]';\n\tr = NodeSearcher.parsePattern(p);\n\tok (r.tags.length === 0, \"Parsed correct number of tags\");\n\tok (r.keywords.length === 2, \"Parsed tag ok\");\n\tok (r.keywords[0] === \"More\", \"Parsed correct 
keyword\");\n\tok (r.keywords[1] === \"content\", \"Parsed correct keyword\");\n\tok (r.after === \"8/27/2012\" && !r.before, \"Parsed date range ok.\");\n\t\t\n\tp = '#C | [8/27/2012,]';\n\tr = NodeSearcher.parsePattern(p);\n\tok (r.tags.length === 1, \"Parsed correct number of tags\");\n\tok (r.tags[0] === 'C', \"Parsed tag ok\");\n\tok (r.keywords.length === 0, \"Parsed correct number of keywords\");\n\tok (r.after === \"8/27/2012\" && !r.before, \"Parsed date range ok.\");\n\t\n\tp = '[8/27/2012,]';\n\tr = NodeSearcher.parsePattern(p);\n\tok (r.tags.length === 0, \"Parsed correct number of tags\");\n\tok (r.keywords.length === 0, \"Parsed correct number of keywords\");\n\tok (r.after === \"8/27/2012\" && !r.before, \"Parsed date range ok.\");\n\t\t\n\tp = '\\\\[some keywords\\\\] | [8/27/2012]';\n\tr = NodeSearcher.parsePattern(p);\n\tok (r.tags.length === 0, \"Parsed correct number of tags\");\n\tok (r.keywords.length === 2, \"Parsed correct number of keywords\");\n\tok (r.keywords[0] === '\\\\[some', \"Parsed keyword ok\");\n\tok (r.keywords[1] === 'keywords\\\\]', \"Parsed keyword ok\");\n\tok (r.after === '8/27/2012' && r.before === '8/27/2012', \"Parsed date range ok.\");\n\n\tp = 'some search \\\\|\\\\| | #A';\n\tr = NodeSearcher.parsePattern(p);\n\tok (r.tags.length === 1, \"Parsed correct number of tags\");\n\tok (r.tags[0] === 'A', \"Parsed tag ok\");\n\tok (r.keywords.length === 3, \"Parsed correct number of keywords\");\n\tok (r.keywords[0] === 'some', \"Parsed keyword ok\");\n\tok (r.keywords[1] === 'search', \"Parsed keyword ok\");\n\tok (r.keywords[2] === '\\\\|\\\\|', \"Parsed keyword ok\");\n\tok (!r.after && !r.before, \"Parsed date range ok.\");\n\n\tp = '\\\\#SomeSearch';\n\tr = NodeSearcher.parsePattern(p);\n\tok (r.tags.length === 0, \"Parsed correct number of tags\");\n\tok (r.keywords.length === 1, \"Parsed correct number of keywords\");\n\tok (r.keywords[0] === '\\\\#SomeSearch', \"Parsed keyword ok\");\n\tok (!r.after && !r.before, \"Parsed date range ok.\");\n\n\tp = '|';\n\tr = NodeSearcher.parsePattern(p);\n\tok (!r, \"Invalid pattern returned null.\")\n\n\tp = ' | ';\n\tr = NodeSearcher.parsePattern(p);\n\tok (!r, \"Invalid pattern returned null.\")\n\n\tp = 'someKey | anotherKey';\n\tr = NodeSearcher.parsePattern(p);\n\tok (!r, \"Invalid pattern returned null.\")\n\t\n\tp = '#someTag | #anotherTag';\n\tr = NodeSearcher.parsePattern(p);\n\tok (!r, \"Invalid pattern returned null.\")\n\n\tp = '[,8/27/2012] | [8/25/2012,]';\n\tr = NodeSearcher.parsePattern(p);\n\tok (!r, \"Invalid pattern returned null.\")\n\t\n\tp = '[,8/27/2012] [8/25/2012,]';\n\tr = NodeSearcher.parsePattern(p);\n\tok (!r, \"Invalid pattern returned null.\")\n});\n});\n" }, { "alpha_fraction": 0.6882007718086243, "alphanum_fraction": 0.6908702850341797, "avg_line_length": 49.621620178222656, "blob_id": "9a23dacb8c0b07bbeae33b59d4532fc059bff48a", "content_id": "422308d2af54365effb72500d583346ef3b31c6a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1873, "license_type": "no_license", "max_line_length": 102, "num_lines": 37, "path": "/journal/urls.py", "repo_name": "astahlman/journal-app", "src_encoding": "UTF-8", "text": "from django.conf.urls import patterns, include, url\nfrom django.conf import settings\n\n# Uncomment the next two lines to enable the admin:\n# from django.contrib import admin\n# admin.autodiscover()\nhandler404 = 'journal.journal_app.views.not_found'\n\nurlpatterns = patterns('',\n # Examples:\n # url(r'^$', 
'journal_site.views.home', name='home'),\n # url(r'^journal_site/', include('journal_site.foo.urls')),\n\n # Uncomment the admin/doc line below to enable admin documentation:\n # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),\n\n # Uncomment the next line to enable the admin:\n # url(r'^admin/', include(admin.site.urls)),\n\turl(r'^$', 'journal.journal_app.views.splash'),\n\turl(r'^about/', 'journal.journal_app.views.about'),\n\turl(r'^save_entry/', 'journal.journal_app.views.save_entry'),\n\turl(r'^save_snippet/', 'journal.journal_app.views.save_snippet'),\n\turl(r'^get_snippets/', 'journal.journal_app.views.get_snippets'),\n\turl(r'^write_entry/', 'journal.journal_app.views.write_entry'),\n\turl(r'^read_entry/', 'journal.journal_app.views.read_entry'),\n\turl(r'^get_entries/', 'journal.journal_app.views.get_entries'),\n\turl(r'^get_node/', 'journal.journal_app.views.get_node'),\n\turl(r'^search_nodes/', 'journal.journal_app.views.search_nodes'),\n\turl(r'^toggle_public/', 'journal.journal_app.views.toggle_public'),\n\turl(r'^public/(?P<id_string>[A-Za-z0-9]+)/$', 'journal.journal_app.views.get_public_node'),\n\turl(r'^accounts/login/$', 'django.contrib.auth.views.login'),\n\turl(r'^accounts/logout/$', 'django.contrib.auth.views.logout', {'next_page' : '/'}),\n\turl(r'^accounts/register/$', 'journal.journal_app.views.register'),\n\turl(r'^comments/$', 'journal.journal_app.views.comments'),\n\turl(r'^tests/$', 'journal.journal_app.views.tests'),\n\turl(r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root' : settings.STATIC_ROOT}),\n)\n" }, { "alpha_fraction": 0.48678213357925415, "alphanum_fraction": 0.4940747618675232, "avg_line_length": 23.377777099609375, "blob_id": "d2416e0259a77235488914911e55b02440555317", "content_id": "33d7aaf0e2481f9d2a90f13d67dac40ed14104c0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "HTML", "length_bytes": 1097, "license_type": "no_license", "max_line_length": 86, "num_lines": 45, "path": "/templates/registration/register.html", "repo_name": "astahlman/journal-app", "src_encoding": "UTF-8", "text": "{% extends \"base.html\" %}\n\n{% block title %}Register{% endblock %}\n\n{% block content %}\n\t<div class=\"container\">\n\t{% if errors %}\n\t\t<div class=\"row\">\t\n\t\t\t<div class=\"span8 offset2\">\n\t\t\t\t{% if errors %}\n\t\t\t\t\t<div class='alert alert-error'>\n\t\t\t\t\t\t<h2>Oops. 
Looks like something went wrong...</h2>\n\t\t\t\t\t\t<ul>\n\t\t\t\t\t\t{% for err in errors %}\n\t\t\t\t\t\t\t<li>{{ err }}</li>\t\n\t\t\t\t\t\t{% endfor %}\n\t\t\t\t\t\t</ul>\n\t\t\t\t\t</div>\n\t\t\t\t{% endif %}\n\t\t\t</div>\n\t\t</div>\n\t{% endif %}\n\n\t\t<div class=\"row\">\n\t\t\t<div class=\"span8 offset2\">\n\t\t\t\t<form class=\"well\" method=\"post\" action=\"{% url journal_app.views.register %}\">\n\t\t\t\t\t{% csrf_token %}\n\t\t\t\t\t<!-- Custom form display -->\n\t\t\t\t\t{% for field in form %}\n\t\t\t\t\t\t<div class=\"fieldWrapper\">\n\t\t\t\t\t\t\t<p><label for=\"id_{{ field.html_name }}\"><h3>{{ field.label }}</h3></label></p>\n\t\t\t\t\t\t\t</p>{{ field }}</p>\n\t\t\t\t\t\t\t<p><small>{{ field.help_text }}</small></p>\n\t\t\t\t\t\t\t{% if field.errors %}\n\t\t\t\t\t\t\t\t<p>{{ field.errors }}</p>\n\t\t\t\t\t\t\t{% endif %}\n\t\t\t\t\t\t</div>\n\t\t\t\t\t{% endfor %}\n\t\t\t\t\t<input type=\"submit\" class=\"btn\" value='Sign in'/>\n\t\t\t\t</form>\n\t\t\t</div>\n\t\t</div>\n\n\t</div>\n{% endblock %}\n" }, { "alpha_fraction": 0.6235551238059998, "alphanum_fraction": 0.6254295706748962, "avg_line_length": 23.25, "blob_id": "5fe745a0638b1276c90b4bcdf46de8a003675e87", "content_id": "4206252c508fbe593e107003fb096b0fca09c158", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 3201, "license_type": "no_license", "max_line_length": 95, "num_lines": 132, "path": "/assets/js/app/Models.js", "repo_name": "astahlman/journal-app", "src_encoding": "UTF-8", "text": "define (function (require, exports, module) {\n\tvar UtilityFunctions = require('./UtilityFunctions');\n\t// start is inclusive, end is exclusive\n\tfunction Position (lineIndexIn, offsetIn) {\n\t\tthis.lineIndex = lineIndexIn;\n\t\tthis.offset = offsetIn;\n\t}\n\t\n\tfunction Range (startIn, endIn) {\n\t\tthis.start = startIn;\n\t\tthis.end = endIn;\n\t}\n\t\n\tRange.prototype.setStart = function (startIn) {\n\t\tif (startIn) {\n\t\t\tthis.start = { \n\t\t\t\tlineIndex : startIn.lineIndex, \n\t\t\t\toffset : startIn.offset, \n\t\t\t};\n\t\t}\n\t}\n\n\tRange.prototype.setEnd = function (endIn) {\n\t\tif (endIn) {\n\t\t\tthis.end = {\n\t\t\t\tlineIndex : endIn.lineIndex,\n\t\t\t\toffset : endIn.offset,\n\t\t\t};\n\t\t}\n\t}\n\n\tRange.prototype.containsPosition = function (pos) {\n\t\tif (this.end) {\n\t\t\tif (this.start.lineIndex < pos.lineIndex && this.end.lineIndex > pos.lineIndex) {\n\t\t\t\treturn true;\n\t\t\t} else if (this.start.lineIndex === pos.lineIndex || this.end.lineIndex === pos.lineIndex) {\n\t\t\t\treturn (this.start.offset <= pos.offset && this.end.offset >= pos.offset);\n\t\t\t} else {\n\t\t\t\treturn false;\n\t\t\t}\t\n\t\t} else { \n\t\t\tif (this.start.lineIndex < pos.lineIndex) {\n\t\t\t\treturn true;\n\t\t\t} else if (this.start.lineIndex === pos.lineIndex) {\n\t\t\t\treturn this.start.offset <= pos.offset;\n\t\t\t}\n\t\t}\n\t}\n\n\tRange.prototype.isValidRange = function () {\n\t\tif (this.start.offset < 0 || this.end.offset < 0) { \n\t\t\treturn false;\n\t\t}\n\t\tif (this.start.offset > this.end.offset && this.start.lineIndex === this.end.lineIndex) {\n\t\t\treturn false;\n\t\t}\n\t\tif (this.start.lineIndex > this.end.lineIndex) {\n\t\t\treturn false;\n\t\t}\n\t\tif (this.start.lineIndex < 0 || this.end.lineIndex < 0) {\n\t\t\treturn false;\n\t\t}\n\t\treturn true;\n\t}\n\n\tfunction Node (token, levelIn, rangeIn) {\n\t\tvar types = { \n\t\t\t\"tag\" : [\"tagOpen\", \"tagClose\"], \n\t\t\t\"ignore\" : [\"ignoreOpen\", 
\"ignoreClose\"],\n\t\t\t\"define\" : [\"defineOpen\", \"defineClose\"],\n\t\t\t\"content\" : [\"content\"],\n\t\t};\n\t\tthis.range = rangeIn || new Range();\n\t\tthis.nodeContent = \"\";\n\t\tthis.children = [];\n\t\t// Note: != catches null and undefined because it tries to \n\t\t// typecast levelIn for comparison, resulting in undefined\n\t\tthis.level = (levelIn != undefined) ? levelIn : -1;\n\t\tvar typeMatch;\n\t\tif (token) {\n\t\t\tfor (key in types) {\n\t\t\t\ttypes[key].forEach(function (t) {\n\t\t\t\t\tif (t === token.type) {\n\t\t\t\t\t\ttypeMatch = key;\t\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t}\n\t\t\tthis.nodeType = typeMatch;\n\t\t\tthis.nodeVal = token.val;\n\t\t}\n\t}\n\t\n\tNode.prototype.addChild = function (node) {\n\t\tthis.children.push(node);\n\t\tnode.parentNode = this;\n\t}\n\n\tNode.prototype.isNodeEqual = function (n) {\n\t\tif (!n) {\n\t\t\treturn false;\n\t\t}\n\t\treturn n.nodeVal === this.nodeVal && n.nodeType === this.nodeType;\n\t}\n\n\tNode.prototype.isTreeEqual = function (root) {\n\t\tvar count = this.children.length;\n\t\tif (count !== root.children.length) {\n\t\t\treturn false;\n\t\t} else if (!root) {\n\t\t\treturn false;\n\t\t} else if (!this.isNodeEqual(root)) {\n\t\t\treturn false;\n\t\t}\n\t\tfor (var i = 0; i < count; i++) {\n\t\t\tif (!this.children[i].isTreeEqual(root.children[i])) {\n\t\t\t\treturn false;\n\t\t\t}\n\t\t}\n\t\treturn true;\n\t}\n\n\tNode.prototype.traverse = function (func) {\n\t\tfunc(this);\n\t\tthis.children.forEach(function (child) {\n\t\t\tchild.traverse(func);\n\t\t});\t\n\t}\n\n\texports.Node = Node;\n\texports.Position = Position;\n\texports.Range = Range; \n});\n" }, { "alpha_fraction": 0.7598627805709839, "alphanum_fraction": 0.7598627805709839, "avg_line_length": 71.75, "blob_id": "f661eb7f3e4dd3ac3ab84b3cea0beb2293c5ca0d", "content_id": "f4110921dac05dee318d5b6bb8e91f7ce4389331", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 583, "license_type": "no_license", "max_line_length": 271, "num_lines": 8, "path": "/Readme.md", "repo_name": "astahlman/journal-app", "src_encoding": "UTF-8", "text": "# DearQwerty #\n\n## About ##\n___________\n\n<a href=\"http://dearqwerty.herokuapp.com/\">DearQwerty</a> is a hacker's journal web app. Users write entries using a simple, custom markup language which gets parsed in real-time. Entries are logically subdivided into tagged nodes to enable better searching and viewing. \n\nDearQwerty is now <a href=\"http://dearqwerty.herokuapp.com/\">live on Heroku.</a> The project represents my endeavor to learn Javascript. The backend is implemented (very messily) in Django. 
If anyone is interested in contributing, pull requests are more than welcome.\n\n" }, { "alpha_fraction": 0.6888781189918518, "alphanum_fraction": 0.692633867263794, "avg_line_length": 32.53658676147461, "blob_id": "4b1b2282f4fcd9da4dda3200875aa5dec4c5e1ee", "content_id": "f6c477978f2d79d36702fd07c00665b0d1e16b09", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8254, "license_type": "no_license", "max_line_length": 161, "num_lines": 246, "path": "/journal/journal_app/models.py", "repo_name": "astahlman/journal-app", "src_encoding": "UTF-8", "text": "from django.db import models\nfrom django.contrib.auth.models import User\nfrom django.db.models.signals import post_save\nfrom django.db.models import Q\nfrom django.core.exceptions import ObjectDoesNotExist\nfrom django.utils.timezone import now\nfrom id_encoder import IdEncoder\nimport datetime\nimport logging\n\nclass NodeManager(models.Manager):\n\t\n\tdef get_public_node(self, s):\n\t\ttry:\n\t\t\treturn Node.objects.get(publicID=s) #TODO: Node.objects == self?\n\t\texcept ObjectDoesNotExist:\n\t\t\treturn None\n\nclass Node(models.Model):\n\tparent = models.ForeignKey('self', null=True, related_name='children')\n\tnodeVal = models.TextField()\n\tnodeType = models.CharField(max_length=75)\n\tnodeContent = models.TextField()\n\tlevel = models.IntegerField()\n\tindex = models.IntegerField()\n\tcontainingEntry = models.ForeignKey('Entry', related_name='nodes')\n\tpublicID = models.CharField(max_length=75, null=True)\n\n\tobjects = NodeManager()\n\n\tdef __unicode__(self):\n\t\treturn '{0} @ Lvl{1}: {2}'.format(self.nodeType, self.level, self.nodeVal)\n\n\tdef get_dict(self):\n\t\t\"\"\"Returns a dictionary for serialization.\"\"\"\n\t\td = {}\n\t\td['index'] = self.index\n\t\td['nodeVal'] = self.nodeVal\n\t\td['nodeType'] = self.nodeType\n\t\td['nodeContent'] = self.nodeContent\n\t\td['level'] = self.level\n\t\td['containingEntryNum'] = self.containingEntry.entryNum\n\t\td['publicID'] = self.publicID\n\t\td['children'] = []\n\t\tfor c in self.children.all().order_by('index'):\n\t\t\td['children'].append(c.get_dict())\n\t\treturn d\n\n\t@classmethod\n\tdef build_subtree(cls, index, nodeVal, nodeType, nodeContent, level, children, entry):\n\t\troot = cls.objects.create(index=index, nodeVal=nodeVal, nodeType=nodeType, nodeContent=nodeContent, level=level, containingEntry=entry)\n\t\ti = 0\n\t\tfor c in children:\n\t\t\troot.children.add(cls.build_subtree(i, c['nodeVal'], c['nodeType'], c['nodeContent'], c['level'], c['children'], entry))\n\t\t\ti += 1\n\t\treturn root\n\t\n\tdef make_public(self):\n\t\tself.publicID = IdEncoder.int_to_string(self.id)\n\t\tself.save()\n\n\tdef make_private(self):\n\t\tself.publicID = None \n\t\tself.save()\n\nclass Snippet(models.Model):\n\tname = models.CharField(max_length=50)\n\tcontent = models.TextField()\n\tauthor = models.ForeignKey(User, related_name=\"snippets\")\n\n\tdef get_dict(self):\n\t\td = {}\n\t\td['name'] = self.name\n\t\td['content'] = self.content\n\t\treturn d\n\nclass UserProfile(models.Model):\n\t\"\"\" Contains User data and provides methods to get entries for a User \"\"\"\n\tuser = models.OneToOneField(User)\n\tnumEntries = models.IntegerField()\n\tdateJoined = models.DateField(auto_now_add=True)\n\n\tdef get_snippets(self, names):\n\t\tsnips = Snippet.objects.filter(author=self.user)\n\t\tif names is not None and len(names) > 0:\n\t\t\tq = Q()\n\t\t\tfor n in names:\n\t\t\t\tq = q | Q(name=n)\n\t\t\tsnips = 
snips.filter(q)\n\t\treturn snips\n\n\tdef get_entries(self):\n\t\treturn Entry.objects.filter(author=self.user)\n\n\tdef get_last_entry(self):\n\t\tentries = self.get_entries()\n\t\tif len(entries) > 0:\n\t\t\tlogging.debug(\"Returning latest entry.\")\n\t\t\te = entries.latest('creationDate')\n\t\t\tlogging.debug(\"CreationDate=\" + e.creationDate.isoformat())\n\t\t\treturn e\n\t\treturn None\n\n\tdef get_entry_by_num(self, num):\n\t\tentries = self.get_entries()\n\t\tlogging.debug(\"User entries count: %d\" % entries.count())\n\t\tif len(entries) > 0:\n\t\t\ttry:\n\t\t\t\treturn entries.get(entryNum=num)\n\t\t\texcept ObjectDoesNotExist:\n\t\t\t\treturn None\n\t\treturn None\n\n\t# TODO: return all\n\tdef get_entry_by_date(self, datetimeIn):\n\t\tentries = self.get_entries()\n\t\tif len(entries) > 0:\n\t\t\tentries = entries.filter(creationDate__startswith=datetimeIn)\n\t\t\tif len(entries) >= 1:\n\t\t\t\treturn entries[0]\n\t\treturn None\n\n\tdef get_node(self, node_id):\n\t\t# Node has no author field; filter through the containing entry\n\t\treturn Node.objects.filter(containingEntry__author=self.user).filter(id=node_id)\n\n\tdef search_nodes(self, params):\n\t\tlogging.debug(\"Inside search nodes\")\n\t\tlogging.debug(\"Here are the params: \" + params.__str__())\n\t\tlogging.debug(\"Here are the params keys: \" + params.keys().__str__())\n\t\tnodes = Node.objects.filter(containingEntry__author=self.user)\n\t\tlogging.debug(\"All nodes for user: %d\" % len(nodes))\n\t\ttry:\n\t\t\tbefore = datetime.datetime.strptime(params['before'], '%m/%d/%Y')\n\t\t\tbefore = datetime.datetime.combine(before, datetime.time.max)\n\t\texcept Exception as e:\n\t\t\tlogging.debug(\"Error parsing before: %s - %s\" % (type(e), e))\n\t\t\tbefore = now() # time-zone aware\n\t\ttry:\n\t\t\tafter = datetime.datetime.strptime(params['after'], '%m/%d/%Y')\n\t\t\tafter = datetime.datetime.combine(after, datetime.time.min)\n\t\texcept Exception as e:\n\t\t\tlogging.debug(\"Error parsing after: %s - %s\" % (type(e), e))\n\t\t\tafter = self.dateJoined\n\t\tlogging.debug(\"Date range before: %s\" % before.strftime(\"%m-%d-%Y %H:%M\"))\n\t\tlogging.debug(\"Date range after: %s\" % after.strftime(\"%m-%d-%Y %H:%M\"))\n\t\tnodes = nodes.filter(containingEntry__creationDate__range=(after,before))\n\t\tlogging.debug(\"All nodes in date range: %d\" % len(nodes))\n\t\tif 'keywords' in params:\n\t\t\tq = Q()\n\t\t\tfor k in params['keywords']:\n\t\t\t\tq = q | Q(nodeContent__icontains=k)\n\t\t\tnodes = nodes.filter(q)\n\t\t\tlogging.debug(\"All nodes for keywords: %d\" % len(nodes))\n\t\tif 'tags' in params:\n\t\t\tq = Q()\n\t\t\tfor t in params['tags']:\n\t\t\t\tq = q | Q(nodeVal=t)\n\t\t\tnodes = nodes.filter(q)\n\t\t\tlogging.debug(\"All nodes for tags: %d\" % len(nodes))\n\t\tif 'entryNum' in params:\n\t\t\tnodes = nodes.filter(containingEntry__entryNum=params['entryNum'])\n\t\t\tlogging.debug(\"All nodes for entryNum: %d\" % len(nodes))\n\n\t\tresults = []\n\t\tfor n in nodes:\n\t\t\tpreview = n.nodeContent\n\t\t\tif (len(preview) > 25):\n\t\t\t\tpreview = preview[0:25] + '...'\n\t\t\tr = { 'nodeID' : n.id, 'nodePreview' : preview , 'date' : n.containingEntry.creationDate.strftime('%m/%d/%Y %H:%M'), 'entryNum' : n.containingEntry.entryNum }\n\t\t\tresults.append(r)\n\t\treturn results\n\n\tdef __unicode__(self):\n\t\treturn \"%s, joined on %s: %d entries\" % (str(self.user), self.dateJoined.isoformat(), self.numEntries)\n\n# register callback to create UserProfile on creation of User\ndef create_user_profile(sender, instance, created, **kwargs):\n\tif 
created:\n\t\tUserProfile.objects.create(user=instance, numEntries=0)\n\npost_save.connect(create_user_profile, sender=User)\n\nclass EntryManager(models.Manager):\n\t\"\"\"Custom Manager for Entry model.\"\"\"\n\tdef create_entry(self, rawText, author, rootData):\n\t\tlogging.debug(\"In create_entry. rootData = \" + str(rootData))\n\t\tlastEntry = author.get_profile().get_last_entry()\n\t\tnewEntryNum = lastEntry.entryNum + 1 if lastEntry is not None else 0\n\t\te = self.create(rawText=rawText, author=author, entryNum=newEntryNum, creationDate=now(), lastEditDate=now())\n\t\te.save()\n\t\te.treeRoot = Node.build_subtree(0, rootData['nodeVal'], rootData['nodeType'], rootData['nodeContent'], rootData['level'], rootData['children'], e) \n\t\te.save()\n\t\tauthor.get_profile().numEntries = newEntryNum + 1 # 0-indexed\n\t\tauthor.get_profile().save()\n\t\treturn e\n\n\tdef update_entry(self, entryNum, rawText, author, rootData):\n\t\ttry:\n\t\t\te = self.filter(author=author).get(entryNum=entryNum)\n\t\texcept ObjectDoesNotExist:\n\t\t\te = None\n\t\tif e is not None:\n\t\t\te.rawText = rawText\n\t\t\toldRoot = e.treeRoot\n\t\t\tlogging.debug(\"About to update the entry.\")\n\t\t\te.treeRoot = Node.build_subtree(0, rootData['nodeVal'], rootData['nodeType'], rootData['nodeContent'], rootData['level'], rootData['children'], e) \n\t\t\te.lastEditDate = now()\n\t\t\te.save()\n\t\t\t# make sure to delete AFTER resetting the entry rootNode so delete doesn't cascade\n\t\t\toldRoot.delete() \n\t\t\treturn e\n\n\tdef toggle_public(self, author, entryNum):\n\t\ttry:\n\t\t\te = self.filter(author=author).get(entryNum=entryNum)\n\t\texcept ObjectDoesNotExist:\n\t\t\treturn False\n\t\tn = e.treeRoot\n\t\tif n.publicID != None:\n\t\t\tn.make_private()\n\t\telse:\n\t\t\tn.make_public()\n\t\treturn n.publicID\n\nclass Entry(models.Model):\n\trawText = models.TextField()\n\tauthor = models.ForeignKey(User)\n\tentryNum = models.IntegerField()\n\tcreationDate = models.DateTimeField()\n\tlastEditDate = models.DateTimeField()\n\ttreeRoot = models.ForeignKey(Node, null=True)\n\t\n\tobjects = EntryManager() # install custom Manager\n\n\tdef get_dict(self):\n\t\t\"\"\"Returns a dictionary for serialization.\"\"\"\n\t\td = {}\n\t\td['rawText'] = self.rawText\n\t\td['creationDate'] = self.creationDate.isoformat()\n\t\td['entryNum'] = self.entryNum\n\t\td['treeRoot'] = self.treeRoot.get_dict()\n\t\treturn d\n\t\t\n\tdef __unicode__(self):\n\t\tl = min(25, len(self.rawText))\n\t\treturn self.creationDate.__str__() + \": \" + self.rawText[:l]\n\t\t\n\n" }, { "alpha_fraction": 0.7108257412910461, "alphanum_fraction": 0.7144795656204224, "avg_line_length": 38.912498474121094, "blob_id": "111a1ff77ed635a478de8c335688fbdad7859458", "content_id": "35e2f49c6e8f79270aee0561e8c3abee2b0140a2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9579, "license_type": "no_license", "max_line_length": 142, "num_lines": 240, "path": "/journal/journal_app/views.py", "repo_name": "astahlman/journal-app", "src_encoding": "UTF-8", "text": "# Create your views here.\nfrom django.utils import simplejson\nfrom journal.journal_app.models import Node, Entry, Snippet\nfrom django.contrib.auth.models import User\nfrom django.contrib.auth import authenticate, login\nfrom django.contrib.auth.forms import AuthenticationForm\nfrom django.http import HttpResponse, HttpResponseRedirect\nfrom django.shortcuts import render_to_response\nfrom django.template import RequestContext\nfrom 
django.contrib.auth.decorators import login_required\nfrom django.core.validators import validate_email\nfrom django.core.exceptions import ValidationError\nfrom journal.journal_app.custom_forms import RegisterForm, CommentsForm\nfrom dateutil.parser import parse as iso_date_parse\nfrom journal.journal_app.custom_json import JSONEncoder as customJSON\nfrom journal.journal_app.id_encoder import IdEncoder\nfrom django.conf import settings\nfrom django.http import Http404\nfrom django.core.mail import send_mail\nfrom journal.journal_app.email_helper import send_email\nimport logging\n\n@login_required\ndef save_entry(request):\n\tlogging.debug(\"Received request for save_entry\")\n\tresponseData = {'status' : 'fail'}\n\t# initialize here so the response below cannot hit a NameError on non-POST or empty requests\n\tsavedEntry = None\n\tprofile = request.user.get_profile()\n\tif request.method == 'POST':\n\t\tdata = simplejson.loads(request.raw_post_data)\n\t\tlogging.debug(\"Just deserialized data: %s\", data.__str__())\n\t\tif data['rawText'].__len__() > 0 and data['root'] is not None:\n\t\t\tentryNum = data.get('entryNum', None)\n\t\t\tchosenEntry = profile.get_entry_by_num(entryNum)\n\t\t\tlastEntry = profile.get_last_entry()\n\t\t\tif chosenEntry is not None:\n\t\t\t\tsavedEntry = Entry.objects.update_entry(data['entryNum'], data['rawText'], request.user, data['root'])\n\t\t\t\tresponseData['status'] = 'success'\n\t\t\telif entryNum < 0 or lastEntry is None or entryNum == lastEntry.entryNum + 1:\n\t\t\t\tlogging.debug(\"About to create entry: rootData = %s\", data['root'].__str__())\n\t\t\t\tsavedEntry = Entry.objects.create_entry(data['rawText'], request.user, data['root'])\n\t\t\t\tresponseData['status'] = 'success'\n\n\tresponseData['entryNum'] = savedEntry.entryNum if savedEntry is not None else -1\n\treturn HttpResponse(simplejson.dumps(responseData), mimetype=\"application/json\")\n\n@login_required\ndef write_entry(request):\n\tlogging.debug(\"Received request for write_entry\")\n\treturn render_to_response('write.html', {\"can_save\" : True}, context_instance=RequestContext(request))\n\n@login_required\ndef read_entry(request):\n\tlogging.debug(\"Received request for read_entry\")\n\treturn render_to_response('read.html', context_instance=RequestContext(request))\n\n@login_required\ndef get_entries(request):\n\tif request.method == 'GET':\n\t\te = None\n\t\tprofile = request.user.get_profile()\n\n\t\t# No GET params defaults to most recent entry\n\t\tif request.GET.items().__len__() == 0:\n\t\t\tlogging.debug(\"Getting most recent entry.\")\n\t\t\te = profile.get_last_entry()\n\t\telse:\n\t\t\ttry:\n\t\t\t\tentryNum = int(request.GET.get('entryNum', -1))\n\t\t\texcept ValueError:\n\t\t\t\tentryNum = -1\n\t\t\tcreationDate = request.GET.get('creationDate', '')\n\t\t\t# entryNum takes precedence over date\n\t\t\tif entryNum >= 0:\n\t\t\t\te = profile.get_entry_by_num(entryNum)\n\t\t\t\tlogging.debug(\"Just got e: \" + (e.get_dict().__str__() if e is not None else 'None'))\n\t\t\telif len(creationDate) > 0:\n\t\t\t\ttry:\n\t\t\t\t\tcreationDate = iso_date_parse(creationDate)\n\t\t\t\texcept ValueError:\n\t\t\t\t\tcreationDate = None\n\t\t\t\tif creationDate:\n\t\t\t\t\te = profile.get_entry_by_date(creationDate)\n\n\t\t# TODO: Allow for gte, lte, range of entryNums, etc., serialize to list\n\t\tresponseData = {}\n\t\tif e is not None:\n\t\t\tresponseData = e.get_dict()\n\t\t\teNext = Entry.objects.filter(author=request.user).filter(entryNum__gt=e.entryNum).order_by('entryNum')\n\t\t\tePrev = 
Entry.objects.filter(author=request.user).filter(entryNum__lt=e.entryNum).order_by('-entryNum')\n\t\t\tresponseData['nextEntry'] = eNext[0].entryNum if len(eNext) > 0 else -1\n\t\t\tresponseData['prevEntry'] = ePrev[0].entryNum if len(ePrev) > 0 else -1\n\t\telse:\n\t\t\tresponseData['nextEntry'] = -1\n\t\t\tresponseData['prevEntry'] = -1\n\tlogging.debug(\"About to json dump this entry: \" + responseData.__str__())\n\treturn HttpResponse(simplejson.dumps(responseData), mimetype=\"application/json\")\n\n@login_required\ndef search_nodes(request):\n\tresults = []\n\tif request.method == 'POST':\n\t\tparams = simplejson.loads(request.raw_post_data)\n\t\tlogging.debug(\"Just received a GET with params=\" + params.__str__())\n\t\tif params and len(params) > 0:\n\t\t\tresults = request.user.get_profile().search_nodes(params)\n\n\tlogging.debug(\"About to json dump this entry: \" + results.__str__())\n\treturn HttpResponse(simplejson.dumps(results, cls=customJSON), mimetype=\"application/json\")\n\n@login_required\ndef toggle_public(request):\n\tresponse = {}\n\tif request.method == 'GET':\n\t\tentry_num = request.GET.get('entryNum', None)\n\t\tentry_num = int(entry_num) if entry_num != None else None\n\t\tresult = None\n\t\tif entry_num is not None:\n\t\t\tresult = Entry.objects.toggle_public(request.user, entry_num)\n\t\tif result == False:\n\t\t\tresponse = { 'error' : 'Cannot modify entry' }\n\t\telse:\n\t\t\tresponse = { 'entryNum' : entry_num, 'publicID' : result }\n\treturn HttpResponse(simplejson.dumps(response, cls=customJSON), mimetype=\"application/json\")\n\t\t\n\t\t\t\n@login_required\ndef get_node(request):\n\tif request.method == 'GET':\n\t\tnode_id = int(request.GET.get('nodeID', -1))\n\t\tif node_id > -1:\n\t\t\tn = Node.objects.filter(containingEntry__author=request.user).filter(id=node_id) \n\t\t\tif len(n) == 1:\n\t\t\t\treturn HttpResponse(simplejson.dumps(n[0].get_dict(), cls=customJSON), mimetype=\"application/json\")\n\treturn HttpResponse(\"No nodes matching query.\")\n\ndef get_public_node(request, id_string=''):\n\tdata = {}\n\tif request.method == 'GET' and len(id_string) >= IdEncoder.MIN_LENGTH:\n\t\tn = Node.objects.get_public_node(id_string)\n\t\tif n is not None:\n\t\t\tdata = simplejson.dumps(n.get_dict(), cls=customJSON)\n\t\t\treturn render_to_response('public.html', { 'json_node' : data }, context_instance=RequestContext(request))\n\t\telse:\n\t\t\traise Http404\n\ndef comments(request):\n\terrors = []\n\tsuccess = False\n\tif request.method == 'POST':\n\t\tform = CommentsForm(request.POST)\n\t\tif form.is_valid():\n\t\t\temail_body = ''\n\t\t\tif form.cleaned_data['name'] is not None:\n\t\t\t\temail_body += 'Name: ' + form.cleaned_data['name'] + '\\n'\n\t\t\tif form.cleaned_data['email'] is not None:\n\t\t\t\temail_body += 'Email: ' + form.cleaned_data['email'] + '\\n'\n\t\t\temail_body += form.cleaned_data['text']\n\t\t\tgmail = settings.WEBMASTER_EMAIL\n\t\t\tgmail_pass = settings.WEBMASTER_PASS\n\t\t\tif not send_email(email_body, from_user=gmail, to_user=gmail, email_pass=gmail_pass):\n\t\t\t\tlogging.debug(\"Failed to send email.\")\n\t\t\t\terrors.append('Sorry, send failed. Maybe you could let [email protected] know. 
I know he would appreciate it.')\n\t\t\telse:\n\t\t\t\tsuccess = True\n\telse:\n\t\tform = CommentsForm\n\n\treturn render_to_response('comments.html', {'form' : form, 'errors' : errors, 'success' : success}, context_instance=RequestContext(request))\n\ndef register(request):\n\terrors = []\n\tif request.method == 'POST':\n\t\tform = RegisterForm(request.POST)\n\t\tif form.is_valid():\n\t\t\tusername = form.cleaned_data['username']\n\t\t\tpassword1 = form.cleaned_data['password1']\n\t\t\tpassword2 = form.cleaned_data['password2']\n\t\t\temail = form.cleaned_data.get('email')\n\t\t\tlogging.debug(\"username: \" + username)\n\t\t\tlogging.debug(\"Number of duplicates: %d\" % User.objects.filter(username=username).count())\n\t\t\tif User.objects.filter(username=username).count() == 0:\n\t\t\t\tif password1 == password2: \t\t\t\t\n\t\t\t\t\tUser.objects.create_user(username=username, password=password1, email=email)\n\t\t\t\t\tuser = authenticate(username=username, password=password1)\n\t\t\t\t\tlogin(request, user)\n\t\t\t\t\treturn HttpResponseRedirect('/write_entry/')\n\t\t\t\telse:\n\t\t\t\t\terrors.append('Passwords do not match')\n\t\t\telse:\n\t\t\t\terrors.append('Username is taken')\n\telse:\n\t\tform = RegisterForm()\n\n\treturn render_to_response('registration/register.html', {'form' : form, 'errors' : errors}, context_instance=RequestContext(request))\n\n@login_required\ndef get_snippets(request):\n\tif request.method == 'POST':\n\t\tnames = []\n\t\ttry:\n\t\t\tdata = simplejson.loads(request.raw_post_data)\n\t\t\tnames = data.get('names', [])\n\t\texcept:\n\t\t\tlogging.debug(\"Couldn't load any parameters from the request. Returning all snippets\")\n\t\tsnips = request.user.get_profile().get_snippets(names)\n\t\tresults = []\n\t\tfor s in snips:\n\t\t\tresults.append(s.get_dict())\n\t\treturn HttpResponse(simplejson.dumps(results), mimetype=\"application/json\")\n\treturn HttpResponse(\"No snippets matching query.\")\n\ndef save_snippet(request):\n\tif request.method == 'POST':\n\t\tdata = simplejson.loads(request.raw_post_data)\n\t\tif data['name'] is not None and data['content'] is not None:\n\t\t\t(s, created) = Snippet.objects.get_or_create(name=data['name'], defaults={'content' : data['content'], 'author' : request.user})\n\t\t\tif not created:\n\t\t\t\ts.content = data['content']\n\t\t\treturn HttpResponse(simplejson.dumps(s.get_dict()), mimetype=\"application/json\")\n\treturn HttpResponse(\"Invalid request.\");\n\ndef tests(request):\n\treturn render_to_response('tests.html', context_instance=RequestContext(request))\n\ndef splash(request):\n\tdata = {}\n\tdata['can_save'] = False;\n\tdata['form'] = AuthenticationForm\n\tf = open(settings.STATIC_ROOT + 'misc/splash_editor.txt', 'r')\n\tdata['default_text'] = f.read()\n\tif request.user.is_authenticated():\n\t\tdata['num_entries'] = Entry.objects.filter(author=request.user).count()\n\treturn render_to_response('splash.html', data, context_instance=RequestContext(request))\n\ndef about(request):\n\treturn render_to_response('about.html', context_instance=RequestContext(request))\n\ndef not_found(request):\n\treturn render_to_response('404.html', context_instance=RequestContext(request))\n" }, { "alpha_fraction": 0.7525773048400879, "alphanum_fraction": 0.7525773048400879, "avg_line_length": 23.25, "blob_id": "0502fb5605909d79cadefac5ce1d9cf6bd245a67", "content_id": "33d47fb3e76a03a98879e862f7ffbf2f01925671", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 97, 
"license_type": "no_license", "max_line_length": 39, "num_lines": 4, "path": "/journal/journal_app/context_processors.py", "repo_name": "astahlman/journal-app", "src_encoding": "UTF-8", "text": "from django.conf import settings\n\ndef url_base(context):\n\treturn {'URL_BASE': settings.URL_BASE}\n" }, { "alpha_fraction": 0.5283018946647644, "alphanum_fraction": 0.704402506351471, "avg_line_length": 16.66666603088379, "blob_id": "89836095c365e6d7fb6317f286858f84857affaa", "content_id": "689c708442c27c133c5c39964582d0c29668946e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 159, "license_type": "no_license", "max_line_length": 22, "num_lines": 9, "path": "/requirements.txt", "repo_name": "astahlman/journal-app", "src_encoding": "UTF-8", "text": "Django==1.4.1\ndistribute==0.6.24\ndj-database-url==0.2.1\ndjango-extensions==0.9\ngunicorn==0.14.6\npsycopg2==2.4.5\npython-dateutil==2.1\nsix==1.2.0\nwsgiref==0.1.2\n" }, { "alpha_fraction": 0.6324906349182129, "alphanum_fraction": 0.6329588294029236, "avg_line_length": 29.95652198791504, "blob_id": "b8decf1d5d3d8b3487cc169ef46afc9f393beed1", "content_id": "7a8a2dccd4b157fe2dbabfb99bca944f4c521958", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 2136, "license_type": "no_license", "max_line_length": 125, "num_lines": 69, "path": "/assets/js/write.js", "repo_name": "astahlman/journal-app", "src_encoding": "UTF-8", "text": "requirejs.config({\n //By default load any module IDs from js/lib\n baseUrl: '../static/js',\n //except, if the module ID starts with \"app\",\n //load it from the js/app directory. paths\n //config is relative to the baseUrl, and\n //never includes a \".js\" extension since\n //the paths config could be for a directory.\n paths: {\n app: './app',\n\t\tlib: './lib',\n }\n});\n\n// Start the main app logic.\nrequirejs(['app/CreateEntryManager', 'app/Models', 'app/PersistenceManager', 'app/UtilityFunctions'],\nfunction (CreateEntryManager, Models, PersistenceManager, UtilityFunctions) {\n\t//jQuery is loaded and can be used here now.\n\t$(document).ready(function() {\n\t\tvar NEW_ENTRY = -1;\n\t\tvar curEntryNum = UtilityFunctions.extractURLParams()['entryNum'] || NEW_ENTRY;\n\t\tvar $editorView = $('#editor_view');\n\t\tvar $errTable = $(\"#errTable\");\n\t\tvar $saveButton = $(\"#saveButton\");\n\t\tvar initData = { \n\t\t\t$editorView : $editorView, \n\t\t\t$errorTable : $errTable,\n\t\t}\n\n\t\tCreateEntryManager.init(initData);\n\t\tvar editor = CreateEntryManager.getEditor();\n\t\tif (curEntryNum !== NEW_ENTRY) {\n\t\t\tvar params = { entryNum : curEntryNum };\n\t\t\tPersistenceManager.requestEntry(params, function(response) {\n\t\t\t\tif (!response['rawText']) {\n\t\t\t\t\talert(\"Error: Couldn't retrieve entry \" + curEntryNum + \". 
You will be redirected so that you can create a new entry.\");\n\t\t\t\t\tvar reg = /\\??entryNum=[^?=]*/g;\n\t\t\t\t\tvar redirect = document.URL.replace(reg, '');\n\t\t\t\t\twindow.location = redirect;\n\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t\teditor.model.setContent(response['rawText']);\n\t\t\t\teditor.syncView();\n\t\t\t});\n\t\t}\n\t\t\n\n\t\t$saveButton.click(function (e) {\n\t\t\teditor.syncModel();\n\t\t\tvar lines = editor.model.getLines();\n\t\t\tvar entry = PersistenceManager.saveEntry(lines, \n\t\t\t\tcurEntryNum, \n\t\t\t\tfunction(response) {\n\t\t\t\t\tvar msg = '';\n\t\t\t\t\tcurEntryNum = response['entryNum'];\n\t\t\t\t\tif (response['status'] == 'success') {\n\t\t\t\t\t\tmsg = 'Successfully saved entry ' + response['entryNum'];\n\t\t\t\t\t} else {\n\t\t\t\t\t\tmsg = 'Save failed.';\n\t\t\t\t\t}\n\t\t\t\t\talert(msg);\n\t\t\t\t\twindow.location = '/write_entry/?entryNum=' + response['entryNum'];\n\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t);\n\t\t});\n\n\t});\n});\n" }, { "alpha_fraction": 0.6333803534507751, "alphanum_fraction": 0.6474847197532654, "avg_line_length": 31.925697326660156, "blob_id": "8042da456545ef9a38e285f6101dc8e48d69859e", "content_id": "eb77b02cd59f4955dd2adb04d38aee6b48521c5f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 10635, "license_type": "no_license", "max_line_length": 127, "num_lines": 323, "path": "/assets/js/testing/EditorTests.js", "repo_name": "astahlman/journal-app", "src_encoding": "UTF-8", "text": "requirejs.config({\n //By default load any module IDs from js/lib\n baseUrl: '../static/js',\n //except, if the module ID starts with \"app\",\n //load it from the js/app directory. paths\n //config is relative to the baseUrl, and\n //never includes a \".js\" extension since\n //the paths config could be for a directory.\n paths: {\n app: './app',\n\t\tlib: './lib',\n }\n});\n\nfunction areArraysEqual(arr1, arr2) {\n\tif (arr1.length !== arr2.length) {\n\t\treturn false;\n\t} else {\n\t\tfor (var i = 0; i < arr1.length; i++) {\n\t\t\tif (arr1[i] !== arr2[i]) {\n\t\t\t\treturn false;\n\t\t\t}\n\t\t}\n\t}\n\treturn true;\n}\n\nrequirejs(['app/Models', 'app/Editor', 'app/EditorView'],\nfunction (Models, Editor, EditorView) {\n\ttest (\"Editor Model Tests\", function () {\n\t\tvar e = new Editor.Model();\n\t\t// Editor methods\n\t\t// insertLine(lineIndex, text)\n\t\t// appendLine(text)\n\t\t// deleteLine(lineIndex)\n\t\t// getLines()\n\t\t// setContent()\n\t\t// insertText(text, position)\n\t\t// deleteText(range)\n\t\t// clear()\n\t\tfunction setTestContent() {\n\t\t\tvar lines = [];\n\t\t\tlines.push(\"This is a content node before any tags.\");\n\t\t\tlines.push(\"<*A*>\");\n\t\t\tlines.push(\"\\t<*B*>\");\n\t\t\tlines.push(\"\\t\\tHere is content in B\");\n\t\t\tlines.push(\"\\t\\t##ignore##\");\n\t\t\tlines.push(\"\\t\\t\\t<*/B*>\");\n\t\t\tlines.push(\"\\t\\t##endignore##\");\n\t\t\tlines.push(\"\\t<*/B*>\");\n\t\t\tlines.push(\"\\tMore content at level A\");\n\t\t\tlines.push(\"<*/A*>\");\n\t\t\tlines.push(\"Final content\");\n\t\t\tvar content = lines.join('\\n');\n\n\t\t\te.setContent(content);\n\t\t\tvar c1 = e.getLines();\n\n\t\t\te.clear();\n\t\t\tok (e.getLines().length === 0, \"0 lines after clear\");\n\n\t\t\tfor (var i = 0; i < lines.length; i++) {\n\t\t\t\te.appendLine(lines[i]);\n\t\t\t}\n\t\t\tvar c2 = e.getLines();\n\n\t\t\tok (areArraysEqual(c1, c2), \"Set content === append lines\");\n\t\t}\n\t\t\n\n\t\t// INSERT\n\t\tsetTestContent(); // initialize content for 
insertText testing\n\t\t\n\t\t// insertText case 1\n\t\t// test insert at beginning of first line\n\t\tp = new Models.Position(0, 0);\n\t\tok(e.insertText(\"Editor Test: \", p), \"insertText 1 returned true\");\n\t\tdeepEqual(e.getLines()[0], \"Editor Test: This is a content node before any tags.\", \"insertText case 1.\");\n\n\t\t// insertText case 2\n\t\t// test insert in middle of line\n\t\tp = new Models.Position(0, 23);\n\t\tok(e.insertText(\"test \", p), \"insertText 2 returned true\"); \n\t\tdeepEqual(e.getLines()[0], \"Editor Test: This is a test content node before any tags.\", \"insertText case 2.\");\n\t\n\t\t// insertText case 3\n\t\t// test insert at end of line\n\t\tvar l = e.getLines()[0].length;\n\t\tp = new Models.Position(0, l);\n\t\tok(e.insertText(\"..\", p), \"insertText 3 returned true\");\n\t\tdeepEqual(e.getLines()[0], \"Editor Test: This is a test content node before any tags...\", \"insertText case 3.\");\n\t\t\n\t\t// insertText case 4\n\t\t// test multi-line insert\n\t\tvar l = e.getLines()[0].length;\n\t\tp = new Models.Position(0, l);\n\t\tok(e.insertText(\" Inserting a\\nnewline\", p), \"insertText 4 returned true\");\n\t\tdeepEqual(e.getLines()[0], \"Editor Test: This is a test content node before any tags... Inserting a\", \"insertText case 4 .\");\n\t\tdeepEqual(e.getLines()[1], \"newline\", \"insertText case 4.\");\n\n\t\t// insertText case 5\n\t\t// test insert at end of last line\n\t\tvar l = e.getLines();\n\t\tvar last = l.length - 1;\n\t\tp = new Models.Position(last, l[last].length);\n\t\tok(e.insertText(\" here.\", p), \"insertText 5 returned false\");\n\t\tl = e.getLines();\n\t\tlast = l.length - 1;\n\t\tdeepEqual(l[last], \"Final content here.\", \" insertText case 5.\");\n\n\t\t// save content for comparison, text shouldn't change anymore\n\t\tvar saved = e.getLines();\n\n\t\t// insertText case 6\n\t\t// test insert after last line\n\t\t// This should return false, as it is invalid\n\t\tl = e.getLines();\n\t\tp = new Models.Position(l.length, 0);\n\t\tok(e.insertText(\"Doesn't matter\", p) == false, \"insertText case 6.\");\n\t\tok(areArraysEqual(e.getLines(), saved), \"Invalid insert modified content.\");\n\n\t\t// insertText case 7\n\t\t// test insert before first line\n\t\t// This should return false, as it is invalid\n\t\tl = e.getLines();\n\t\tp = new Models.Position(-1, 0);\n\t\tok(e.insertText(\"Doesn't matter\", p) == false, \"insertText case 7.\");\n\t\tok(areArraysEqual(e.getLines(), saved), \"Invalid insert modified content.\");\n\n\t\t// insertText case 8\n\t\t// test insert before beginning of line \n\t\t// This should return false, as it is invalid\n\t\tl = e.getLines();\n\t\tp = new Models.Position(0, -1);\n\t\tok(e.insertText(\"Doesn't matter\", p) == false, \"insertText case 8.\");\n\t\tok(areArraysEqual(e.getLines(), saved), \"Invalid insert modified content.\");\n\n\t\t// DELETE \n\t\tsetTestContent(); // reset content for deleteText testing\n\n\t\t// deleteText case 1\n\t\t// test delete at beginning of first line\n\t\tvar start, end;\n\t\tstart = new Models.Position(0, 0);\n\t\tend = new Models.Position(0, 5);\n\t\tvar r = new Models.Range(start, end);\n\t\tok(e.deleteText(r), \"deleteText returned true\");\n\t\tdeepEqual(e.getLines()[0], \"is a content node before any tags.\", \"deleteText case 1 failed.\");\n\n\t\t// deleteText case 2\n\t\t// test delete in middle of line\n\t\tstart = new Models.Position(0, 5);\n\t\tend = new Models.Position(0, 13);\n\t\tr = new Models.Range(start, end);\n\t\tok(e.deleteText(r), 
\"deleteText returned true\");\n\t\tdeepEqual(e.getLines()[0], \"is a node before any tags.\", \"deleteText case 2 failed.\");\n\n\t\t// deleteText case 3\n\t\t// test delete at end of line\n\t\tl = e.getLines()[0];\n\t\tstart = new Models.Position(0, l.length - 1);\n\t\tend = new Models.Position(0, l.length);\n\t\tr = new Models.Range(start, end);\n\t\tok(e.deleteText(r), \"deleteText returned true\");\n\t\tdeepEqual(e.getLines()[0], \"is a node before any tags\", \"deleteText case 3 failed.\");\n\n\t\t// save content for comparison, text shouldn't change anymore\n\t\tsaved = e.getLines();\n\n\t\t// deleteText case 4\n\t\t// range starts before first line\n\t\tstart = new Models.Position(-1, 0);\n\t\tend = new Models.Position(0, 1);\n\t\tr = new Models.Range(start, end);\n\t\tok(e.deleteText(r) == false, \"invalid delete failed\");\n\t\tok(areArraysEqual(e.getLines(), saved), \"Invalid insert modified content.\");\n\n\t\t// deleteText case 5\n\t\t// range ends after last line\n\t\tl = e.getLines();\n\t\tstart = new Models.Position(0, 0);\n\t\tend = new Models.Position(l.length, 0);\n\t\tr = new Models.Range(start, end);\n\t\tok(e.deleteText(r) == false, \"invalid delete failed\");\n\t\tok(areArraysEqual(e.getLines(), saved), \"Invalid insert modified content.\");\n\n\t\t// deleteText case 6\n\t\t// range starts after end of line\n\t\tl = e.getLines();\n\t\tstart = new Models.Position(0, l[0].length + 1);\n\t\tend = new Models.Position(0, l[0].length + 1);\n\t\tr = new Models.Range(start, end);\n\t\tok(e.deleteText(r) == false, \"invalid delete failed\");\n\t\tok(areArraysEqual(e.getLines(), saved), \"Invalid insert modified content.\");\n\n\t\t// deleteText case 7\n\t\t// range ends after end of line\n\t\tl = e.getLines();\n\t\tstart = new Models.Position(0, 1);\n\t\tend = new Models.Position(0, l[0].length + 1);\n\t\tr = new Models.Range(start, end);\n\t\tok(e.deleteText(r) == false, \"invalid delete failed\");\n\t\tok(areArraysEqual(e.getLines(), saved), \"Invalid insert modified content.\");\n\n\n\t\t// INSERT LINE\n\t\tsetTestContent(); // reset content for deleteText testing\n\t\t\n\t\t// insertLine case 1\n\t\t// insert before first line\n\t\tvar oldFirst = e.getLines()[0];\n\t\tvar s = \"Added with insertLine.\";\n\t\te.insertLine(0, s);\n\t\tdeepEqual(e.getLines()[0], s);\n\t\tdeepEqual(e.getLines()[1], oldFirst);\n\t\t\n\t\t// insertLine case 2\n\t\t// insert after first line and before last line\n\t\tvar oldThird = e.getLines()[2];\n\t\ts = \"Another added with insertLine.\";\n\t\te.insertLine(2, s);\n\t\tdeepEqual(e.getLines()[2], s);\n\t\tdeepEqual(e.getLines()[3], oldThird);\n\t\n\t\t// insertLine case 3\n\t\t// insert after first line\n\t\tvar lines = e.getLines();\n\t\tvar oldLast = lines[lines.length - 1];\n\t\ts = \"Final added with insertLine.\";\n\t\te.insertLine(lines.length, s);\n\t\tlines = e.getLines();\n\t\tdeepEqual(lines[lines.length - 1], s);\n\t\tdeepEqual(lines[lines.length - 2], oldLast);\n\t\t\n\t\t// These should fail\n\t\tvar saved = e.getLines();\n\n\t\t// insertLine case 4\n\t\t// insert at invalid index: -1\n\t\ts = \"Doesn't matter - should fail.\";\n\t\tok (e.insertLine(-1, s) === false, \"Invalid insert at -1 fails.\");\n\n\t\t// insertLine case 5\n\t\t// insert at invalid index: length + 1\n\t\tlines = e.getLines();\n\t\tok (e.insertLine(lines.length + 1, s) === false, \"Invalid insert at at length + 1 fails.\");\n\t\n\t\tok(areArraysEqual(saved, e.getLines()), \"Content not modified by invalid insert cases.\");\n\n\t\t// DELETE 
LINE\n\t\tsetTestContent(); // reset content for deleteText testing\n\t\t\n\t\t// deleteLine case 1\n\t\t// delete first line\n\t\tvar oldSecond = e.getLines()[1];\n\t\te.deleteLine(0);\n\t\tdeepEqual(e.getLines()[0], oldSecond);\n\t\t\n\t\t// deleteLine case 2\n\t\t// delete line between first and last\n\t\tvar oldThird = e.getLines()[2];\n\t\te.deleteLine(1);\n\t\tdeepEqual(e.getLines()[1], oldThird);\n\t\n\t\t// deleteLine case 3\n\t\t// delete last line\n\t\tvar lines = e.getLines();\n\t\tvar oldAlmostLast = lines[lines.length - 2];\n\t\te.deleteLine(lines.length - 1);\n\t\tlines = e.getLines();\n\t\tdeepEqual(lines[lines.length - 1], oldAlmostLast);\n\n\t\t// These should fail\n\t\tvar saved = e.getLines();\n\t\n\t\t// deleteLine case 4\n\t\t// delete at invalid index: -1\n\t\tok (e.deleteLine(-1) === false, \"Invalid delete at -1 fails.\");\n\n\t\t// deleteLine case 5\n\t\t// delete at invalid index: length\n\t\tlines = e.getLines();\n\t\tok (e.deleteLine(lines.length) === false, \"Invalid delete at index length fails.\");\n\t\n\t\tok(areArraysEqual(saved, e.getLines()), \"Content not modified by invalid delete cases.\");\n\n\t\t// APPEND\n\t\tsetTestContent(); // reset content for deleteText testing\n\t\t\n\t\t// appendLine Case 1\n\t\t// append with existing lines\n\t\ts = \"This line was appended.\";\n\t\te.appendLine(s);\n\t\tvar lines = e.getLines(); \n\t\tdeepEqual(lines[lines.length - 1], s, \"appendLine case 1\");\n\n\t\te.clear();\n\t\tok (e.getLines().length === 0, \"Cleared lines ok\");\n\n\t\t// appendLine case 2\n\t\t// append with no existing lines\n\t\te.appendLine(s);\n\t\tok (e.getLines().length === 1, \"appendLine case 2 - length ok\");\n\t\tdeepEqual (e.getLines()[0], s, \"appendLine case 2\");\n\t\n\t\t// Presenter-TextView\n\t\tsetTestContent();\n\t\tvar expected = e.getLines().join('\\n');\n\t\tvar $textarea = $('<textarea>');\n\t\tvar view = new EditorView.TextAreaView($textarea);\n\t\tvar presenter = new Editor.Presenter(view, e);\n\t\tok (e.getLines().join('\\n') !== presenter.view.getContent(), \"Content != before sync\");\n\t\tpresenter.syncView();\n\t\tdeepEqual(e.getLines().join('\\n'), expected, \"Model content == expected\");\n\t\tdeepEqual(presenter.view.getContent(), expected, \"SyncView ok\");\n\n\t\tvar s = \"\\nAdded line from view.\";\n\t\t$textarea.val(presenter.view.getContent() + s);\n\t\tok (e.getLines().join('\\n') !== presenter.view.getContent(), \"Content != before sync\");\n\t\tpresenter.syncModel();\n\t\tdeepEqual(e.getLines().join('\\n'), expected + s, \"SyncModel ok\");\n\t});\n});\n" }, { "alpha_fraction": 0.6084254384040833, "alphanum_fraction": 0.6160221099853516, "avg_line_length": 21.984127044677734, "blob_id": "2ba480b02196d37c17e19d88e752a1c6c2fca3cb", "content_id": "480a0e9910bdccd3c3f41debe402bdc50ebb426e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 1448, "license_type": "no_license", "max_line_length": 76, "num_lines": 63, "path": "/static/js/app/UtilityFunctions.js", "repo_name": "astahlman/journal-app", "src_encoding": "UTF-8", "text": "define (function (require, exports, module) {\n\n\tfunction splitLines(content) {\n\t\tvar split = content.split('\\n');\n\t\tif (split.length === 0) {\n\t\t\treturn [content];\n\t\t}\n\t\treturn split;\n\t}\n\n\tfunction tabLines(lines, tabCount) {\n\t\tvar count = tabCount || 1;\n\t\tvar tabs = dupChar('\\t', count);\n\t\tvar copy = [];\n\t\tfor (var i = 0; i < lines.length; i++) {\n\t\t\tcopy.push(tabs + 
lines[i]);\n\t\t}\n\t\treturn copy;\n\t}\n\n\tfunction countLeadingTabs(lineText, matchLineBreaks) {\n\t\tvar regex = (matchLineBreaks === false) ? /^(\\t)+/g : /^(\\t)+/gm;\n\t\tvar tabs = regex.exec(lineText);\n\t\tvar tabCount = 0;\n\t\tif (tabs) {\n\t\t\ttabCount = tabs[0].length;\n\t\t}\n\t\treturn tabCount;\n\t}\n\n\tfunction dupChar(charIn, count) {\n\t\tif (count > 0) {\n\t\t\treturn (new Array(count + 1)).join(charIn);\n\t\t} else if (count === 0) {\n\t\t\treturn '';\n\t\t}\n\t}\n\n\tfunction extractURLParams() {\n\t\tvar urlParams = {};\n\t\tvar match,\n\t\t\tpl = /\\+/g, // Regex for replacing addition symbol with a space\n\t\t\tsearch = /([^&=]+)=?([^&]*)/g,\n\t\t\tdecode = function (s) { return decodeURIComponent(s.replace(pl, \" \")); },\n\t\t\tquery = window.location.search.substring(1);\n\n\t\twhile (match = search.exec(query)) {\n\t\t urlParams[decode(match[1])] = decode(match[2]);\n\t\t}\n\t\treturn urlParams;\n\t}\n\n\texports.splitLines = splitLines;\n\texports.countLeadingTabs = countLeadingTabs;\n\texports.dupChar = dupChar;\n\texports.tabLines = tabLines;\n\texports.extractURLParams = extractURLParams;\n\t\n});\n" }, { "alpha_fraction": 0.6269035339355469, "alphanum_fraction": 0.6315182447433472, "avg_line_length": 26.60509490966797, "blob_id": "fb0d1d5713184e040b3a098033ad2243242ce5c9", "content_id": "2f4b57e96d911c87be355ed1dd7b2c2575b918bf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 4334, "license_type": "no_license", "max_line_length": 80, "num_lines": 157, "path": "/assets/js/app/NodeViewer.js", "repo_name": "astahlman/journal-app", "src_encoding": "UTF-8", "text": "define (function (require, exports, module) {\n\tvar Editor = require('./Editor');\n\tvar PersistenceManager = require('./PersistenceManager');\n\tvar UtilityFunctions = require('./UtilityFunctions');\n\t\n\tvar $viewerDiv;\n\tvar curNode;\n\t\n\tvar TAB_CHAR = \"&nbsp;&nbsp;&nbsp;&nbsp;\";\n\tvar defaultExpand = true;\n\n\tfunction init(initData) {\n\t\t\n\t\t$viewerDiv = initData.$viewerDiv;\n\t\tcurNode = initData.node;\n\t\tif (curNode) {\n\t\t\tbuildNodeView(curNode);\n\t\t}\n\t\tif (typeof initData.defaultExpand !== 'undefined') {\n\t\t\tdefaultExpand = initData.defaultExpand;\n\t\t}\n\t}\n\n\tfunction _shouldOpenDiv(n) {\n\t\treturn (n.children && n.children.length > 0);\n\t}\n\n\tfunction _getColor(level) {\n\t\tvar colors = [\"Salmon\", \"MediumTurquoise\", \"DarkSeaGreen\", \"MediumPurple\"];\n\t\treturn colors[level % colors.length];\n\t}\n\n\tvar divIDBuilder = function () {\n\t\tvar PREFIX = 'node-';\n\t\tvar inUse = [];\n\t\tfunction buildID(node) {\n\t\t\tvar invalidChars = /[^a-zA-Z0-9:_\\.\\s-]/g;\n\t\t\tvar id = PREFIX + node.nodeVal + '-' + node.level;\n\t\t\tid = id.replace(invalidChars, \"\");\n\t\t\tid = id.replace(/\\s/g, '_');\n\t\t\tvar dup = 1;\n\t\t\twhile (inUse.indexOf(id) > -1) {\n\t\t\t\tif (dup == 1) {\n\t\t\t\t\tid += '-' + dup;\n\t\t\t\t} else {\n\t\t\t\t\t// replace the previous numeric suffix (was chopping with the undefined global 'length')\n\t\t\t\t\tid = id.substring(0, id.lastIndexOf('-') + 1) + dup;\n\t\t\t\t}\n\t\t\t\tdup++; // dup was never incremented, so collisions kept appending '-1'\n\t\t\t}\n\t\t\tinUse.push(id);\n\t\t\treturn id;\n\t\t}\n\t\tfunction reset() {\n\t\t\tinUse = [];\n\t\t}\n\t\treturn { build : buildID, reset : reset };\n\t}();\n\n\n\tfunction WrapperDiv(parentWrapperIn, nodeIn) {\n\n\t\tfunction getAccordionIcon (initData) {\n\t\t\tvar PLUS_CLASS = 'icon-plus-sign', MINUS_CLASS = 'icon-minus-sign';\n\t\t\tvar isOpen = typeof initData.isOpen === 'undefined' ? 
true : initData.isOpen;\n\t\t\tvar $el = $('<i>');\n\t\t\tvar defaultClass = isOpen ? MINUS_CLASS : PLUS_CLASS;\n\t\t\t$el.addClass(defaultClass);\n\t\t\t$el.click(function () {\n\t\t\t\t$el.toggleClass(PLUS_CLASS + ' ' + MINUS_CLASS);\n\t\t\t});\n\t\t\t$el.attr('data-toggle', 'collapse');\n\t\t\t$el.attr('data-target', initData.dataTarget);\n\t\t\t$el.attr('data-parent', initData.dataParent);\n\t\t\t$el.css('float', 'left');\n\t\t\treturn $el;\n\t\t}\n\t\tthis.level = nodeIn.level;\n\t\tthis.nodes = [nodeIn];\n\n\t\tvar colorLevel = _shouldOpenDiv(nodeIn) ? nodeIn.level + 1: nodeIn.level;\n\t\tvar baseID = divIDBuilder.build(nodeIn);\n\n\t\tthis.$outerDiv= $('<div>').attr('id', baseID + '-outer');\n\t\tthis.$headerDiv = $('<div>').attr('id', baseID + '-header');\n\t\tthis.$contentDiv = $('<div>').attr('id', baseID + '-content');\n\t\tthis.$contentDiv.addClass('collapse');\n\t\tif (defaultExpand || nodeIn.nodeType === 'root') {\n\t\t\tthis.$contentDiv.addClass('in');\n\t\t}\n\n\t\tthis.$outerDiv.append(this.$headerDiv);\n\t\tthis.$outerDiv.append(this.$contentDiv);\n\t\tthis.$outerDiv.css(\"margin\", \"5px\");\n\t\tthis.$outerDiv.css(\"position\", \"relative\");\n\t\tthis.$outerDiv.css(\"left\", \"20\");\n\t\tthis.$outerDiv.css(\"border-radius\", \"10px\");\n\t\tthis.$outerDiv.css(\"padding\", \"15px\");\n\t\tthis.$outerDiv.css(\"background-color\", _getColor(colorLevel));\n\n\t\tif (nodeIn.nodeType !== 'root') {\n\t\t\tvar initData = {\n\t\t\t\tdataTarget : '#' + baseID + '-content',\n\t\t\t\tisOpen : defaultExpand,\n\t\t\t};\n\t\t\tvar $icon = getAccordionIcon(initData);\n\t\t\tthis.$headerDiv.append($icon);\n\t\t\tthis.$headerDiv.append('<h3>' + nodeIn.nodeVal+ '</h3>');\n\t\t}\n\n\t\tthis.children = [];\n\t\tif (parentWrapperIn) {\n\t\t\tthis.parentWrapper = parentWrapperIn;\n\t\t\tthis.parentWrapper.$contentDiv.append(this.$outerDiv);\n\t\t}\n\t}\n\n\tWrapperDiv.prototype.addChild = function (node) {\n\t\tvar child = new WrapperDiv(this, node);\n\t\tthis.children.push(child);\n\t\treturn child;\n\t}\n\n\tWrapperDiv.prototype.append = function (n) {\n\t\tthis.nodes.push(n);\n\t\tvar appendText = n.nodeVal;\n\t\tvar tabs = UtilityFunctions.countLeadingTabs(appendText, false);\n\t\tappendText = appendText.replace(/^(\\t)+/g, \"\");\n\t\tvar extra = tabs - n.level;\n\t\tfor (var i = 0; i < extra; i++) {\n\t\t\tappendText = TAB_CHAR + appendText;\n\t\t}\n\t\tthis.$contentDiv.append(appendText.replace(/\\n/g, '<br>'));\n\t}\n\n\tfunction buildNodeView(nodeIn) {\n\t\tdivIDBuilder.reset();\n\t\tvar div = (function buildWrapper(n, wrapper) {\n\t\t\tif (!wrapper) {\n\t\t\t\twrapper = new WrapperDiv(null, n);\n\t\t\t} else if (_shouldOpenDiv(n)) {\n\t\t\t\twrapper = wrapper.addChild(n);\n\t\t\t} else {\n\t\t\t\twrapper.append(n);\n\t\t\t}\n\n\t\t\tfor (var i = 0; i < n.children.length; i++) {\n\t\t\t\tbuildWrapper(n.children[i], wrapper);\n\t\t\t}\n\n\t\t\treturn wrapper;\n\t\t}(nodeIn, null));\n\t\t$viewerDiv.empty();\n\t\t$viewerDiv.append(div.$outerDiv);\n\t}\n\n\texports.init = init;\n\texports.buildNodeView = buildNodeView;\n});\n" }, { "alpha_fraction": 0.6283783912658691, "alphanum_fraction": 0.6283783912658691, "avg_line_length": 17.5, "blob_id": "bb5560a28d6638823b6b5996421f133a413a2635", "content_id": "1eeb261ff3b4b2864c408af9881659fcbca4656d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 148, "license_type": "no_license", "max_line_length": 45, "num_lines": 8, "path": "/static/js/app/Logger.js", "repo_name": 
"astahlman/journal-app", "src_encoding": "UTF-8", "text": "define (function (require, exports, module) {\n\tfunction log(msg) {\n\t\tif (window.DEBUG_EXPOSE) {\n\t\t\tconsole.log(msg);\n\t\t}\n\t}\n\texports.log = log;\n});\n" }, { "alpha_fraction": 0.7368420958518982, "alphanum_fraction": 0.7368420958518982, "avg_line_length": 25.772727966308594, "blob_id": "cf263f8fc735d1c24a491f728fadcd985d159270", "content_id": "371b548ce2fb3dedf8acfb8cf73a8fc9af846ad4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 589, "license_type": "no_license", "max_line_length": 79, "num_lines": 22, "path": "/journal/settings_prod.py", "repo_name": "astahlman/journal-app", "src_encoding": "UTF-8", "text": "# Django settings for journal_site project.\nimport dj_database_url\nfrom os import environ\n\n# Helper lambda for gracefully degrading environmental variables:\nenv = lambda e, d: environ[e] if environ.has_key(e) else d\n\nDEBUG = False\n\nDATABASES = {'default': dj_database_url.config(default='postgres://localhost')}\n\nADDITIONAL_INSTALLED_APPS = (\n\t'gunicorn',\n)\n\n# Make this unique, and don't share it with anybody.\nSECRET_KEY = env('SECRET_KEY', None)\n\nWEBMASTER_EMAIL = env('WEBMASTER_EMAIL', None)\nWEBMASTER_PASS = env('WEBMASTER_PASS', None)\n\nURL_BASE = 'http://dearqwerty.herokuapp.com/'\n" }, { "alpha_fraction": 0.6431089639663696, "alphanum_fraction": 0.6544871926307678, "avg_line_length": 37.28220748901367, "blob_id": "506243adf8e0f32cda746bf0eca49ab860b2b714", "content_id": "ec3591f1333d0fa1db7911602ee8d9273100ba04", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 6240, "license_type": "no_license", "max_line_length": 169, "num_lines": 163, "path": "/static/js/testing/CreateEntryManagerTests.js", "repo_name": "astahlman/journal-app", "src_encoding": "UTF-8", "text": "requirejs.config({\n //By default load any module IDs from js/lib\n baseUrl: '../static/js',\n //except, if the module ID starts with \"app\",\n //load it from the js/app directory. 
paths\n //config is relative to the baseUrl, and\n //never includes a \".js\" extension since\n //the paths config could be for a directory.\n paths: {\n app: './app',\n\t\tlib: './lib',\n }\n});\n\n// Easier to see the difference between strings in debugging.\nfunction stringDiff(s1,s2) {\n\tfor (var i = 0; i < Math.max(s1.length, s2.length); i++) {\n\t\tif (s1[i] !== s2[i]) {\n\t\t\tconsole.log(\"String diff - not equal:\");\n\t\t\tconsole.log(\"s1[\"+i+\"] = \" + s1[i]);\n\t\t\tconsole.log(\"s2[\"+i+\"] = \" + s2[i]);\n\t\t\treturn i;\n\t\t}\n\t}\n\treturn -1;\n}\n\nrequirejs(['app/Models', 'app/Editor', 'app/EditorView', 'app/CreateEntryManager', 'app/UtilityFunctions'],\nfunction (Models, Editor, EditorView, CreateEntryManager, UtilityFunctions) {\n\n\tfunction insertAtCaret(editor, text) {\n\t\tvar r = editor.getSelectionRange();\n\t\tvar caret;\n\t\tif (r) {\n\t\t\tcaret = r.start;\n\t\t} else {\n\t\t\tcaret = new Models.Position(0, 0);\n\t\t}\n\t\teditor.model.insertText(text, caret);\n\t\teditor.syncView();\n\t\tcaret.offset = text.length;\n\t\teditor.setSelectionRange(new Models.Range(caret, caret))\n}\n\ntest (\"CreateEntryManager Tests\", function () {\n\tvar $textarea = $('<textarea>').appendTo('body');\n\t$textarea.get(0).rows = 20;\n\t$textarea.get(0).cols = 20;\n\tvar $errTable = $('<table>');\n\tvar $saveButton = $('<button>');\n\tvar initData = { \n\t\t$editorView : $textarea,\n\t\t$errorTable : $errTable,\n\t\t$saveButton : $saveButton,\n\t};\n\tCreateEntryManager.init(initData);\n\tvar editor = CreateEntryManager.getEditor();\n\n\t// TAG COMPLETION\n\tvar testContent = \"<*A*>\\n\\t<*B*>\\n\\t\\tContent\\n\\t<*/B*>\\n<*/A*\";\n\tvar s = testContent;\n\teditor.model.setContent(testContent);\n\teditor.syncView();\n\tdeepEqual($errTable.find('tr').length, 0, \"errTable is empty before parse.\");\n\t$(editor.view).trigger('keyupTimeout');\n\tstop();\n\tdeepEqual ($errTable.find('tr').length, 1, \"Error displayed after parse.\");\n\tvar lines = editor.model.getLines();\n\tvar p = new Models.Position(lines.length - 1, lines[lines.length - 1].length);\n\teditor.model.insertText('>', p);\n\t\teditor.syncView();\n\t\t$(editor.view).trigger('keyupTimeout');\n\t\tdeepEqual ($errTable.find('tr').length, 0, \"Error fixed after adding '>'.\");\n\t\ts = $textarea.val();\n\t\tvar t = \"\\n<*A2*>\";\n\t\t$textarea.val(s + t);\n\t\teditor.syncModel();\n\t\tlines = editor.model.getLines();\n\t\tp = new Models.Position(lines.length - 1, lines[lines.length - 1].length);\n\t\teditor.setSelectionRange(new Models.Range(p,p));\n\t\tCreateEntryManager.completeTags();\n\t\tok(stringDiff(editor.view.getContent(), s + t + \"\\n\\t\\n<*/A2*>\") === -1, \"Completed tag at level 0 ok.\");\n\n\t\ts = \"<*A*>\";\n\t\t$textarea.val(s);\n\t\teditor.syncModel();\n\t\tlines = editor.model.getLines();\n\t\tp = new Models.Position(lines.length - 1, lines[lines.length - 1].length);\n\t\teditor.setSelectionRange(new Models.Range(p,p));\n\t\tCreateEntryManager.completeTags();\n\t\tok(stringDiff(editor.view.getContent(), s + \"\\n\\t\\n<*/A*>\") === -1, \"Completed tag at level 0 ok.\");\n\n\t\tinsertAtCaret(editor, \"<*B*>\");\n\t\tCreateEntryManager.completeTags();\n\t\tok(stringDiff(editor.view.getContent(), \"<*A*>\\n\\t<*B*>\\n\\t\\t\\n\\t<*/B*>\\n<*/A*>\") === -1, \"Completed tag at level 1 ok.\");\n\t\ts = editor.view.getContent();\n\t\t\n\t\tt = \"<*C*> But then more content on the same line.\";\n\t\tinsertAtCaret(editor, 
t);\n\t\tCreateEntryManager.completeTags();\n\t\tok(stringDiff(editor.view.getContent(), \"<*A*>\\n\\t<*B*>\\n\\t\\t\" + t + \"\\n\\t<*/B*>\\n<*/A*>\") === -1, \"Didn't complete tag with trailing content on line.\");\n\n\t\teditor.model.clear();\n\t\teditor.syncView();\n\t\tp = new Models.Position(0,0);\n\t\teditor.setSelectionRange(new Models.Range(p, p));\n\t\tinsertAtCaret(editor, '##def SomeDef##');\n\t\tCreateEntryManager.completeTags();\n\t\tok(stringDiff(editor.view.getContent(), \"##def SomeDef##\\n\\t\\n##enddef SomeDef##\") === -1, \"Completed def at level 0.\");\n\n\t\tinsertAtCaret(editor, '##ignore##');\n\t\tCreateEntryManager.completeTags();\n\t\tok(stringDiff(editor.view.getContent(), \"##def SomeDef##\\n\\t\" + \"##ignore##\\n\\t\\t\\n\\t##endignore##\" + \"\\n##enddef SomeDef##\") === -1, \"Completed def at level 0.\");\n\n\t/*\n\t\tTODO: This has moved to CreateEntryManager (User is eliminated). Fix these and test extract snippets in PersistenceManager.\n\t\t// SNIPPETS\n\t\tvar snipA = testUser.getSnippetLines('WorkoutA');\n\t\tvar tabbedA = testUser.getSnippetLines('WorkoutATabbed');\t\t\n\t\tvar textA = snipA.join('\\n');\n\t\tvar textTabbedA = tabbedA.join('\\n');\n\t\t\n\t\teditor.model.clear();\n\t\teditor.model.appendLine('<*Tag1*>');\n\t\teditor.model.appendLine('\\t');\n\t\teditor.model.appendLine('<*//*Tag1*>');\n\t\teditor.syncView();\n\t\tvar saved = editor.view.getContent();\n\t\tvar p = new Models.Position(1, 1);\n\t\teditor.setSelectionRange(new Models.Range(p, p));\n\t\tinsertAtCaret(editor, '##insert WorkoutA##');\n\t\tCreateEntryManager.insertSnippets();\n\t\t// test\n\t\tok (stringDiff(editor.view.getContent(), '<*Tag1*>\\n' + textTabbedA + '\\n<*//*Tag1*>') === -1, 'insertSnippet at level 1 ok.');\n\t\t\n\t\teditor.model.clear();\n\t\teditor.model.appendLine('Test content.##insert WorkoutA##');\n\t\teditor.syncView();\n\t\tCreateEntryManager.insertSnippets();\n\t\tok (stringDiff(editor.view.getContent(), 'Test content.\\n' + textA) === -1, 'insertSnippet at level 0 with text preceding insert ok.');\n\n\t\teditor.model.clear();\n\t\teditor.model.appendLine('##insert WorkoutA##Test content.');\n\t\teditor.syncView();\n\t\tCreateEntryManager.insertSnippets();\n\t\tok (stringDiff(editor.view.getContent(), textA + '\\nTest content.') === -1, 'insertSnippet at level 0 with text following insert ok.');\n\n\t\teditor.model.setContent(saved);\n\t\teditor.syncView();\n\t\tp = new Models.Position(1, 1);\n\t\teditor.model.insertText('<*Tag2*>\\n\\t\\t\\n\\t<*//*Tag2*>', p);\n\t\tp = new Models.Position(2, 2);\n\t\tvar t = '##insert WorkoutA##';\n\t\teditor.model.insertText(t, p);\n\t\teditor.syncView();\n\t\tp = new Models.Position(2, 2 + t.length);\n\t\teditor.setSelectionRange(new Models.Range(p, p));\n\t\tCreateEntryManager.insertSnippets();\n\t\tvar doubleTabbedA = UtilityFunctions.tabLines(snipA, 2);\n\t\tok (stringDiff(editor.view.getContent(), '<*Tag1*>\\n\\t<*Tag2*>\\n' + doubleTabbedA.join('\\n') + '\\n\\t<*//*Tag2*>\\n<*//*Tag1*>') === -1, 'insertSnippet at level 2 ok.');\n\t*/\n\t});\n});\n" }, { "alpha_fraction": 0.7211267352104187, "alphanum_fraction": 0.737089216709137, "avg_line_length": 87.75, "blob_id": "5e80501527043a409cd8a4b6c0e3b285b70f591f", "content_id": "348b8805f67b66af372133e0f91acd835d61ce8c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1065, "license_type": "no_license", "max_line_length": 200, "num_lines": 12, "path": "/journal/journal_app/custom_forms.py", "repo_name": 
"astahlman/journal-app", "src_encoding": "UTF-8", "text": "from django import forms\n\nclass RegisterForm(forms.Form):\n\tusername = forms.CharField(max_length=30, label=\"Username\", help_text=\"30 characters or fewer - may contain letters, numbers, _, @, +, . and -. It's cool to use your email address.\")\n\tpassword1 = forms.CharField(min_length=6, label=\"Password\", help_text=\"6 characters or more.\", widget=forms.PasswordInput())\n\tpassword2 = forms.CharField(min_length=6, label=\"Re-type Password.\", widget=forms.PasswordInput())\n\temail = forms.EmailField(required=False, label=\"Email\", help_text=\"Optional, but recommended in case you forget your password. (I won't give it out to spammers. Not even for a backrub or a burrito.\")\n\nclass CommentsForm(forms.Form):\n\tname = forms.CharField(required=False, max_length=50, label=\"Your name:\", help_text=\"(Optional)\")\n\temail = forms.EmailField(required=False, label=\"Email:\", help_text=\"(Optional)\")\n\ttext = forms.CharField(min_length=1, widget=forms.Textarea(attrs={'rows' : 20, 'cols' : 150}), label=\"Comment:\", help_text=\"Lavish praise, cutting insults or helpful suggestions.\")\n" }, { "alpha_fraction": 0.588837206363678, "alphanum_fraction": 0.5944185853004456, "avg_line_length": 28.83333396911621, "blob_id": "339900838f22c90c3b609a19c5c536efeff363e6", "content_id": "b9c63c7a7070e3c0ac04fcc415f2dc3da32ce256", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 2150, "license_type": "no_license", "max_line_length": 95, "num_lines": 72, "path": "/static/js/testing/ParserTests.js", "repo_name": "astahlman/journal-app", "src_encoding": "UTF-8", "text": "requirejs.config({\n //By default load any module IDs from js/lib\n baseUrl: '../static/js',\n //except, if the module ID starts with \"app\",\n //load it from the js/app directory. 
paths\n //config is relative to the baseUrl, and\n //never includes a \".js\" extension since\n //the paths config could be for a directory.\n paths: {\n app: './app',\n\t\tlib: './lib',\n }\n});\n\nfunction parseTestInput(data) {\n\tvar regex = /^SEPARATOR\\s*=\\s*['\"]([^'\"\\n]+)['\"]\\s*$/;\n\tvar lines = data.split('\\n');\n\tvar m, delim;\n\tfor (var i = 0; i < lines.length; i++) {\n\t\tif (m = regex.exec(lines[i])) {\n\t\t\tdelim = m[1];\n\t\t\tlines = lines.slice(i + 1);\n\t\t}\n\t\ti = lines.length;\n\t}\n\n\tvar cases = [];\n\n\tif (delim) {\n\t\tregex = new RegExp(\"^\" + escapeRegExp(delim) + \"$\");\n\t\tfor (var i = 0; i < lines.length; i++) {\n\t\t\tif (regex.test(lines[i])) {\n\t\t\t\tcases.push('');\n\t\t\t} else if (cases.length > 0) {\n\t\t\t\tcases[cases.length - 1] += (lines[i] + '\\n');\n\t\t\t}\n\t\t}\n\t}\n\n\treturn cases;\n}\n\nfunction escapeRegExp(str) {\n return str.replace(/[\\-\\[\\]\\/\\{\\}\\(\\)\\*\\+\\?\\.\\\\\\^\\$\\|]/g, \"\\\\$&\");\n}\n\nrequirejs(['app/Parser', 'app/UtilityFunctions', 'app/Models', 'app/Editor', 'app/EditorView'],\nfunction (Parser, UtilityFunctions, Models, Editor, EditorView) {\n\ttest( \"Parser Test\", function () {\n\t\tvar inputs = parseTestInput(PARSER_VALID_TXT);\n\t\tok (inputs.length > 0, \"Parsed valid inputs ok.\");\n\t\tfor (var i = 0; i < inputs.length; i++) {\t\n\t\t\tconsole.log(\"About to get lines\");\n\t\t\tconsole.log(\"UtlilityFunctions = \" + UtilityFunctions);\n\t\t\tvar lines = UtilityFunctions.splitLines(inputs[i]);\n\t\t\tconsole.log(\"Got lines\");\n\t\t\tvar r = Parser.buildParseTree(lines);\n\t\t\tconsole.log(\"Built parse tree\");\n\t\t\tif (r.errors) {\n\t\t\t\tok (r.errors.length == 0, \"No parse errors - Passed.\");\n\t\t\t}\n\t\t}\n\t\tinputs = parseTestInput(PARSER_INVALID_TXT);\n\t\tok (inputs.length > 0, \"Parsed invalid inputs ok.\");\n\t\tfor (var i = 0; i < inputs.length; i++) {\t\n\t\t\tvar lines = UtilityFunctions.splitLines(inputs[i]);\n\t\t\tvar r = Parser.buildParseTree(lines);\n\t\t\tok (r.errors, \"Invalid input produced errors - Passed.\");\n\t\t\tok (r.errors.length > 0, \"Caught parse errors - Passed.\");\n\t\t}\n\t});\n});\n\n\n" }, { "alpha_fraction": 0.5943293571472168, "alphanum_fraction": 0.6292257308959961, "avg_line_length": 21.924999237060547, "blob_id": "dfd7423bf0a0efff2808d57129ad0fe4ec56dc38", "content_id": "f852c1c90880e19e2fd5d0a4ec6830e38c5c3321", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 917, "license_type": "no_license", "max_line_length": 64, "num_lines": 40, "path": "/journal/journal_app/id_encoder.py", "repo_name": "astahlman/journal-app", "src_encoding": "UTF-8", "text": "import random\n\nclass IdEncoder():\n\tLOWER = 'bcdfghjklmnpqrstvwxyz'\n\tUPPER = LOWER.upper()\n\tNUMS = '23456789'\n\tALPHABET = LOWER + UPPER + NUMS\n\n\tbase = len(ALPHABET)\n\tMIN_LENGTH = 5\n\t@classmethod\n\tdef int_to_string(cls, n):\n\t\tnum = n + pow(cls.base, cls.MIN_LENGTH - 1)\n\t\ts = ''\n\t\twhile num > 0:\n\t\t\ti = num % cls.base\n\t\t\ts = cls.ALPHABET[i:i+1] + s\n\t\t\tnum /= cls.base\n\t\treturn s\n\n\t@classmethod\n\tdef string_to_int(cls, s):\n\t\tn = 0\n\t\tfor i in range(len(s)):\n\t\t\tn += cls.ALPHABET.index(s[i]) * pow(cls.base, len(s) - 1 - i)\n\t\treturn n - pow(cls.base, cls.MIN_LENGTH - 1)\n\n\t@classmethod\n\tdef test(cls):\n\t\tmax_r = 10000000\n\t\ttrials = 1000000\n\t\tcases = []\n\t\tfor i in range(trials):\n\t\t\tcases.append(random.randrange(0, max_r))\n\t\tcases.append(0) # edge 
case\n\t\tcases.append(max_r) # edge case\n\t\tfor n in cases:\n\t\t\tif (cls.string_to_int(cls.int_to_string(n)) != n):\n\t\t\t\traise Exception(\"IdStringGen test failed.\")\n\t\treturn True\n" }, { "alpha_fraction": 0.7107843160629272, "alphanum_fraction": 0.7215686440467834, "avg_line_length": 29, "blob_id": "b3ef2383464ea888d6140b0f29b91de8132a9fbc", "content_id": "9b9a31c9de2aa8f301af3f08d30a11c596c20c23", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1020, "license_type": "no_license", "max_line_length": 71, "num_lines": 34, "path": "/journal/journal_app/email_helper.py", "repo_name": "astahlman/journal-app", "src_encoding": "UTF-8", "text": "\"\"\"\nThis module adapted from:\nhttp://codecomments.wordpress.com/2008/01/04/python-gmail-smtp-example/\n\"\"\"\nimport smtplib\nimport datetime\nfrom email.MIMEMultipart import MIMEMultipart\nfrom email.MIMEText import MIMEText\n\ndef send_email(text, **kwargs):\n\trequired = ['from_user', 'email_pass', 'to_user']\n\tfor r in required:\n\t\tif not r in kwargs:\n\t\t\tprint(\"Failed to send email without required arg %s\" % r)\n\t\t\treturn False\n\tgmailUser = kwargs['from_user']\n\tgmailPassword = kwargs['email_pass']\n\trecipient = kwargs['to_user']\n\tmsg = MIMEMultipart()\n \tmsg['From'] = gmailUser\n \tmsg['To'] = recipient\n \tif not 'subject' in kwargs:\n\t\tmsg['Subject'] = 'DearQwerty Comment - ' + str(datetime.date.today())\n\telse:\n\t\tmsg['Subject'] = kwargs['subject']\n\tmsg.attach(MIMEText(text))\n\tmailServer = smtplib.SMTP('smtp.gmail.com', 587)\n\tmailServer.ehlo()\n\tmailServer.starttls()\n\tmailServer.ehlo()\n\tmailServer.login(gmailUser, gmailPassword)\n\tmailServer.sendmail(gmailUser, recipient, msg.as_string())\n\tmailServer.close()\n\treturn True\n" }, { "alpha_fraction": 0.5574691295623779, "alphanum_fraction": 0.5705727934837341, "avg_line_length": 29.689655303955078, "blob_id": "960c2215dfa8755c0e388c52d1e5264b6f4ad4c8", "content_id": "586460e8a232c839a8685663706f36202851806f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 2671, "license_type": "no_license", "max_line_length": 107, "num_lines": 87, "path": "/static/js/app/Lexer.js", "repo_name": "astahlman/journal-app", "src_encoding": "UTF-8", "text": "define (function (require, exports, module) {\n\n\tvar Models = require('./Models');\n\tvar Logger = require('./Logger');\n\n\tvar types = [\"content\", \"tagOpen\", \"tagClose\", \"defineOpen\", \"defineClose\", \"ignoreOpen\", \"ignoreClose\"];\n\n\tfunction Token(typeIn, valIn, contentIn, range) {\n\t\tif (types.indexOf(typeIn) > -1) {\n\t\t\tthis.type = typeIn;\n\t\t\tthis.val = valIn;\n\t\t\tthis.content = contentIn;\n\t\t\tthis.range = range;\n\t\t}\n\t}\n\n\tfunction extractTokens(lines) {\n\t\tvar tokens = [];\n\t\tvar pattern = new RegExp(\"<\\\\*(?:\\\\/)?([^>\\\\/<*]+)\\\\*>|##(?:end)?def ([^#]+)##|##(?:end)?ignore##\", \"g\");\n\t\tvar m;\n\t\tvar lineIndex = 0;\n\t\tvar p1, p2;\n\t\tlines.forEach(function (line) {\n\t\t\tvar linePos = 0;\n\t\t\tLogger.log(\"Lexing line: \" + line);\n\t\t\twhile (m = pattern.exec(line)) {\n\t\t\t\tvar content = line.substring(linePos, m.index);\n\t\t\t\tif (content.length > 0) {\n\t\t\t\t\tp1 = new Models.Position(lineIndex, linePos);\n\t\t\t\t\tp2 = new Models.Position(lineIndex, m.index);\n\t\t\t\t\ttokens.push(new Token(\"content\", content, content, new Models.Range(p1, p2)));\n\t\t\t\t}\n\t\t\t\t\n\t\t\t\tvar capture = undefined, i = 1;\n\t\t\t\twhile (typeof capture === \"undefined\" && i < m.length) {\n\t\t\t\t\tcapture\t= typeof m[i] === \"undefined\" ? 
capture : m[i];\n\t\t\t\t\ti++;\n\t\t\t\t}\n\t\t\t\tcapture = capture || '';\n\n\t\t\t\tvar type, content;\n\t\t\t\tif (m[0].indexOf(\"<*/\") > -1) {\n\t\t\t\t\tcontent = \"<*/\" + capture + \"*>\";\n\t\t\t\t\ttype = \"tagClose\";\n\t\t\t\t} else if (m[0].indexOf(\"<*\") > -1) {\n\t\t\t\t\tcontent = \"<*\" + capture + \"*>\";\n\t\t\t\t\ttype = \"tagOpen\";\n\t\t\t\t} else if (m[0].indexOf(\"##enddef\") > -1) {\n\t\t\t\t\tcontent = \"##enddef \" + capture + \"##\";\n\t\t\t\t\ttype = \"defineClose\";\n\t\t\t\t} else if (m[0].indexOf(\"##def\") > -1) {\n\t\t\t\t\tcontent = \"##def \" + capture + \"##\";\n\t\t\t\t\ttype = \"defineOpen\";\n\t\t\t\t} else if (m[0].indexOf(\"##endignore\") > -1) {\n\t\t\t\t\tcontent = \"##endignore##\";\n\t\t\t\t\ttype = \"ignoreClose\";\n\t\t\t\t} else if (m[0].indexOf(\"##ignore\") > -1) {\n\t\t\t\t\tcontent = \"##ignore##\";\n\t\t\t\t\ttype = \"ignoreOpen\";\n\t\t\t\t}\n\t\t\t\t\n\t\t\t\tp1 = new Models.Position(lineIndex, m.index);\n\t\t\t\tp2 = new Models.Position(lineIndex, m.index + m[0].length);\n\t\t\t\tvar range = new Models.Range(p1, p2);\n\t\t\t\ttokens.push(new Token(type, capture, content, new Models.Range(p1, p2)));\n\t\t\t\tlinePos = m.index + m[0].length;\n\t\t\t}\n\t\t\t// add any remaining content\t\n\t\t\tif (linePos < line.length) {\n\t\t\t\tp1 = new Models.Position(lineIndex, linePos);\n\t\t\t\tp2 = new Models.Position(lineIndex, line.length);\n\t\t\t\tvar remaining = line.substring(linePos);\n\t\t\t\ttokens.push(new Token(\"content\", remaining, remaining, new Models.Range(p1,p2)));\n\t\t\t}\n\t\t\tlineIndex++;\n\t\t});\n\t\t\n\t\treturn tokens.reverse();\n\t}\n\n\tfunction getTypes() {\n\t\treturn types;\n\t}\t\n\n\texports.extractTokens = extractTokens;\n\texports.getTypes = getTypes;\n});\n\n" }, { "alpha_fraction": 0.6118178367614746, "alphanum_fraction": 0.6157160401344299, "avg_line_length": 33.323944091796875, "blob_id": "04745162b31f7860da499b9717ee52d127ad87b4", "content_id": "7c7a28a5a3eaef1dbdf03452b2115fd90f381245", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 4874, "license_type": "no_license", "max_line_length": 114, "num_lines": 142, "path": "/static/js/app/Parser.js", "repo_name": "astahlman/journal-app", "src_encoding": "UTF-8", "text": "define (function (require, exports, module) {\n\tvar Models = require('./Models');\n\tvar Lexer = require('./Lexer');\n\tvar Logger = require('./Logger');\n\n\tfunction buildParseTree (lines) {\n\t\tvar tokens = Lexer.extractTokens(lines);\n\t\tfunction isOpener(t) {\n\t\t\tvar openers = [\"tagOpen\", \"defineOpen\", \"ignoreOpen\"];\n\t\t\treturn openers.indexOf(t) > -1;\n\t\t}\n\t\tfunction isCloser(t) {\n\t\t\tvar closers = [\"tagClose\", \"defineClose\", \"ignoreClose\"];\n\t\t\treturn closers.indexOf(t) > -1;\n\t\t}\n\t\tfunction textInRange(range) {\n\t\t\tif (range.start.lineIndex === range.end.lineIndex) {\n\t\t\t\treturn lines[range.start.lineIndex].substring(range.start.offset, range.end.offset);\n\t\t\t}\n\n\t\t\tvar t = \"\";\n\t\t\tt += lines[range.start.lineIndex].substring(range.start.offset);\n\t\t\tfor (var i = range.start.lineIndex + 1; i < range.end.lineIndex; i++) {\n\t\t\t\tt += lines[i];\n\t\t\t}\n\t\t\tt += lines[range.end.lineIndex].substring(0, range.end.offset);\n\t\t\treturn t;\n\t\t}\n\t\tvar root = new Models.Node(null);\n\t\troot.nodeType = \"root\";\n\t\troot.nodeVal = \"root\";\n\t\t// root content is all lines\n\t\tfor (var i = 0; i < lines.length; i++) {\n\t\t\troot.nodeContent += 
lines[i];\n\t\t}\n\t\troot.range.setStart({lineIndex : 0, offset : 0});\n\t\troot.range.setEnd({\n\t\t\tlineIndex : lines.length - 1,\n\t\t\toffset : lines[lines.length - 1].length,\n\t\t});\n\t\tvar results = { rootNode: root, errors : [] };\n\t\tvar cur = root;\n\t\tvar t;\n\t\tvar n;\n\t\tvar level = 0;\n\t\tvar line = 0;\n\t\tvar newLineRegex = new RegExp(\"\\n\", \"g\");\n\t\t\n\t\tfunction logToken(t) {\n\t\t\tLogger.log(\"Token: val=\" + t.val + \", type=\" + t.type + \", line=\" + t.range.start.lineIndex);\n\t\t}\n\t\tLogger.log(\"Here are the tokens:\");\n\t\ttokens.forEach(logToken);\n\t\twhile (tokens.length > 0) {\n\t\t\tt = tokens.pop(); \n\t\t\tif (isOpener(t.type)) {\n\t\t\t\tif (t.type === \"ignoreOpen\") { // ignore node\n\t\t\t\t\tvar contents = \"\";\n\t\t\t\t\tvar ignored = t;\n\t\t\t\t\tvar ignoreRange = new Models.Range();\n\t\t\t\t\tignoreRange.setStart(ignored.range.start);\n\t\t\t\t\twhile ((ignored = tokens.pop()) && ignored.type !== \"ignoreClose\") {\n\t\t\t\t\t\tcontents += ignored.content; \n\t\t\t\t\t}\n\t\t\t\t\tif (!ignored) {\n\t\t\t\t\t\tresults.errors.push({ line : t.range.start.lineIndex, message : \"Must close ignore tag.\"});\n\t\t\t\t\t\treturn results;\n\t\t\t\t\t}\n\t\t\t\t\tignoreRange.setEnd(ignored.range.end);\n\t\t\t\t\tt.val = t.contents = contents;\n\t\t\t\t\tLogger.log(\"Creating ignore node: \" + t.val + \" at level \" + level + \" on line \" + t.range.start.lineIndex);\n\t\t\t\t\tn = new Models.Node(t, level, ignoreRange);\n\t\t\t\t\tcur.addChild(n);\n\t\t\t\t} else if (t.val === cur.nodeVal) { // duplicate of parent\n\t\t\t\t\tresults.errors.push({ line : t.range.start.lineIndex, message : \"Parent can't be its own child.\"});\n\t\t\t\t\treturn results;\n\t\t\t\t} else { // valid node with children\n\t\t\t\t\tLogger.log(\"Creating node: \" + t.type + \", \" + t.val + \n\t\t\t\t\t\t\" at level \" + level + \" on line \" + t.range.start.lineIndex);\n\t\t\t\t\tn = new Models.Node(t, level);\n\t\t\t\t\tn.range.setStart(t.range.start);\n\t\t\t\t\tcur.addChild(n);\n\t\t\t\t\tlevel++;\n\t\t\t\t\tcur = n;\n\t\t\t\t}\n\t\t\t} else if (isCloser(t.type)) {\n\t\t\t\tif (cur.nodeVal === \"root\") { // can't close at top level\n\t\t\t\t\tresults.errors.push({ line : t.range.start.lineIndex, message : \"Closing tag can't come before an opener.\"});\n\t\t\t\t\treturn results;\n\t\t\t\t} else if (t.val !== cur.nodeVal && t.type !== cur.nodeType) { // imbalanced closing\n\t\t\t\t\tresults.errors.push({ line : t.range.start.lineIndex, message : \"Expected close of \" + cur.nodeVal});\n\t\t\t\t\treturn results;\n\t\t\t\t}\n\t\t\t\tcur.range.setEnd(t.range.end);\n\t\t\t\tcur.nodeContent = textInRange(cur.range);\n\t\t\t\tcur = cur.parentNode;\n\t\t\t\tlevel--;\n\t\t\t} else { // plain content\n\t\t\t\t// compact subsequent content nodes\n\t\t\t\tvar numChildren = cur.children.length;\n\t\t\t\tif (numChildren > 0 && cur.children[numChildren - 1].nodeType === \"content\") {\n\t\t\t\t\tvar sibling = cur.children[numChildren - 1];\n\t\t\t\t\tsibling.nodeVal += \"\\n\" + t.val;\n\t\t\t\t\tsibling.range.setEnd(t.range.end);\n\t\t\t\t\tsibling.nodeContent = textInRange(sibling.range); // recompute content from the merged sibling's own range\n\t\t\t\t} else { // create a new content node\n\t\t\t\t\tLogger.log(\"Creating content node at level \" + level + \". 
Content: \" + t.val);\n\t\t\t\t\tn = new Models.Node(t, level);\n\t\t\t\t\tn.range.setStart(t.range.start);\n\t\t\t\t\tn.range.setEnd(t.range.end);\n\t\t\t\t\tn.nodeContent = textInRange(n.range);\n\t\t\t\t\tcur.addChild(n);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif (cur.nodeType !== \"root\") {\n\t\t\tresults.errors.push({line : lines.length, message : \"Expected close of \" + cur.nodeVal});\n\t\t}\n\n\t\tresults.rootNode = root;\n\t\tLogger.log(\"No more tokens...\");\n\t\treturn results;\n\t}\n\n\tfunction printParseTree(root, level) {\n\t\tvar nodes = \"\";\n\t\troot.children.forEach( function(child) {\n\t\t\tnodes += child.nodeType + \": \" + child.nodeVal + \", \";\n\t\t});\n\t\tif (nodes.length > 0) {\n\t\t\tnodes = nodes.substring(0, nodes.length - 2);\n\t\t\tLogger.log(\"Level: \" + level + \" - \" + nodes);\t\n\t\t}\n\t\troot.children.forEach( function(child) {\n\t\t\tprintParseTree(child, level + 1);\n\t\t});\n\t}\n\t\n\texports.buildParseTree = buildParseTree;\n\texports.printParseTree = printParseTree;\n});\n" }, { "alpha_fraction": 0.7107843160629272, "alphanum_fraction": 0.7215686440467834, "avg_line_length": 29, "blob_id": "b3ef2383464ea888d6140b0f29b91de8132a9fbc", "content_id": "9b9a31c9de2aa8f301af3f08d30a11c596c20c23", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1020, "license_type": "no_license", "max_line_length": 71, "num_lines": 34, "path": "/journal/journal_app/email_helper.py", "repo_name": "astahlman/journal-app", "src_encoding": "UTF-8", "text": "\"\"\"\nThis module adapted from:\nhttp://codecomments.wordpress.com/2008/01/04/python-gmail-smtp-example/\n\"\"\"\nimport smtplib\nimport datetime\nfrom email.MIMEMultipart import MIMEMultipart\nfrom email.MIMEText import MIMEText\n\ndef send_email(text, **kwargs):\n\trequired = ['from_user', 'email_pass', 'to_user']\n\tfor r in required:\n\t\tif not r in kwargs:\n\t\t\tprint(\"Failed to send email with out required arg %s\" % r)\n\t\t\treturn False\n\tgmailUser = kwargs['from_user']\n\tgmailPassword = kwargs['email_pass']\n\trecipient = kwargs['to_user']\n\tmsg = MIMEMultipart()\n \tmsg['From'] = gmailUser\n \tmsg['To'] = recipient\n \tif not 'subject' in kwargs:\n\t\tmsg['Subject'] = 'DearQwerty Comment - ' + str(datetime.date.today())\n\telse:\n\t\tmsg['Subject'] = kwargs['subject']\n\tmsg.attach(MIMEText(text))\n\tmailServer = smtplib.SMTP('smtp.gmail.com', 587)\n\tmailServer.ehlo()\n\tmailServer.starttls()\n\tmailServer.ehlo()\n\tmailServer.login(gmailUser, gmailPassword)\n\tmailServer.sendmail(gmailUser, recipient, msg.as_string())\n\tmailServer.close()\n\treturn True\n" }, { "alpha_fraction": 0.6611478924751282, "alphanum_fraction": 0.6661148071289062, "avg_line_length": 23.821918487548828, "blob_id": "937c553f667c44425862718e8d6a34064bfd4f1e", "content_id": "dbc10390e384abd0757cdec61da6041975798d83", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 1812, "license_type": "no_license", "max_line_length": 79, "num_lines": 73, "path": "/static/js/app/EditorView.js", "repo_name": "astahlman/journal-app", "src_encoding": "UTF-8", "text": "define (function (require, exports, module) {\n\tvar KEYUP_INTERVAL = 750;\n\tvar TAB = 9;\n\tvar ENTER = 13;\n\n\tfunction EditorView() {\n\t\tvar that = this;\n\t\tthis.keyupTimer = $.timer(function() {\n\t\t\t$(that).trigger('keyupTimeout');\n\t\t});\n\t\tthis.startTimer(KEYUP_INTERVAL);\n\t}\n\n\tEditorView.prototype.stopTimer = function () 
{\n\t\tthis.keyupTimer.stop();\n\t}\n\t\n\tEditorView.prototype.startTimer = function (interval) {\n\t\tvar t = interval || KEYUP_INTERVAL;\n\t\tthis.keyupTimer.set({time : t, autostart : true});\n\t}\n\n\tTextAreaView.prototype = Object.create(EditorView.prototype);\n\tTextAreaView.prototype.constructor = TextAreaView;\n\n\tfunction TextAreaView($textarea) {\n\t\tEditorView.call(this);\n\t\tvar that = this;\n\t\tthis.$textarea = $textarea;\n\t\tthis.$textarea.on('keyup', function (e) {\n\t\t\tvar newE = $.Event('keyup');\n\t\t\tnewE.which = e.which;\n\t\t\t$(that).trigger(newE);\n\t\t\tthat.startTimer();\n\t\t});\n\t\tthis.$textarea.on('keydown', function (e) {\n\t\t\tif (e.which === TAB || e.which === ENTER) {\n\t\t\t\te.preventDefault();\n\t\t\t}\n\t\t\tvar newE = $.Event('keydown');\n\t\t\tnewE.which = e.which;\n\t\t\t$(that).trigger(newE);\n\t\t});\n\t}\n\t\n\tTextAreaView.prototype.setContent = function(lines) {\n\t\tthis.$textarea.val(lines.join('\\n'));\n\t}\n\n\tTextAreaView.prototype.getContent = function() {\n\t\treturn this.$textarea.val();\n\t}\n\n\tTextAreaView.prototype.getSelectionOffsets = function() {\n\t\treturn {\n\t\t\tstart : this.$textarea.get(0).selectionStart,\n\t\t\tend : this.$textarea.get(0).selectionEnd,\n\t\t};\n\t}\n\t\n\tTextAreaView.prototype.setSelectionOffsets = function(cursor) {\n\t\tvar input = this.$textarea.get(0);\n\t\tif (!input.setSelectionRange) {\n\t\t\treturn;\n\t\t}\n\t\tif (cursor.start <= input.value.length && cursor.end <= input.value.length) {\n\t\t\tinput.focus();\n\t\t\tinput.setSelectionRange(cursor.start, cursor.end);\n\t\t}\n\t}\n\n\texports.TextAreaView = TextAreaView;\n});\n" }, { "alpha_fraction": 0.6496211886405945, "alphanum_fraction": 0.6723484992980957, "avg_line_length": 26.789474487304688, "blob_id": "48f46b5b240d2941a468b59d32ca172f6b005709", "content_id": "2144cbe2e410db3824171ed1ea0339052ff11394", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 528, "license_type": "no_license", "max_line_length": 75, "num_lines": 19, "path": "/journal/settings_dev.py", "repo_name": "astahlman/journal-app", "src_encoding": "UTF-8", "text": "# Django settings for journal_site project.\nimport os\nROOT_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..')\n\nDEBUG = True\n\nDATABASES = {\n 'default': {\n 'ENGINE': 'django.db.backends.sqlite3',\n 'NAME': os.path.join(ROOT_PATH, 'development/database/sqlite3.db'),\n }\n}\n\nkey_path = os.path.join(ROOT_PATH, 'development/extra_settings.txt')\nextra_settings = open(key_path, 'r')\nfor line in extra_settings:\n\texec line # evaluates the strings as assignments\n\nURL_BASE = 'http://127.0.0.1:8000'\n" } ]
25
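The id_encoder.py dumped above maps integer primary keys to short, unambiguous URL strings by writing them as numerals over a 50-character alphabet, padded to a minimum length via a constant offset. Below is a minimal sketch of the same round trip, assuming Python 3 semantics (the file itself is Python 2, hence its `num /= cls.base`); the function names mirror the original's, but this is illustrative code, not the repo's API:

```python
# Hypothetical Python 3 re-sketch of id_encoder.py's encoding scheme.
ALPHABET = 'bcdfghjklmnpqrstvwxyz' + 'bcdfghjklmnpqrstvwxyz'.upper() + '23456789'
BASE = len(ALPHABET)        # 50 symbols
OFFSET = BASE ** (5 - 1)    # forces every encoded id up to MIN_LENGTH == 5 characters

def int_to_string(n):
    num, s = n + OFFSET, ''
    while num > 0:
        num, i = divmod(num, BASE)   # peel off base-50 digits, least significant first
        s = ALPHABET[i] + s
    return s

def string_to_int(s):
    n = 0
    for ch in s:                     # Horner's rule over the base-50 digits
        n = n * BASE + ALPHABET.index(ch)
    return n - OFFSET

assert string_to_int(int_to_string(12345)) == 12345
```

Note the alphabet deliberately omits vowels and the look-alike characters 0, 1, O and l, which avoids both misreadings and accidentally spelling words in generated ids.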
Gaurav-1213/Face-Eyes-detection-using-OpenCV
https://github.com/Gaurav-1213/Face-Eyes-detection-using-OpenCV
c07627878bd17339194abbc5f91ef6867ee1c126
14aa63501940bed93931136c7e78bd7813607736
2a6389aec79c93fe163ba784c4acdcb46ed5f5a1
refs/heads/master
2023-06-10T05:15:30.289434
2021-07-05T13:14:00
2021-07-05T13:14:00
383,117,413
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5649809837341309, "alphanum_fraction": 0.584013044834137, "avg_line_length": 39.88888931274414, "blob_id": "e7a6c98cc951782b0884baebd4c0f3945538cfb9", "content_id": "8d48b53292a1b50d892230d67741e6b1d2a3893a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1839, "license_type": "no_license", "max_line_length": 179, "num_lines": 45, "path": "/app.py", "repo_name": "Gaurav-1213/Face-Eyes-detection-using-OpenCV", "src_encoding": "UTF-8", "text": "#from typing_extensions import runtime\nfrom flask import Flask,render_template,Response, request\nimport cv2\n\n\napp = Flask(__name__)\n\ncamera = cv2.VideoCapture(0,cv2.CAP_DSHOW)\n\ndef gen_frames():\n while True:\n success,frame = camera.read() # read camera frames\n if not success:\n break # in case camera is faulty\n else:\n detector = cv2.CascadeClassifier('haar-cascade-files/haarcascade_frontalface_default.xml') # creating object of class to extract methods (of class) throuh that object\n eye_cascade = cv2.CascadeClassifier('haar-cascade-files/haarcascade_eye.xml')\n faces = detector.detectMultiScale(frame,1.1,7) # detect faces from it\n gray = cv2.cvtColor(frame,cv2.COLOR_BGR2GRAY) # convert color frame into gray scale\n \n # draw rectangle around each face\n for (x,y,w,h) in faces:\n cv2.rectangle(frame, (x,y), (x+w, y+h), (255,0,0), 2) # (255,0,0) = (R,G,B)\n roi_gray= gray[y:y+h, x:x+w]\n roi_color = frame[y:y+h, x:x+w]\n eyes = eye_cascade.detectMultiScale( roi_gray, 1.1,3)\n for (ex,ey,ew,eh) in eyes: # in face for loop/ in face rectangle detect Eyes\n cv2.rectangle( roi_color, (ey,ex), (ex+ew, ey+eh), (0,255,0), 2) # and draw rectangle on eyes too.\n \n ret,buffer = cv2.imencode('.jpg', frame)\n frame = buffer.tobytes()\n yield(b'--frame\\r\\n'\n b'Content-Type: image/jpeg\\r\\n\\r\\n' + frame + b'\\r\\n')\n\[email protected]('/')\ndef index():\n return render_template('index.html')\n\[email protected]('/video')\ndef video():\n return Response(gen_frames(), mimetype='mutipart/x-mixed-replace; boudary = frame')\n \n\nif __name__ ==\"__main__\":\n app.run(debug =True)" }, { "alpha_fraction": 0.7958236932754517, "alphanum_fraction": 0.7981438636779785, "avg_line_length": 70.83333587646484, "blob_id": "3254eee91dc6e921d41719b03f047af5a9090499", "content_id": "2a36691d8bad20628ddda677cc0064e61b0f7bf2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 431, "license_type": "no_license", "max_line_length": 123, "num_lines": 6, "path": "/README.md", "repo_name": "Gaurav-1213/Face-Eyes-detection-using-OpenCV", "src_encoding": "UTF-8", "text": "# Face-Eyes-detection-using-OpenCV\n# Pls note -- App2.py is final python file to run and test the code.\n\nIn this repo I have described how we can build face detection and eyes detection using OpenCV package without DeepLearning.\nHere u can also add multiple Harcascade filters like smile detection or FullBody detection etc. \nFor time being I have shown only two of them but u can enhance this for learning purpose. Happy Learning\n" } ]
2
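The app.py dumped above streams webcam frames over Flask, running a Haar-cascade face detector on every frame and then searching for eyes only inside each detected face region. Below is a minimal single-image sketch of that detection step with the streaming plumbing stripped away; the input and output filenames are assumptions, and the cascade paths simply follow the repo's layout:

```python
# Single-image sketch of the repo's Haar-cascade pipeline (hypothetical filenames).
import cv2

face_cascade = cv2.CascadeClassifier('haar-cascade-files/haarcascade_frontalface_default.xml')
eye_cascade = cv2.CascadeClassifier('haar-cascade-files/haarcascade_eye.xml')

frame = cv2.imread('sample.jpg')                 # any BGR test image
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)   # cascades are matched against grayscale

# detectMultiScale(image, scaleFactor, minNeighbors) -> list of (x, y, w, h) boxes
for (x, y, w, h) in face_cascade.detectMultiScale(gray, 1.1, 7):
    cv2.rectangle(frame, (x, y), (x + w, y + h), (255, 0, 0), 2)
    roi_gray = gray[y:y + h, x:x + w]            # restrict the eye search to the face
    roi_color = frame[y:y + h, x:x + w]
    for (ex, ey, ew, eh) in eye_cascade.detectMultiScale(roi_gray, 1.1, 3):
        cv2.rectangle(roi_color, (ex, ey), (ex + ew, ey + eh), (0, 255, 0), 2)

cv2.imwrite('detected.jpg', frame)
```

Searching for eyes only inside the detected face regions both speeds up detection and cuts false positives.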
banxia1994/GAN
https://github.com/banxia1994/GAN
f169b7b8952a747f8fb295b1c09ad4ab06a9dbb7
b42170d38edb62e7743fc739023acdc8edaa97f7
34a6bd62a9268f4d8381c0767288a21b996c72c0
refs/heads/master
2021-05-10T16:14:53.941686
2018-01-23T07:36:33
2018-01-23T07:36:33
118,574,426
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5599055886268616, "alphanum_fraction": 0.5970883369445801, "avg_line_length": 36.105838775634766, "blob_id": "34079625c2a99990887a36ec93acb7bd34b8da1e", "content_id": "c99948873e5263f565ffb5486b3722b748ee199c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10166, "license_type": "permissive", "max_line_length": 109, "num_lines": 274, "path": "/layer_test.py", "repo_name": "banxia1994/GAN", "src_encoding": "UTF-8", "text": "# import tensorflow as tf\n# import numpy as np\n# # import matplotlib.pyplot as plt\n#\n# from tensorflow.examples.tutorials.mnist import input_data\n#\n# tf.set_random_seed(777) # reproducibility\n#\n# mnist = input_data.read_data_sets(\"MNIST_data/\", one_hot=True)\n#\n# x = tf.placeholder(tf.float32,[None,784])\n# x_img = tf.reshape(x,[-1,28,28,1])\n# y = tf.placeholder(tf.float32,[None,10])\n# keep_prob = tf.placeholder(tf.float32)\n# conv1 = tf.layers.conv2d(inputs=x_img,filters=32,kernel_size=[3,3],padding='SAME',activation=tf.nn.relu)\n# pool1 = tf.layers.max_pooling2d(inputs=conv1,pool_size=[2,2],padding='SAME',strides=2)\n# dropout1 = tf.layers.dropout(inputs=pool1,rate=0.7,training=True)\n#\n# conv2 = tf.layers.conv2d(inputs=dropout1,filters=64,kernel_size=[3,3],padding='SAME',activation=tf.nn.relu)\n# pool2 = tf.layers.max_pooling2d(inputs=conv2,pool_size=[2,2],padding='SAME',strides=2)\n# dropout2 = tf.layers.dropout(inputs=pool2,rate=0.7,training=True)\n#\n# conv3 = tf.layers.conv2d(inputs=dropout1,filters=64,kernel_size=[3,3],padding='SAME',activation=tf.nn.relu)\n# pool3 = tf.layers.max_pooling2d(inputs=conv3,pool_size=[2,2],padding='SAME',strides=2)\n# dropout3 = tf.layers.dropout(inputs=pool3,rate=0.7,training=True)\n#\n# flat = tf.reshape(dropout3,[-1,64*7*7])\n# dense4 = tf.layers.dense(inputs=flat,units=625,activation=tf.nn.relu)\n# dropout4 = tf.layers.dropout(inputs=dense4,rate=0.5,training=True)\n#\n# logits = tf.layers.dense(inputs=dropout4,units=10)\n#\n# cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=logits,labels=y))\n# opti = tf.train.AdadeltaOptimizer(learning_rate=0.001).minimize(cost)\n#\n#\n# corr_predict = tf.equal(tf.argmax(logits,1),tf.argmax(y,1))\n# accuracy = tf.reduce_mean(tf.cast(corr_predict,tf.float32))\n#\n#\n# sess = tf.Session()\n# sess.run(tf.global_variables_initializer())\n#\n# batch_size = 100\n# training_epoch = 15\n#\n# for epoch in range(training_epoch):\n# avg_cost = 0\n# total_batch = int(mnist.test.num_examples/batch_size)\n#\n# for i in range(total_batch):\n# xs,ys = mnist.train.next_batch(batch_size)\n# c,_ = sess.run([cost,opti],feed_dict={x:xs,y:ys,keep_prob:0.7})\n# avg_cost = c/total_batch\n#\n# print 'epoch:','%04d'%(epoch+1),'cost=','{:.9f}'.format(avg_cost)\n# print 'accuracy:',sess.run(accuracy,feed_dict={x:mnist.test.images,y:mnist.test.labels,keep_prob:1})\n\n\n\n\n\n################\n\n# import tensorflow as tf\n# # import matplotlib.pyplot as plt\n#\n# from tensorflow.examples.tutorials.mnist import input_data\n#\n# tf.set_random_seed(777) # reproducibility\n#\n# mnist = input_data.read_data_sets(\"MNIST_data/\", one_hot=True)\n# # Check out https://www.tensorflow.org/get_started/mnist/beginners for\n# # more information about the mnist dataset\n#\n# # hyper parameters\n# learning_rate = 0.001\n# training_epochs = 15\n# batch_size = 100\n#\n#\n# class Model:\n#\n# def __init__(self, sess, name):\n# self.sess = sess\n# self.name = name\n# self._build_net()\n#\n# def _build_net(self):\n# with 
tf.variable_scope(self.name):\n# # dropout (keep_prob) rate 0.7~0.5 on training, but should be 1\n# # for testing\n# self.training = tf.placeholder(tf.bool)\n#\n# # input place holders\n# self.X = tf.placeholder(tf.float32, [None, 784])\n#\n# # img 28x28x1 (black/white), Input Layer\n# X_img = tf.reshape(self.X, [-1, 28, 28, 1])\n# self.Y = tf.placeholder(tf.float32, [None, 10])\n#\n# # Convolutional Layer #1\n# conv1 = tf.layers.conv2d(inputs=X_img, filters=32, kernel_size=[3, 3],\n# padding=\"SAME\", activation=tf.nn.relu)\n# # Pooling Layer #1\n# pool1 = tf.layers.max_pooling2d(inputs=conv1, pool_size=[2, 2],\n# padding=\"SAME\", strides=2)\n# dropout1 = tf.layers.dropout(inputs=pool1,\n# rate=0.7, training=self.training)\n#\n# # Convolutional Layer #2 and Pooling Layer #2\n# conv2 = tf.layers.conv2d(inputs=dropout1, filters=64, kernel_size=[3, 3],\n# padding=\"SAME\", activation=tf.nn.relu)\n# pool2 = tf.layers.max_pooling2d(inputs=conv2, pool_size=[2, 2],\n# padding=\"SAME\", strides=2)\n# dropout2 = tf.layers.dropout(inputs=pool2,\n# rate=0.7, training=self.training)\n#\n# # Convolutional Layer #2 and Pooling Layer #2\n# conv3 = tf.layers.conv2d(inputs=dropout2, filters=128, kernel_size=[3, 3],\n# padding=\"same\", activation=tf.nn.relu)\n# pool3 = tf.layers.max_pooling2d(inputs=conv3, pool_size=[2, 2],\n# padding=\"same\", strides=2)\n# dropout3 = tf.layers.dropout(inputs=pool3,\n# rate=0.7, training=self.training)\n#\n# # Dense Layer with Relu\n# flat = tf.reshape(dropout3, [-1, 128 * 4 * 4])\n# dense4 = tf.layers.dense(inputs=flat,\n# units=625, activation=tf.nn.relu)\n# dropout4 = tf.layers.dropout(inputs=dense4,\n# rate=0.5, training=self.training)\n#\n# # Logits (no activation) Layer: L5 Final FC 625 inputs -> 10 outputs\n# self.logits = tf.layers.dense(inputs=dropout4, units=10)\n#\n# # define cost/loss & optimizer\n# self.cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(\n# logits=self.logits, labels=self.Y))\n# self.optimizer = tf.train.AdamOptimizer(\n# learning_rate=learning_rate).minimize(self.cost)\n#\n# correct_prediction = tf.equal(\n# tf.argmax(self.logits, 1), tf.argmax(self.Y, 1))\n# self.accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))\n#\n# def predict(self, x_test, training=False):\n# return self.sess.run(self.logits,\n# feed_dict={self.X: x_test, self.training: training})\n#\n# def get_accuracy(self, x_test, y_test, training=False):\n# return self.sess.run(self.accuracy,\n# feed_dict={self.X: x_test,\n# self.Y: y_test, self.training: training})\n#\n# def train(self, x_data, y_data, training=True):\n# return self.sess.run([self.cost, self.optimizer], feed_dict={\n# self.X: x_data, self.Y: y_data, self.training: training})\n#\n# # initialize\n# sess = tf.Session()\n# m1 = Model(sess, \"m1\")\n#\n# sess.run(tf.global_variables_initializer())\n#\n# print('Learning Started!')\n#\n# # train my model\n# for epoch in range(training_epochs):\n# avg_cost = 0\n# total_batch = int(mnist.train.num_examples / batch_size)\n#\n# for i in range(total_batch):\n# batch_xs, batch_ys = mnist.train.next_batch(batch_size)\n# c, _ = m1.train(batch_xs, batch_ys)\n# avg_cost += c / total_batch\n#\n# print('Epoch:', '%04d' % (epoch + 1), 'cost =', '{:.9f}'.format(avg_cost))\n#\n# print('Learning Finished!')\n#\n# # Test model and check accuracy\n# print('Accuracy:', m1.get_accuracy(mnist.test.images, mnist.test.labels))\n#\n\nimport tensorflow as tf\nimport numpy as np\n\ntf.set_random_seed(777) # for reproducibility\nlearning_rate = 0.01\n\nx_data = [[0, 
0],\n          [0, 1],\n          [1, 0],\n          [1, 1]]\ny_data = [[0],\n          [1],\n          [1],\n          [0]]\nx_data = np.array(x_data, dtype=np.float32)\ny_data = np.array(y_data, dtype=np.float32)\n\nX = tf.placeholder(tf.float32, [None, 2], name='x-input')\nY = tf.placeholder(tf.float32, [None, 1], name='y-input')\n\nwith tf.name_scope(\"layer1\") as scope:\n    W1 = tf.Variable(tf.random_normal([2, 2]), name='weight1')\n    b1 = tf.Variable(tf.random_normal([2]), name='bias1')\n    layer1 = tf.sigmoid(tf.matmul(X, W1) + b1)\n\n    w1_hist = tf.summary.histogram(\"weights1\", W1)\n    b1_hist = tf.summary.histogram(\"biases1\", b1)\n    layer1_hist = tf.summary.histogram(\"layer1\", layer1)\n\n\nwith tf.name_scope(\"layer2\") as scope:\n    W2 = tf.Variable(tf.random_normal([2, 1]), name='weight2')\n    b2 = tf.Variable(tf.random_normal([1]), name='bias2')\n    hypothesis = tf.sigmoid(tf.matmul(layer1, W2) + b2)\n\n    w2_hist = tf.summary.histogram(\"weights2\", W2)\n    b2_hist = tf.summary.histogram(\"biases2\", b2)\n    hypothesis_hist = tf.summary.histogram(\"hypothesis\", hypothesis)\n\n# cost/loss function\nwith tf.name_scope(\"cost\") as scope:\n    cost = -tf.reduce_mean(Y * tf.log(hypothesis) + (1 - Y) *\n                           tf.log(1 - hypothesis))\n    cost_summ = tf.summary.scalar(\"cost\", cost)\n\nwith tf.name_scope(\"train\") as scope:\n    train = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)\n\n# Accuracy computation\n# True if hypothesis>0.5 else False\npredicted = tf.cast(hypothesis > 0.5, dtype=tf.float32)\naccuracy = tf.reduce_mean(tf.cast(tf.equal(predicted, Y), dtype=tf.float32))\naccuracy_summ = tf.summary.scalar(\"accuracy\", accuracy)\n\n# Launch graph\nwith tf.Session() as sess:\n    # tensorboard --logdir=./logs/xor_logs\n    merged_summary = tf.summary.merge_all()\n    writer = tf.summary.FileWriter(\"./logs/xor_logs_r0_01\")\n    writer.add_graph(sess.graph) # Show the graph\n\n    # Initialize TensorFlow variables\n    sess.run(tf.global_variables_initializer())\n\n    for step in range(10001):\n        summary, _ = sess.run([merged_summary, train], feed_dict={X: x_data, Y: y_data})\n        writer.add_summary(summary, global_step=step)\n\n        if step % 100 == 0:\n            print(step, sess.run(cost, feed_dict={\n                X: x_data, Y: y_data}), sess.run([W1, W2]))\n\n    # Accuracy report\n    h, c, a = sess.run([hypothesis, predicted, accuracy],\n                       feed_dict={X: x_data, Y: y_data})\n    print(\"\\nHypothesis: \", h, \"\\nCorrect: \", c, \"\\nAccuracy: \", a)\n\n\n'''\nHypothesis: [[ 6.13103184e-05]\n [ 9.99936938e-01]\n [ 9.99950767e-01]\n [ 5.97514772e-05]]\nCorrect: [[ 0.]\n [ 1.]\n [ 1.]\n [ 0.]]\nAccuracy: 1.0\n'''" }, { "alpha_fraction": 0.6854304671287537, "alphanum_fraction": 0.7052980065345764, "avg_line_length": 20.64285659790039, "blob_id": "8531098e6e43dafabecba7d95862e9719c5261f8", "content_id": "a4d0965717b1e92f189eac04f7187930b5e1ae2c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 302, "license_type": "permissive", "max_line_length": 52, "num_lines": 14, "path": "/WGANlayers.py", "repo_name": "banxia1994/GAN", "src_encoding": "UTF-8", "text": "import tensorflow as tf\nimport tensorflow.contrib.layers as tcl\n\n\ndef leaky_relu(x, alpha=0.2):\n    return tf.maximum(tf.minimum(0.0, alpha * x), x)\n\n\ndef leaky_relu_batch_norm(x, alpha=0.2):\n    return leaky_relu(tcl.batch_norm(x), alpha)\n\n\ndef relu_batch_norm(x):\n    return tf.nn.relu(tcl.batch_norm(x))" }, { "alpha_fraction": 0.5584169030189514, "alphanum_fraction": 0.5777310729026794, "avg_line_length": 43.854103088378906, "blob_id": 
"eb80f4dd36f984805b1d0759fa3148df3036e9ef", "content_id": "b3c8c3da88e6811c620e94c590428e3d6221ae30", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 14756, "license_type": "permissive", "max_line_length": 163, "num_lines": 329, "path": "/WGAN.py", "repo_name": "banxia1994/GAN", "src_encoding": "UTF-8", "text": "# from __future__ import print_function\n# import os\n# os.environ[\"CUDA_DEVICE_ORDER\"]=\"PCI_BUS_ID\" # see issue #152\n# os.environ[\"CUDA_VISIBLE_DEVICES\"]=\"0\"\n#\n# from six.moves import xrange\n# import tensorflow.contrib.slim as slim\n# import os\n# import tensorflow as tf\n# import numpy as np\n# import tensorflow.contrib.layers as ly\n# #from load_svhn import load_svhn\n# from tensorflow.examples.tutorials.mnist import input_data\n# from functools import partial\n#\n# def lrelu(x, leak=0.3, name=\"lrelu\"):\n# with tf.variable_scope(name):\n# f1 = 0.5 * (1 + leak)\n# f2 = 0.5 * (1 - leak)\n# return f1 * x + f2 * abs(x)\n#\n# batch_size = 64\n# z_dim = 128\n# learning_rate_ger = 5e-5\n# learning_rate_dis = 5e-5\n# device = '/gpu:1'\n# # img size\n# s = 32\n# # update Citers times of critic in one iter(unless i < 25 or i % 500 == 0, i is iterstep)\n# Citers = 5\n# # the upper bound and lower bound of parameters in critic\n# clamp_lower = -0.01\n# clamp_upper = 0.01\n# # whether to use mlp or dcgan stucture\n# is_mlp = False\n# # whether to use adam for parameter update, if the flag is set False, use tf.train.RMSPropOptimizer\n# # as recommended in paper\n# is_adam = False\n# # whether to use SVHN or MNIST, set false and MNIST is used\n# is_svhn = False\n# channel = 3 if is_svhn is True else 1\n# # 'gp' for gp WGAN and 'regular' for vanilla\n# mode = 'gp'\n# # if 'gp' is chosen the corresponding lambda must be filled\n# lam = 10.\n# s2, s4, s8, s16 =\\\n# int(s / 2), int(s / 4), int(s / 8), int(s / 16)\n# # hidden layer size if mlp is chosen, ignore if otherwise\n# ngf = 64\n# ndf = 64\n# # directory to store log, including loss and grad_norm of generator and critic\n# log_dir = './log_wgan'\n# ckpt_dir = './ckpt_wgan'\n# if not os.path.exists(ckpt_dir):\n# os.makedirs(ckpt_dir)\n# # max iter step, note the one step indicates that a Citers updates of critic and one update of generator\n# max_iter_step = 20000\n#\n#\n# def generator_conv(z):\n# train = ly.fully_connected(\n# z, 4 * 4 * 512, activation_fn=lrelu, normalizer_fn=ly.batch_norm)\n# train = tf.reshape(train, (-1, 4, 4, 512))\n# train = ly.conv2d_transpose(train, 256, 3, stride=2,\n# activation_fn=tf.nn.relu, normalizer_fn=ly.batch_norm, padding='SAME', weights_initializer=tf.random_normal_initializer(0, 0.02))\n# train = ly.conv2d_transpose(train, 128, 3, stride=2,\n# activation_fn=tf.nn.relu, normalizer_fn=ly.batch_norm, padding='SAME', weights_initializer=tf.random_normal_initializer(0, 0.02))\n# train = ly.conv2d_transpose(train, 64, 3, stride=2,\n# activation_fn=tf.nn.relu, normalizer_fn=ly.batch_norm, padding='SAME', weights_initializer=tf.random_normal_initializer(0, 0.02))\n# train = ly.conv2d_transpose(train, channel, 3, stride=1,\n# activation_fn=tf.nn.tanh, padding='SAME', weights_initializer=tf.random_normal_initializer(0, 0.02))\n# print(train.name)\n# return train\n#\n# def generator_mlp(z):\n# train = ly.fully_connected(\n# z, 4 * 4 * 512, activation_fn=lrelu, normalizer_fn=ly.batch_norm)\n# train = ly.fully_connected(\n# train, ngf, activation_fn=lrelu, normalizer_fn=ly.batch_norm)\n# train = ly.fully_connected(\n# train, ngf, 
activation_fn=lrelu, normalizer_fn=ly.batch_norm)\n# train = ly.fully_connected(\n# train, s*s*channel, activation_fn=tf.nn.tanh, normalizer_fn=ly.batch_norm)\n# train = tf.reshape(train, tf.stack([batch_size, s, s, channel]))\n# return train\n#\n# def critic_conv(img, reuse=False):\n# with tf.variable_scope('critic') as scope:\n# if reuse:\n# scope.reuse_variables()\n# size = 64\n# img = ly.conv2d(img, num_outputs=size, kernel_size=3,\n# stride=2, activation_fn=lrelu)\n# img = ly.conv2d(img, num_outputs=size * 2, kernel_size=3,\n# stride=2, activation_fn=lrelu, normalizer_fn=ly.batch_norm)\n# img = ly.conv2d(img, num_outputs=size * 4, kernel_size=3,\n# stride=2, activation_fn=lrelu, normalizer_fn=ly.batch_norm)\n# img = ly.conv2d(img, num_outputs=size * 8, kernel_size=3,\n# stride=2, activation_fn=lrelu, normalizer_fn=ly.batch_norm)\n# logit = ly.fully_connected(tf.reshape(\n# img, [batch_size, -1]), 1, activation_fn=None)\n# return logit\n#\n# def critic_mlp(img, reuse=False):\n# with tf.variable_scope('critic') as scope:\n# if reuse:\n# scope.reuse_variables()\n# size = 64\n# img = ly.fully_connected(tf.reshape(\n# img, [batch_size, -1]), ngf, activation_fn=tf.nn.relu)\n# img = ly.fully_connected(img, ngf,\n# activation_fn=tf.nn.relu)\n# img = ly.fully_connected(img, ngf,\n# activation_fn=tf.nn.relu)\n# logit = ly.fully_connected(img, 1, activation_fn=None)\n# return logit\n#\n# def build_graph():\n# # z = tf.placeholder(tf.float32, shape=(batch_size, z_dim))\n# noise_dist = tf.contrib.distributions.Normal(0., 1.)\n# z = noise_dist.sample((batch_size, z_dim))\n# generator = generator_mlp if is_mlp else generator_conv\n# critic = critic_mlp if is_mlp else critic_conv\n# with tf.variable_scope('generator'):\n# train = generator(z)\n# real_data = tf.placeholder(\n# dtype=tf.float32, shape=(batch_size, 32, 32, channel))\n# true_logit = critic(real_data)\n# fake_logit = critic(train, reuse=True)\n# c_loss = tf.reduce_mean(fake_logit - true_logit)\n# if mode is 'gp':\n# alpha_dist = tf.contrib.distributions.Uniform(low=0., high=1.)\n# alpha = alpha_dist.sample((batch_size, 1, 1, 1))\n# interpolated = real_data + alpha*(train-real_data)\n# inte_logit = critic(interpolated, reuse=True)\n# gradients = tf.gradients(inte_logit, [interpolated,])[0]\n# grad_l2 = tf.sqrt(tf.reduce_sum(tf.square(gradients), axis=[1,2,3]))\n# gradient_penalty = tf.reduce_mean((grad_l2-1)**2)\n# gp_loss_sum = tf.summary.scalar(\"gp_loss\", gradient_penalty)\n# grad = tf.summary.scalar(\"grad_norm\", tf.nn.l2_loss(gradients))\n# c_loss += lam*gradient_penalty\n# g_loss = tf.reduce_mean(-fake_logit)\n# g_loss_sum = tf.summary.scalar(\"g_loss\", g_loss)\n# c_loss_sum = tf.summary.scalar(\"c_loss\", c_loss)\n# img_sum = tf.summary.image(\"img\", train, max_outputs=10)\n# theta_g = tf.get_collection(\n# tf.GraphKeys.TRAINABLE_VARIABLES, scope='generator')\n# theta_c = tf.get_collection(\n# tf.GraphKeys.TRAINABLE_VARIABLES, scope='critic')\n# counter_g = tf.Variable(trainable=False, initial_value=0, dtype=tf.int32)\n# opt_g = ly.optimize_loss(loss=g_loss, learning_rate=learning_rate_ger,\n# optimizer=partial(tf.train.AdamOptimizer, beta1=0.5, beta2=0.9) if is_adam is True else tf.train.RMSPropOptimizer,\n# variables=theta_g, global_step=counter_g,\n# summaries = ['gradient_norm'])\n# counter_c = tf.Variable(trainable=False, initial_value=0, dtype=tf.int32)\n# opt_c = ly.optimize_loss(loss=c_loss, learning_rate=learning_rate_dis,\n# optimizer=partial(tf.train.AdamOptimizer, beta1=0.5, beta2=0.9) if is_adam is True else 
tf.train.RMSPropOptimizer,\n# variables=theta_c, global_step=counter_c,\n# summaries = ['gradient_norm'])\n# if mode is 'regular':\n# clipped_var_c = [tf.assign(var, tf.clip_by_value(var, clamp_lower, clamp_upper)) for var in theta_c]\n# # merge the clip operations on critic variables\n# with tf.control_dependencies([opt_c]):\n# opt_c = tf.tuple(clipped_var_c)\n# if not mode in ['gp', 'regular']:\n# raise(NotImplementedError('Only two modes'))\n# return opt_g, opt_c, real_data\n#\n# def main():\n# if is_svhn is True:\n# dataset = load_svhn()\n# else:\n# dataset = input_data.read_data_sets('MNIST_data', one_hot=True)\n# with tf.device(device):\n# opt_g, opt_c, real_data = build_graph()\n# merged_all = tf.summary.merge_all()\n# saver = tf.train.Saver()\n# config = tf.ConfigProto(allow_soft_placement=True, log_device_placement=True)\n# config.gpu_options.allow_growth = True\n# config.gpu_options.per_process_gpu_memory_fraction = 0.8\n# def next_feed_dict():\n# train_img = dataset.train.next_batch(batch_size)[0]\n# train_img = 2*train_img-1\n# if is_svhn is not True:\n# train_img = np.reshape(train_img, (-1, 28, 28))\n# npad = ((0, 0), (2, 2), (2, 2))\n# train_img = np.pad(train_img, pad_width=npad,\n# mode='constant', constant_values=-1)\n# train_img = np.expand_dims(train_img, -1)\n# feed_dict = {real_data: train_img}\n# return feed_dict\n# with tf.Session(config=config) as sess:\n# sess.run(tf.global_variables_initializer())\n# summary_writer = tf.summary.FileWriter(log_dir, sess.graph)\n# for i in range(max_iter_step):\n# if i < 25 or i % 500 == 0:\n# citers = 100\n# else:\n# citers = Citers\n# for j in range(citers):\n# feed_dict = next_feed_dict()\n# if i % 100 == 99 and j == 0:\n# run_options = tf.RunOptions(\n# trace_level=tf.RunOptions.FULL_TRACE)\n# run_metadata = tf.RunMetadata()\n# _, merged = sess.run([opt_c, merged_all], feed_dict=feed_dict,\n# options=run_options, run_metadata=run_metadata)\n# summary_writer.add_summary(merged, i)\n# summary_writer.add_run_metadata(\n# run_metadata, 'critic_metadata {}'.format(i), i)\n# else:\n# sess.run(opt_c, feed_dict=feed_dict)\n# feed_dict = next_feed_dict()\n# if i % 100 == 99:\n# _, merged = sess.run([opt_g, merged_all], feed_dict=feed_dict,\n# options=run_options, run_metadata=run_metadata)\n# summary_writer.add_summary(merged, i)\n# summary_writer.add_run_metadata(\n# run_metadata, 'generator_metadata {}'.format(i), i)\n# else:\n# sess.run(opt_g, feed_dict=feed_dict)\n# if i % 1000 == 999:\n# saver.save(sess, os.path.join(\n# ckpt_dir, \"model.ckpt\"), global_step=i)\n#\n# main()\n\nimport os\nimport time\nimport argparse\nimport importlib\nimport tensorflow as tf\nimport tensorflow.contrib as tc\n\nfrom WGANvisualize import *\n\n\nclass WassersteinGAN(object):\n def __init__(self, g_net, d_net, x_sampler, z_sampler, data, model):\n self.model = model\n self.data = data\n self.g_net = g_net\n self.d_net = d_net\n self.x_sampler = x_sampler\n self.z_sampler = z_sampler\n self.x_dim = self.d_net.x_dim\n self.z_dim = self.g_net.z_dim\n self.x = tf.placeholder(tf.float32, [None, self.x_dim], name='x')\n self.z = tf.placeholder(tf.float32, [None, self.z_dim], name='z')\n\n self.x_ = self.g_net(self.z)\n\n self.d = self.d_net(self.x, reuse=False)\n self.d_ = self.d_net(self.x_)\n\n self.g_loss = tf.reduce_mean(self.d_)\n self.d_loss = tf.reduce_mean(self.d) - tf.reduce_mean(self.d_)\n\n self.reg = tc.layers.apply_regularization(\n tc.layers.l1_regularizer(2.5e-5),\n weights_list=[var for var in tf.global_variables() if 'weights' in 
var.name]\n )\n self.g_loss_reg = self.g_loss + self.reg\n self.d_loss_reg = self.d_loss + self.reg\n with tf.control_dependencies(tf.get_collection(tf.GraphKeys.UPDATE_OPS)):\n self.d_rmsprop = tf.train.RMSPropOptimizer(learning_rate=5e-5)\\\n .minimize(self.d_loss_reg, var_list=self.d_net.vars)\n self.g_rmsprop = tf.train.RMSPropOptimizer(learning_rate=5e-5)\\\n .minimize(self.g_loss_reg, var_list=self.g_net.vars)\n\n self.d_clip = [v.assign(tf.clip_by_value(v, -0.01, 0.01)) for v in self.d_net.vars]\n gpu_options = tf.GPUOptions(allow_growth=True)\n self.sess = tf.Session(config=tf.ConfigProto(gpu_options=gpu_options))\n\n def train(self, batch_size=64, num_batches=1000000):\n plt.ion()\n self.sess.run(tf.global_variables_initializer())\n start_time = time.time()\n for t in range(0, num_batches):\n d_iters = 5\n if t % 500 == 0 or t < 25:\n d_iters = 100\n\n for _ in range(0, d_iters):\n bx = self.x_sampler(batch_size)\n bz = self.z_sampler(batch_size, self.z_dim)\n self.sess.run(self.d_clip)\n self.sess.run(self.d_rmsprop, feed_dict={self.x: bx, self.z: bz})\n\n bz = self.z_sampler(batch_size, self.z_dim)\n self.sess.run(self.g_rmsprop, feed_dict={self.z: bz, self.x: bx})\n\n if t % 100 == 0:\n bx = self.x_sampler(batch_size)\n bz = self.z_sampler(batch_size, self.z_dim)\n\n d_loss = self.sess.run(\n self.d_loss, feed_dict={self.x: bx, self.z: bz}\n )\n g_loss = self.sess.run(\n self.g_loss, feed_dict={self.z: bz, self.x: bx}\n )\n print('Iter [%8d] Time [%5.4f] d_loss [%.4f] g_loss [%.4f]' %\n (t, time.time() - start_time, d_loss - g_loss, g_loss))\n\n if t % 100 == 0:\n bz = self.z_sampler(batch_size, self.z_dim)\n bx = self.sess.run(self.x_, feed_dict={self.z: bz})\n bx = xs.data2img(bx)\n fig = plt.figure(self.data + '.' + self.model)\n grid_show(fig, bx, xs.shape)\n fig.savefig('logs/{}/{}.pdf'.format(self.data, t/100))\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser('')\n parser.add_argument('--data', type=str, default='mnist')\n parser.add_argument('--model', type=str, default='mlp')\n parser.add_argument('--gpus', type=str, default='0')\n args = parser.parse_args()\n os.environ['CUDA_VISIBLE_DEVICES'] = args.gpus\n data = importlib.import_module(args.data)\n model = importlib.import_module(args.data + '.' + args.model)\n xs = data.DataSampler()\n zs = data.NoiseSampler()\n d_net = model.Discriminator()\n g_net = model.Generator()\n wgan = WassersteinGAN(g_net, d_net, xs, zs, args.data, args.model)\n wgan.train()" }, { "alpha_fraction": 0.6247654557228088, "alphanum_fraction": 0.6472795605659485, "avg_line_length": 27.105262756347656, "blob_id": "c578908be796900a97b72d220d05ce413a28a457", "content_id": "7719bb651d240fd7034e2f5c4c8daecb6e390aa0", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 533, "license_type": "permissive", "max_line_length": 64, "num_lines": 19, "path": "/mnist/__init__.py", "repo_name": "banxia1994/GAN", "src_encoding": "UTF-8", "text": "import numpy as np\nfrom tensorflow.examples.tutorials.mnist import input_data\nmnist = input_data.read_data_sets('MNIST_data/')\n\n\nclass DataSampler(object):\n def __init__(self):\n self.shape = [28, 28, 1]\n\n def __call__(self, batch_size):\n return mnist.train.next_batch(batch_size)[0]\n\n def data2img(self, data):\n return np.reshape(data, [data.shape[0]] + self.shape)\n\n\nclass NoiseSampler(object):\n def __call__(self, batch_size, z_dim):\n return np.random.uniform(-1.0, 1.0, [batch_size, z_dim])" } ]
4
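Both WGAN variants dumped above follow the same alternation: the critic gets 100 updates per generator step during warm-up (the first 25 iterations) and periodically thereafter (every 500th iteration), otherwise 5, and the vanilla variant clamps critic weights into [-0.01, 0.01] after each update as a crude Lipschitz constraint. Below is a framework-agnostic sketch of just that schedule; `train_critic`, `train_generator` and `clip_weights` are hypothetical placeholders for the real update ops, not the repo's API:

```python
# Distilled WGAN update schedule (placeholders for the actual TensorFlow ops).
def wgan_schedule(num_batches, train_critic, train_generator, clip_weights):
    for t in range(num_batches):
        # extra critic steps early on and periodically, keeping the critic near-optimal
        d_iters = 100 if (t < 25 or t % 500 == 0) else 5
        for _ in range(d_iters):
            train_critic()
            clip_weights(-0.01, 0.01)  # weight clipping; the 'gp' mode replaces
                                       # this with a gradient-penalty loss term
        train_generator()
```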
kaozgaia/teoria_matematica
https://github.com/kaozgaia/teoria_matematica
296591e798871e8ca5033cba5d626d04cae558c5
bb3389c25990c9000c9766fb05343de9d0c7c031
dbe4d07d810455938d097aa069d5abf8277f9f35
refs/heads/master
2019-07-29T07:08:26.106022
2014-03-22T20:52:34
2014-03-22T20:52:34
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7008547186851501, "alphanum_fraction": 0.7008547186851501, "avg_line_length": 28.25, "blob_id": "d5c32408cc3293101851df07a26ddc7f60e011c4", "content_id": "06878a2c0fa6c20335e8a36eedadbbed27600bbf", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 118, "license_type": "permissive", "max_line_length": 79, "num_lines": 4, "path": "/README.md", "repo_name": "kaozgaia/teoria_matematica", "src_encoding": "UTF-8", "text": "teoria_matematica\n=================\n\nEn este repo se maneja el proyecto final de Teoria Matematica de la Computación\n" }, { "alpha_fraction": 0.7066102027893066, "alphanum_fraction": 0.7245197296142578, "avg_line_length": 25.033897399902344, "blob_id": "1b222260760386465612592ad55fe505e451e4ad", "content_id": "cd7c3fdaed852d5e73fdec1114b82e4d3d665e59", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3071, "license_type": "permissive", "max_line_length": 121, "num_lines": 118, "path": "/Main.py", "repo_name": "kaozgaia/teoria_matematica", "src_encoding": "UTF-8", "text": "import funcionesExternas\nimport ImprimeCinta\nimport sys\nimport time\nfrom subprocess import call\n\n\n# La cinta se introduce como parametro de la siguiente manera ----->>> |q0,1,A,D,q1|q1,0,A,D,q0|q1,0,A,D,H|1010101010|\n\n\n#print \"Este es el nombre del programa: \", sys.argv[0]\n#print \"Numero de argumentos: \", len(sys.argv)\n#print \"Los argumentos son: \" , str(sys.argv[1])\n\ncall([\"clear\"])\nestadoActual = 'q0'\npivote = sys.argv[2]\narregloDeEstados = []\ndiccionarioDeInstrucciones = {}\n\n\nprint \"El estado de comienzo es: \", estadoActual\n\nprint \"La pocision inicial en la cinta es: \", pivote\ntime.sleep(0.5)\n\n# Convertimos el argumento de entrada en un objeto cadena \ncadenaDeEntrada = str(sys.argv[1])\n\nmaquinaDeEntrada = cadenaDeEntrada.split('|')\n\n#print \"Las intrucciones de la maquina son:\", maquinaDeEntrada\n#print len(maquinaDeEntrada)\n\n# Cortamos del arreglo de la maquina la primera y ultima posicion\ndel maquinaDeEntrada[0]\ndel maquinaDeEntrada[len(maquinaDeEntrada)-1]\n\n# Aqui inicializamos la cinta de la maquina, la cual tiene longitud finita y una pocicion inicial\ncinta = maquinaDeEntrada[len(maquinaDeEntrada)-1]\n\n\n\nfor i in range(len(maquinaDeEntrada)-1):\n\tarregloDeEstados.append(maquinaDeEntrada[i])\n\nprint \"Estas son las instrucciones de la maquina que recibimos: \", arregloDeEstados\ntime.sleep(1)\n\ninstrucciones = []\n\nfor i in arregloDeEstados:\n\t#print i\n\tarregloDeInstrucciones = []\n\testadoIndice = i.split(',')[0]\n\tmiInstruccion = i.split(',')\n\t#print type(miInstruccion)\n\tdel miInstruccion[0]\n\n\n\tif diccionarioDeInstrucciones.has_key(estadoIndice) == False:\n\t\tdiccionarioDeInstrucciones[estadoIndice] = []\n\t\tdiccionarioDeInstrucciones[estadoIndice].append(miInstruccion)\n\telif diccionarioDeInstrucciones.has_key(estadoIndice) == True:\n\t\tdiccionarioDeInstrucciones[estadoIndice].append(miInstruccion)\n\nprint \"Se procesa a partir de cada estado actual como sigue: \", diccionarioDeInstrucciones\ntime.sleep(1)\n\n\nwhile 1:\n\tprint \" \"\n\tcintaFake = [\" \"]*len(cinta)\n\ttime.sleep(1)\n\t_instruct = diccionarioDeInstrucciones[estadoActual]\n\n\t#print \"Caracter a analizar:\", cinta[int(pivote)]\n\tprint _instruct\n\n\tfor i in _instruct:\n\t\tif i[0] == cinta[int(pivote)]:\n\t\t\tif i[2] == 'N' or i[2] == ' ':\n\t\t\t\tprint \"La Maquina 
termino\"\n\t\t\t\tsys.exit(0)\n\t\t\tcaracter = i[1]\n\t\t\tdireccion = i[2]\n\t\t\testadoActual = i[3]\n\t\t\tbreak\n\t\t\n\tnuevosDatos = ImprimeCinta.ImprimeCinta(cinta, int(pivote), caracter , direccion, cintaFake)\n\tpivote = str(nuevosDatos[0])\n\tcinta = nuevosDatos[1]\n\tprint \"Estado actual: \", estadoActual \n\t#print estadoActual , pivote , cinta[int(pivote)]\n\n\n\t#if diccionarioDeInstrucciones.has_key(estadoIndice):\n\n\t#diccionarioDeInstrucciones[estadoIndice]\n\t# estadoInicial = myString[0]\n\t# datoEntrada = myString[1]\n\t# datoEscritura = myString[2]\n\t# movimiento = myString[3]\n\t# estadoFinal = myString[4]\n\t# tupla = (estadoInicial, datoEntrada, datoEscritura, movimiento, estadoFinal)\n\t# instrucciones.append(tupla)\n\n\n\n#print instrucciones\n \n\n # for x in cadenaDeEntrada:\n# \t#tupla = ()\n# \tif x == '|':\n\n# ['1','0','1','1','1','1']\n# [' ','I',' ',' ',' ',' ']" }, { "alpha_fraction": 0.35366860032081604, "alphanum_fraction": 0.36026379466056824, "avg_line_length": 31.66666603088379, "blob_id": "e6e72b6bdf4e903a390bc04cf01f6fa735d39ef9", "content_id": "cd6a502401e5eaa26914210a0fd2bfa0583b05b9", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2426, "license_type": "permissive", "max_line_length": 65, "num_lines": 72, "path": "/ImprimeCinta.py", "repo_name": "kaozgaia/teoria_matematica", "src_encoding": "UTF-8", "text": "def ImprimeCinta (cinta,pos,caracter,direccion, cintaFake):\r\n cinta = list(cinta)\r\n\r\n p = 0\r\n if((cinta[pos]==caracter)and(direccion=='I')):\r\n aux=cinta\r\n print \"Cinta original\"\r\n # print aux \r\n # print \"Cinta modificada\"\r\n print cinta\r\n p=pos-1\r\n\r\n cintaFake[p] = \"T\"\r\n print cintaFake\r\n print \"Cabecera\",p\r\n \r\n elif ((cinta[pos]==caracter)and(direccion=='D')):\r\n aux=cinta\r\n print \"Cinta original\"\r\n print cinta\r\n print \"Cinta modificada\"\r\n print aux\r\n p=pos+1\r\n\r\n cintaFake[p] = \"T\"\r\n print cintaFake\r\n print \"Cabecera\",p\r\n \r\n elif ((cinta[pos]!=caracter)and(direccion=='I')):\r\n aux=cinta\r\n print \"Cinta original\"\r\n print cinta\r\n for x in range (len(aux)):\r\n if x==pos:\r\n aux[x]=caracter\r\n print \"Cinta modificada\"\r\n print aux\r\n p=pos-1\r\n\r\n cintaFake[p] = \"T\"\r\n print cintaFake\r\n print \"Cabecera\",p\r\n elif ((cinta[pos]!=caracter)and(direccion=='D')):\r\n aux=cinta\r\n print \"Cinta original\"\r\n print cinta\r\n for x in range (len(aux)):\r\n if x==pos:\r\n aux[x]=caracter\r\n print \"Cinta modificada\"\r\n print aux\r\n p=pos+1\r\n\r\n cintaFake[p] = \"T\"\r\n print cintaFake\r\n print \"Cabecera\",p\r\n return [p, aux]\r\n\r\n# cadena=[]\r\n# cadena.append(1)\r\n# cadena.append(0)\r\n# cadena.append(0)\r\n# cadena.append(1)\r\n# cadena.append(0)\r\n# cadena.append(1)\r\n# cadena.append(0)\r\n# cadena.append(1)\r\n# cadena.append(1)\r\n# print \"Entrada\"\r\n# print cadena\r\n\r\n# ImprimeCinta(cadena,0,1,'I')\r\n\r\n" }, { "alpha_fraction": 0.6914893388748169, "alphanum_fraction": 0.6914893388748169, "avg_line_length": 16.090909957885742, "blob_id": "feac85f5eef4fa9cdd2f378b3dcf03b9abf367dc", "content_id": "12d6eb2dfef008aee1b4e502f7e65f77a2ad642f", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 188, "license_type": "permissive", "max_line_length": 25, "num_lines": 11, "path": "/funcionesExternas.py", "repo_name": "kaozgaia/teoria_matematica", "src_encoding": "UTF-8", "text": "def 
imprimeCinta():\n\tprint \"Hello World\"\n\ndef nombreFuncion(a,b,c):\n\tarreglo = []\n\tarreglo.append(a)\n\tarreglo.append(b)\n\tarreglo.append(c)\n\t# for x in arreglo:\n\t# \tprint x\n\treturn arreglo\n" } ]
4
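The Main.py dumped above parses a machine description of the form `|state,read,write,move,next|...|tape|` into a per-state transition table, then steps a head along the tape. Below is a condensed sketch of the same loop with English identifiers (the repo uses Spanish ones); it keeps the original's conventions ('D'/'I' for right/left moves, from derecha/izquierda, and a non-move 'N' as the halt signal) and, like the original, does no error handling:

```python
# Condensed sketch of Main.py's parse-and-run loop (illustrative, not the repo's API).
def parse_machine(arg):
    parts = arg.strip('|').split('|')
    tape, rules = list(parts[-1]), {}
    for rule in parts[:-1]:
        state, read, write, move, nxt = rule.split(',')
        rules.setdefault(state, {})[read] = (write, move, nxt)
    return rules, tape

def run(rules, tape, state='q0', head=0):
    while True:
        write, move, nxt = rules[state][tape[head]]
        if move in ('N', ' '):             # halt before writing, as Main.py does
            return ''.join(tape)
        tape[head] = write
        head += 1 if move == 'D' else -1   # D = right, I = left
        state = nxt

# e.g. run(*parse_machine('|q0,1,A,D,q1|q1,0,A,N,H|10|')) returns 'A0'
```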
StombieIT/flask-app
https://github.com/StombieIT/flask-app
9a661d22bf5e5168b5d4c64078d0752308210cbb
af64c6bf95cbdbdc48fe0c21bb45fc5229210f4c
a8bab89f039c782b90580c9c902ff01a6f06f545
refs/heads/master
2022-12-09T21:10:08.786512
2020-09-05T16:04:40
2020-09-05T16:04:40
293,107,856
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6769663095474243, "alphanum_fraction": 0.7106741666793823, "avg_line_length": 18.941177368164062, "blob_id": "81cf632bbe39612bb5f4bb8642910bbd823720c3", "content_id": "7222e789af10ad7b10d6af21886e17b5f4c8e474", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 356, "license_type": "no_license", "max_line_length": 57, "num_lines": 17, "path": "/appvue/views.py", "repo_name": "StombieIT/flask-app", "src_encoding": "UTF-8", "text": "from flask import render_template\r\nfrom app import app\r\n\r\n\r\[email protected]('/')\r\ndef index():\r\n\treturn render_template('index/index.html')\r\n\r\n\r\[email protected](404)\r\ndef page_not_found(error):\r\n\treturn render_template('index/page_not_found.html'), 404\r\n\r\n\r\[email protected](403)\r\ndef forbidden(error):\r\n\treturn render_template('index/forbidden.html'), 403\r\n" }, { "alpha_fraction": 0.6419832706451416, "alphanum_fraction": 0.6445589065551758, "avg_line_length": 31.042552947998047, "blob_id": "beb614f409341765fedae90acd70c9c6d81c40eb", "content_id": "9e0a6facf714de59267724127409ebdbd11a03a6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1553, "license_type": "no_license", "max_line_length": 118, "num_lines": 47, "path": "/appvue/blueprints/api/api.py", "repo_name": "StombieIT/flask-app", "src_encoding": "UTF-8", "text": "from flask import Blueprint, jsonify, request, url_for\r\nfrom flask_login import current_user\r\nfrom blueprints.auth.models import User\r\nfrom blueprints.post.models import Post\r\n\r\napi = Blueprint('api', __name__)\r\n\r\n\r\[email protected]('/posts/<int:page>/', methods=['GET'])\r\ndef posts(page):\r\n\tpage_ = Post.query.order_by(Post.publication_datetime.desc()).paginate(page=page, per_page=3)\r\n\tpage_json = {\r\n\t\t'page': page,\r\n\t\t'prev_url': url_for('api.posts', page=page_.prev_num) if page_.has_prev else False,\r\n\t\t'next_url': url_for('api.posts', page=page_.next_num) if page_.has_next else False,\r\n\t\t'current_user_create_post_url_if_authenticated': url_for('post.create') if current_user.is_authenticated else False,\r\n\t\t'posts': [\r\n\t\t\t{\r\n\t\t\t\t'title': post.title,\r\n\t\t\t\t'edit': post.edit,\r\n\t\t\t\t'user_login': post.user.login,\r\n\t\t\t\t'view_url': url_for('post.view', id=post.id),\r\n\t\t\t\t'edit_url': url_for('post.edit', id=post.id),\r\n\t\t\t\t'delete_url': url_for('post.delete', id=post.id),\r\n\t\t\t\t'current_user_is_allowed_to_change': current_user.is_authenticated and current_user.id == post.user.id\r\n\t\t\t}\r\n\t\t\tfor post in page_.items\r\n\t\t]\r\n\t}\r\n\treturn jsonify(page_json)\r\n\r\n\r\[email protected]('/post/<int:id>/')\r\ndef post(id):\r\n\tpost = Post.query.get_or_404(id)\r\n\tpost_json = {\r\n\t\t'id': post.id,\r\n\t\t'title': post.title,\r\n\t\t'content': post.content,\r\n\t\t'edit': post.edit,\r\n\t\t'publication': {\r\n\t\t\t'date': post.publication_datetime.strftime('%d.%m.%Y'),\r\n\t\t\t'time': post.publication_datetime.strftime('%H:%M:%S')\r\n\t\t},\r\n\t\t'user_login': post.user.login\r\n\t}\r\n\treturn jsonify(post_json)\r\n" }, { "alpha_fraction": 0.7266666889190674, "alphanum_fraction": 0.7316666841506958, "avg_line_length": 28, "blob_id": "53d49655b3b94ac45d35bccc761bf23311cadf6b", "content_id": "36b3a82913e3a63e3f4277983d54cebf31bb7990", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 659, "license_type": "no_license", 
"max_line_length": 105, "num_lines": 20, "path": "/appvue/blueprints/post/admin.py", "repo_name": "StombieIT/flask-app", "src_encoding": "UTF-8", "text": "from flask import (\r\n\tabort,\r\n\tflash,\r\n\tredirect,\r\n\turl_for\r\n)\r\nfrom flask_admin.contrib.sqla import ModelView\r\nfrom flask_login import current_user\r\n\r\n\r\nclass PostModelView(ModelView):\r\n\tdef is_accessible(self):\r\n\t\treturn current_user.is_authenticated and current_user.is_allowed(current_user.role.Permission.MODERATE)\r\n\r\n\tdef inaccessible_callback(self, name, **kwargs):\r\n\t\tif current_user.is_authenticated:\r\n\t\t\tabort(403)\r\n\t\telse:\r\n\t\t\tflash(u'Пожалуйста, войдите в аккаунт, чтобы получить доступ к данной странице', 'info')\r\n\t\t\treturn redirect(url_for('auth.login', next=url_for('admin.index')))\r\n" }, { "alpha_fraction": 0.7118958830833435, "alphanum_fraction": 0.7230483293533325, "avg_line_length": 24.899999618530273, "blob_id": "0773da85e8580ff172b0383a0d307711ed0bc842", "content_id": "45a251fca965b1eebac77b0d24ce456da4e7a86f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 645, "license_type": "no_license", "max_line_length": 90, "num_lines": 20, "path": "/appvue/blueprints/post/forms.py", "repo_name": "StombieIT/flask-app", "src_encoding": "UTF-8", "text": "from flask_wtf import FlaskForm\r\nfrom wtforms import StringField, TextAreaField, SubmitField\r\nfrom wtforms.validators import DataRequired, Length\r\n\r\n\r\nclass PostForm(FlaskForm):\r\n\ttitle = StringField(\r\n\t\tu'Заголовок',\r\n\t\tvalidators=[\r\n\t\t\tDataRequired(message=u'Обязательное поле'),\r\n\t\t\tLength(1, 64, message=u'Длина заголовка должна быть не менее 1 и не более 64 символов')\r\n\t\t]\r\n\t)\r\n\tcontent = TextAreaField(\r\n\t\tu'Содержание',\r\n\t\tvalidators=[\r\n\t\t\tDataRequired(message=u'Обязательное поле')\r\n\t\t]\r\n\t)\r\n\tsubmit = SubmitField(u'Отправить')\r\n" }, { "alpha_fraction": 0.7613019943237305, "alphanum_fraction": 0.7649186253547668, "avg_line_length": 40.53845977783203, "blob_id": "3d79a435460cf812d7eede3e31f949f3a41b8adc", "content_id": "3ead43061e43f729089c251bc24c6c48d044232f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1182, "license_type": "no_license", "max_line_length": 105, "num_lines": 26, "path": "/appvue/services/admin.py", "repo_name": "StombieIT/flask-app", "src_encoding": "UTF-8", "text": "from flask import abort, flash, redirect, url_for\r\nfrom flask_admin import Admin, AdminIndexView\r\nfrom flask_login import current_user\r\nfrom .db import db\r\nfrom blueprints.auth.models import User, Role\r\nfrom blueprints.auth.admin import UserModelView, RoleModelView\r\nfrom blueprints.post.models import Post\r\nfrom blueprints.post.admin import PostModelView\r\n\r\n\r\nclass IndexView(AdminIndexView):\r\n\tdef is_accessible(self):\r\n\t\treturn current_user.is_authenticated and current_user.is_allowed(current_user.role.Permission.MODERATE)\r\n\r\n\tdef inaccessible_callback(self, name, **kwargs):\r\n\t\tif current_user.is_authenticated:\r\n\t\t\tabort(403)\r\n\t\telse:\r\n\t\t\tflash(u'Пожалуйста, войдите в аккаунт, чтобы получить доступ к данной странице', 'info')\r\n\t\t\treturn redirect(url_for('auth.login', next=url_for('admin.index')))\r\n\r\n\r\nadmin = Admin(index_view=IndexView(), name='Администрирование', template_mode='bootstrap3')\r\nadmin.add_view(UserModelView(User, db.session, endpoint='users'))\r\nadmin.add_view(RoleModelView(Role, 
db.session, endpoint='roles'))\r\nadmin.add_view(PostModelView(Post, db.session, endpoint='posts'))\r\n" }, { "alpha_fraction": 0.677258312702179, "alphanum_fraction": 0.6954153180122375, "avg_line_length": 32.96825408935547, "blob_id": "b8bad5280c7466ae757aab9aa1e4280429e4f46d", "content_id": "e823f3d3d6242dc9681eba3b71ca09ad2dd1ea26", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2882, "license_type": "no_license", "max_line_length": 99, "num_lines": 63, "path": "/appvue/blueprints/auth/forms.py", "repo_name": "StombieIT/flask-app", "src_encoding": "UTF-8", "text": "from flask_wtf import FlaskForm\r\nfrom wtforms import StringField, SubmitField, PasswordField\r\nfrom wtforms.validators import DataRequired, Length, EqualTo, Email, Regexp\r\n\r\nLOGIN_RE = r'^[a-z0-9]+$'\r\nPASSWORD_RE = r'^[a-zA-Z0-9`\\-=;\"\\\\,.\\/~!@#\\$%\\^&\\*\\(\\)_\\+:\\|<>\\?]+$'\r\n\r\n\r\nclass UserLoginForm(FlaskForm):\r\n\tlogin = StringField(\r\n\t\tu'Логин',\r\n\t\tvalidators=[\r\n\t\t\tDataRequired(message=u'Логин обязателен к заполнению'),\r\n\t\t\tLength(1, 64, message=u'Длина логина должна быть не менее 1 и не более 64 символов'),\r\n\t\t\tRegexp(LOGIN_RE, message=u'Логин указан некорректно')\r\n\t\t]\r\n\t)\r\n\tpassword = PasswordField(\r\n\t\tu'Пароль',\r\n\t\tvalidators=[\r\n\t\t\tDataRequired(message=u'Пароль обязателен к заполнению'),\r\n\t\t\tLength(6, 64, message=u'Длина пароля должна быть не менее 6 и не более 64 символов'),\r\n\t\t\tRegexp(PASSWORD_RE, message=u'Пароль указан некорректно')\r\n\t\t]\r\n\t)\r\n\tsubmit = SubmitField(u'Отправить')\r\n\r\n\r\nclass UserRegisterForm(FlaskForm):\r\n\tlogin = StringField(\r\n\t\tu'Логин',\r\n\t\tvalidators=[\r\n\t\t\tDataRequired(message=u'Логин обязателен к заполнению'),\r\n\t\t\tLength(1, 64, message=u'Длина логина должна быть не менее 1 и не более 64 символов'),\r\n\t\t\tRegexp(LOGIN_RE, message=u'Логин указан некорректно')\r\n\t\t],\r\n\t)\r\n\temail = StringField(\r\n\t\tu'Почтовый адрес',\r\n\t\tvalidators=[\r\n\t\t\tDataRequired(message=u'Почтовый адрес обязателен к заполнению'),\r\n\t\t\tLength(1, 64, message=u'Длина почтового адреса должна быть не менее 1 и не более 64 символов'),\r\n\t\t\tEmail(message=u'Почтовый адрес указан некорректно')\r\n\t\t]\r\n\t)\r\n\tpassword = PasswordField(\r\n\t\tu'Пароль',\r\n\t\tvalidators=[\r\n\t\t\tDataRequired(message=u'Пароль обязателен к заполнению'),\r\n\t\t\tLength(6, 64, message=u'Длина пароля должна быть не менее 6 и не более 64 символов'),\r\n\t\t\tRegexp(PASSWORD_RE, message=u'Пароль указан некорректно')\r\n\t\t]\r\n\t)\r\n\tpassword_repeat = PasswordField(\r\n\t\tu'Повторный пароль',\r\n\t\tvalidators=[\r\n\t\t\tDataRequired(message=u'Повторный пароль обязателен к заполнению'),\r\n\t\t\tLength(6, 64, message=u'Длина повторного пароля должна быть не менее 6 и не более 64 символов'),\r\n\t\t\tRegexp(PASSWORD_RE, message=u'Повторный пароль указан некорректно'),\r\n\t\t\tEqualTo('password', message=u'Пароли должны совпадать')\r\n\t\t]\r\n\t)\r\n\tsubmit = SubmitField(u'Отправить')\r\n" }, { "alpha_fraction": 0.8053097128868103, "alphanum_fraction": 0.8053097128868103, "avg_line_length": 26.75, "blob_id": "5f88ff0528da4b7fe77a2a5235ddc68312b35f11", "content_id": "3d2caa2ea4a273cf83fcb3901385df084a1014ef", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 113, "license_type": "no_license", "max_line_length": 41, "num_lines": 4, "path": "/appvue/services/login_manager.py", "repo_name": 
"StombieIT/flask-app", "src_encoding": "UTF-8", "text": "from flask_login import LoginManager\r\n\r\nlogin_manager = LoginManager()\r\nlogin_manager.session_protection='strong'" }, { "alpha_fraction": 0.6718606948852539, "alphanum_fraction": 0.6730827689170837, "avg_line_length": 26.46086883544922, "blob_id": "8c3ad716da48be21acc2d2d8d924299a77a5536c", "content_id": "bd0a64ff651af87631b044b7deec2f0ce3deadb0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3576, "license_type": "no_license", "max_line_length": 106, "num_lines": 115, "path": "/appvue/blueprints/auth/auth.py", "repo_name": "StombieIT/flask-app", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\nfrom flask import (\r\n\tBlueprint,\r\n\trender_template,\r\n\tredirect,\r\n\turl_for,\r\n\tflash,\r\n\trequest,\r\n\tabort,\r\n\tcurrent_app\r\n)\r\nfrom flask_login import (\r\n\tlogin_user,\r\n\tlogin_required,\r\n\tlogout_user,\r\n\tcurrent_user\r\n)\r\nfrom flask_mail import Message\r\nfrom services.login_manager import login_manager\r\nfrom services.db import db\r\nfrom services.mail import mail\r\nfrom .forms import UserLoginForm, UserRegisterForm\r\nfrom .models import User\r\n\r\nauth = Blueprint('auth', __name__)\r\n\r\nlogin_manager.login_view = 'auth.login'\r\nlogin_manager.login_message = u'Пожалуйста, войдите в аккаунт, чтобы получить доступ к данной странице'\r\nlogin_manager.login_message_category = 'info'\r\n\r\n\r\n@login_manager.user_loader\r\ndef load_user(id):\r\n\treturn User.query.get(int(id))\r\n\r\n\r\[email protected]('/logout/')\r\n@login_required\r\ndef logout():\r\n\tlogout_user()\r\n\tflash(u'Вы вышли с аккаунта', 'success')\r\n\treturn redirect(url_for('auth.login'))\r\n\r\n\r\[email protected]('/login/', methods=['GET', 'POST'])\r\ndef login():\r\n\tform = UserLoginForm()\r\n\tif form.validate_on_submit():\r\n\t\tuser = User.query.filter(User.login == form.login.data).first()\r\n\t\tif user is not None and user.check_password(form.password.data):\r\n\t\t\tlogin_user(user, True)\r\n\t\t\tflash(u'Вы вошли в аккаунт', 'success')\r\n\t\t\treturn redirect(request.args.get('next') or url_for('index'))\r\n\t\telse:\r\n\t\t\tflash(u'Не удаётся войти', 'danger')\r\n\t\t\treturn redirect(url_for('auth.login', next=request.args.get('next')))\r\n\telse:\r\n\t\tfor errors in form.errors.values():\r\n\t\t\tfor error in errors:\r\n\t\t\t\tflash(error, 'danger')\r\n\t\treturn render_template('auth/login.html', form=form)\r\n\r\n\r\[email protected]('/register/', methods=['GET', 'POST'])\r\ndef register():\r\n\tform = UserRegisterForm()\r\n\tif form.validate_on_submit():\r\n\t\tuser = User.query.filter(\r\n\t\t\tUser.login == form.login.data\r\n\t\t\tor User.email == form.email.data\r\n\t\t).first()\r\n\t\tif user is not None:\r\n\t\t\tflash(u'Аккаунт с указанными логином или почтовым адресом уже существует', 'danger')\r\n\t\t\treturn redirect(url_for('auth.register'))\r\n\t\tuser = User(\r\n\t\t\tlogin=form.login.data,\r\n\t\t\temail=form.email.data,\r\n\t\t\tpassword=form.password.data,\r\n\t\t)\r\n\t\tdb.session.add(user)\r\n\t\tflash(u'Вы зарегистрировались', 'success')\r\n\t\treturn redirect(url_for('index'))\r\n\t\t# db.session.commit()\r\n\t\t# token = user.generate_token()\r\n\t\t# message = Message(\r\n\t\t# \tu'Подтверждение регистрации',\r\n\t\t# \thtml=render_template(\r\n\t\t# \t\t'auth/mail.html',\r\n\t\t# \t\tlogin=form.login.data,\r\n\t\t# \t\tlink=url_for('auth.activate', token=token, _external=True)\r\n\t\t# \t),\r\n\t\t# 
\trecipients=[form.email.data],\r\n\t\t# )\r\n\t\t# with current_app.app_context():\r\n\t\t# \tmail.send(message)\r\n\t\t# flash(u'Письмо с подтверждением регистрации было отправлено на {e}'.format(e=form.email.data), 'info')\r\n\t\t# return redirect(url_for('auth.login'))\r\n\telse:\r\n\t\tfor errors in form.errors.values():\r\n\t\t\tfor error in errors:\r\n\t\t\t\tflash(error, 'danger')\r\n\t\treturn render_template('auth/register.html', form=form)\r\n\r\n\r\[email protected]('/activate/<token>/')\r\n@login_required\r\ndef activate(token):\r\n\tif current_user.is_active():\r\n\t\tflash(u'Вы уже подтвердили регистрацию', 'info')\r\n\t\treturn redirect(url_for('index'))\r\n\telif current_user.check_token(token):\r\n\t\tflash(u'Вы подтвердили регистрацию', 'success')\r\n\t\treturn redirect(url_for('auth.login'))\r\n\telse:\r\n\t\tabort(404)\r\n" }, { "alpha_fraction": 0.7678571343421936, "alphanum_fraction": 0.7678571343421936, "avg_line_length": 17.33333396911621, "blob_id": "41085dad064b0dde55e4cb1341748a1b0605ae72", "content_id": "89412ea3e166e9f55ccd080331b6adcbb65af2d0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 56, "license_type": "no_license", "max_line_length": 33, "num_lines": 3, "path": "/appvue/services/migrate.py", "repo_name": "StombieIT/flask-app", "src_encoding": "UTF-8", "text": "from flask_migrate import Migrate\r\n\r\nmigrate = Migrate()" }, { "alpha_fraction": 0.5170731544494629, "alphanum_fraction": 0.6682927012443542, "avg_line_length": 18.5, "blob_id": "32c39c4c8a6c73792a5013ef5db7c30c554bbcd9", "content_id": "be9643be5183f21e160777616aa03f5910ac8442", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 205, "license_type": "no_license", "max_line_length": 23, "num_lines": 10, "path": "/appvue/requirements.txt", "repo_name": "StombieIT/flask-app", "src_encoding": "UTF-8", "text": "email-validator==1.1.1\r\nFlask==1.1.2\r\nFlask-Admin==1.5.6\r\nFlask-Login==0.5.0\r\nFlask-Mail==0.9.1\r\nFlask-Migrate==2.5.3\r\nFlask-Script==2.0.6\r\nFlask-SQLAlchemy==2.4.3\r\nFlask-WTF==0.14.3\r\nitsdangerous==1.1.0\r\n" }, { "alpha_fraction": 0.5379746556282043, "alphanum_fraction": 0.5569620132446289, "avg_line_length": 20.85714340209961, "blob_id": "2b8bcf277c9cf797a339e264ec2b17a5592180b0", "content_id": "0d3d91282b9705cbbe0c51f397e6cdb3ffe974fe", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 158, "license_type": "no_license", "max_line_length": 46, "num_lines": 7, "path": "/appvue/static/flashes.js", "repo_name": "StombieIT/flask-app", "src_encoding": "UTF-8", "text": "$(document).ready(function(){\r\n\t$('div.alert button').on('click', function(){\r\n\t\t$(this).parent().slideUp(500, function(){\r\n\t\t\t$(this).remove()\r\n\t\t})\r\n\t})\r\n})" }, { "alpha_fraction": 0.6169871687889099, "alphanum_fraction": 0.620192289352417, "avg_line_length": 14.86486530303955, "blob_id": "7c9ca64b44c4118bd5b595cd9a9430ba3c0268ed", "content_id": "e3ca3c08380d6b392e08240503c707e1479f9798", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 624, "license_type": "no_license", "max_line_length": 34, "num_lines": 37, "path": "/appvue/blueprints/post/models.py", "repo_name": "StombieIT/flask-app", "src_encoding": "UTF-8", "text": "from datetime import datetime\r\nfrom services.db import db\r\n\r\n\r\nclass Post(db.Model):\r\n\t__tablename__ = 'posts'\r\n\tid = 
db.Column(\r\n\t\tdb.Integer,\r\n\t\tprimary_key=True\r\n\t)\r\n\ttitle = db.Column(\r\n\t\tdb.String(64),\r\n\t\tnullable=False\r\n\t)\r\n\tcontent = db.Column(\r\n\t\tdb.Text,\r\n\t\tnullable=False\r\n\t)\r\n\tedit = db.Column(\r\n\t\tdb.Boolean,\r\n\t\tdefault=False,\r\n\t\tnullable=False\r\n\t)\r\n\tpublication_datetime = db.Column(\r\n\t\tdb.DateTime,\r\n\t\t# pass the callable itself (not datetime.now()) so each new row gets a fresh timestamp\r\n\t\tdefault=datetime.now,\r\n\t\tnullable=False,\r\n\t\tindex=True\r\n\t)\r\n\tuser_id = db.Column(\r\n\t\tdb.Integer,\r\n\t\tdb.ForeignKey('users.id'),\r\n\t\tnullable=False\r\n\t)\r\n\r\n\tdef __repr__(self):\r\n\t\treturn '<Post %r>' % self.id\r\n" }, { "alpha_fraction": 0.6791208982467651, "alphanum_fraction": 0.6857143044471741, "avg_line_length": 19.772727966308594, "blob_id": "ab8ba3097bb9b0e6e599f64c93bccfe753a424e5", "content_id": "7d1bf6bf6339affcda621cea449ed775128c690a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 455, "license_type": "no_license", "max_line_length": 49, "num_lines": 22, "path": "/appvue/settings.py", "repo_name": "StombieIT/flask-app", "src_encoding": "UTF-8", "text": "# Main\r\nDEBUG = True\r\nSECRET_KEY = '{SECRET_KEY}'\r\n\r\n# Database\r\nSQLALCHEMY_DATABASE_URI = 'sqlite:///database.db'\r\nSQLALCHEMY_COMMIT_ON_TEARDOWN = True\r\nSQLALCHEMY_TRACK_MODIFICATIONS = True\r\n\r\n# Mail\r\nMAIL_SERVER = '{MAIL_SERVER}'\r\nMAIL_PORT = 587\r\nMAIL_USE_TLS = True\r\nMAIL_USERNAME = '{MAIL_USERNAME}'\r\nMAIL_PASSWORD = '{MAIL_PASSWORD}'\r\nMAIL_DEFAULT_SENDER = MAIL_USERNAME\r\n\r\n# Admin\r\nFLASK_ADMIN_SWATCH = 'cerulean'\r\n\r\n# WTF\r\nWTF_CSRF_TIME_LIMIT = None" }, { "alpha_fraction": 0.652577817440033, "alphanum_fraction": 0.661402702331543, "avg_line_length": 23.93975830078125, "blob_id": "a9607f1aa77eb927fab0ed746bf3a126066741ff", "content_id": "d7cc8385db5625e70479305e48f8076e618b2584", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2200, "license_type": "no_license", "max_line_length": 94, "num_lines": 83, "path": "/appvue/blueprints/post/post.py", "repo_name": "StombieIT/flask-app", "src_encoding": "UTF-8", "text": "from datetime import datetime\r\nfrom flask import (\r\n\tBlueprint,\r\n\trender_template,\r\n\tredirect,\r\n\turl_for,\r\n\tflash,\r\n\tabort\r\n)\r\nfrom flask_login import current_user, login_required\r\nfrom services.db import db\r\nfrom .models import Post\r\nfrom .forms import PostForm\r\n\r\npost = Blueprint('post', __name__)\r\n\r\n\r\[email protected]('/<int:page>/')\r\ndef index(page):\r\n\tpages = Post.query.order_by(Post.publication_datetime.desc()).paginate(page=page, per_page=3)\r\n\treturn render_template('post/index.html', page=page)\r\n\r\n\r\[email protected]('/create/', methods=['GET', 'POST'])\r\n@login_required\r\ndef create():\r\n\tform = PostForm()\r\n\tif form.validate_on_submit():\r\n\t\tpost = Post(\r\n\t\t\ttitle=form.title.data,\r\n\t\t\tcontent=form.content.data,\r\n\t\t\tuser_id=current_user.id\r\n\t\t)\r\n\t\tdb.session.add(post)\r\n\t\tflash(u'Вы создали пост', 'success')\r\n\t\treturn redirect(url_for('post.index', page=1))\r\n\telse:\r\n\t\tfor errors in form.errors.values():\r\n\t\t\tfor error in errors:\r\n\t\t\t\tflash(error, 'danger')\r\n\t\treturn render_template('post/create.html', form=form)\r\n\r\n\r\[email protected]('/view/<int:id>/')\r\ndef view(id):\r\n\tpost = Post.query.get_or_404(id)\r\n\treturn render_template('post/view.html', id=id)\r\n\r\n\r\[email protected]('/edit/<int:id>/', methods=['GET', 
'POST'])\r\n@login_required\r\ndef edit(id):\r\n\tpost = Post.query.get_or_404(id)\r\n\tif current_user.id == post.user.id:\r\n\t\tform = PostForm()\r\n\t\tif form.validate_on_submit():\r\n\t\t\tpost.title = form.title.data\r\n\t\t\tpost.content = form.content.data\r\n\t\t\tpost.edit = True\r\n\t\t\tpost.publication_datetime = datetime.now()\r\n\t\t\tdb.session.add(post)\r\n\t\t\tflash(u'Вы отредактировали пост', 'success')\r\n\t\t\treturn redirect(url_for('post.index', page=1))\r\n\t\telse:\r\n\t\t\tform.content.data = post.content\r\n\t\t\tfor errors in form.errors.values():\r\n\t\t\t\tfor error in errors:\r\n\t\t\t\t\tflash(error, 'danger')\r\n\t\t\treturn render_template('post/edit.html', form=form, post=post)\r\n\telse:\r\n\t\tabort(403)\r\n\r\n\r\[email protected]('/delete/<int:id>/')\r\n@login_required\r\ndef delete(id):\r\n\tpost = Post.query.get_or_404(id)\r\n\tif current_user.id == post.user.id:\r\n\t\tdb.session.delete(post)\r\n\t\tflash(u'Вы удалили пост', 'success')\r\n\t\treturn redirect(url_for('post.index', page=1))\r\n\telse:\r\n\t\tabort(403)\r\n" }, { "alpha_fraction": 0.6369951963424683, "alphanum_fraction": 0.6547980904579163, "avg_line_length": 20.578432083129883, "blob_id": "285cc41d5447cda4a7c69b3c611513a1c3242a55", "content_id": "4e8eca271c3bcf05142a4a2c8331ea58dd632b26", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2303, "license_type": "no_license", "max_line_length": 86, "num_lines": 102, "path": "/appvue/blueprints/auth/models.py", "repo_name": "StombieIT/flask-app", "src_encoding": "UTF-8", "text": "from itsdangerous import TimedJSONWebSignatureSerializer as Serializer\r\nfrom werkzeug.security import generate_password_hash, check_password_hash\r\nfrom flask import current_app\r\nfrom flask_login import UserMixin\r\nfrom services.db import db\r\nfrom blueprints.post.models import Post\r\n\r\n\r\nclass User(db.Model, UserMixin):\r\n\t__tablename__ = 'users'\r\n\tid = db.Column(\r\n\t\tdb.Integer,\r\n\t\tprimary_key=True,\r\n\t)\r\n\tlogin = db.Column(\r\n\t\tdb.String(64),\r\n\t\tunique=True,\r\n\t\tnullable=False,\r\n\t)\r\n\temail = db.Column(\r\n\t\tdb.String(64),\r\n\t\tunique=True,\r\n\t\tnullable=False,\r\n\t)\r\n\tpassword = db.Column(\r\n\t\tdb.String(128),\r\n\t\tnullable=False,\r\n\t)\r\n\t# active = db.Column(\r\n\t# \tdb.Boolean,\r\n\t# \tdefault=False,\r\n\t# \tnullable=False,\r\n\t# )\r\n\trole_id = db.Column(\r\n\t\tdb.Integer,\r\n\t\tdb.ForeignKey('roles.id'),\r\n\t\tnullable=False,\r\n\t)\r\n\tposts = db.relationship(\r\n\t\t'Post',\r\n\t\tbackref='user',\r\n\t\tlazy='dynamic'\r\n\t)\r\n\r\n\tdef __init__(self, *, login, email, password, role_id=1):\r\n\t\tself.login = login\r\n\t\tself.email = email\r\n\t\tself.password = generate_password_hash(password)[21:]\r\n\t\tself.role_id = role_id\r\n\r\n\tdef __repr__(self):\r\n\t\treturn '<User %r>' % self.login\r\n\r\n\tdef check_password(self, password):\r\n\t\treturn check_password_hash('pbkdf2:sha256:150000$%s' % self.password, password)\r\n\r\n\tdef generate_token(self, expiration=60*60):\r\n\t\tserializer = Serializer(current_app.config['SECRET_KEY'], expiration)\r\n\t\treturn serializer.dumps({'active': self.id})\r\n\r\n\tdef check_token(self, token):\r\n\t\tserializer = Serializer(current_app.config['SECRET_KEY'])\r\n\t\tdata = serializer.loads(token)\r\n\t\tif data['active'] == self.id:\r\n\t\t\tself.active = True\r\n\t\t\tdb.session.add(self)\r\n\t\t\treturn True\r\n\t\treturn False\r\n\r\n\tdef is_active(self):\r\n\t\t# the 'active' column above is commented out, so self.active would raise\r\n\t\t# AttributeError; fall back to True until the activation flow is re-enabled\r\n\t\treturn getattr(self, 'active', True)\r\n\r\n\tdef is_allowed(self, permission):\r\n\t\treturn self.is_authenticated and self.role.permission & permission == permission\r\n\r\n\r\nclass Role(db.Model):\r\n\t__tablename__ = 'roles'\r\n\tid = db.Column(\r\n\t\tdb.Integer,\r\n\t\tprimary_key=True,\r\n\t)\r\n\tname = db.Column(\r\n\t\tdb.String(64),\r\n\t\tunique=True,\r\n\t)\r\n\tpermission = db.Column(\r\n\t\tdb.Integer,\r\n\t)\r\n\tusers = db.relationship(\r\n\t\t'User',\r\n\t\tbackref='role',\r\n\t\tlazy='dynamic',\r\n\t)\r\n\r\n\tdef __repr__(self):\r\n\t\treturn '<Role %r>' % self.name\r\n\r\n\tclass Permission:\r\n\t\tCREATE_POST = 0x1 # 0b0001\r\n\t\tMODERATE = 0x8 # 0b1000\r\n\t\tADMINISTER = 0xf # 0b1111\r\n" }, { "alpha_fraction": 0.761695921421051, "alphanum_fraction": 0.761695921421051, "avg_line_length": 25.440000534057617, "blob_id": "52cc470febbfbabc22e124119cfe3b642fce5cea", "content_id": "e0a7246e10d235b276ec25668fb653e45889490d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 684, "license_type": "no_license", "max_line_length": 48, "num_lines": 25, "path": "/appvue/app.py", "repo_name": "StombieIT/flask-app", "src_encoding": "UTF-8", "text": "from flask import Flask\r\nimport settings\r\nfrom services.admin import admin\r\nfrom services.login_manager import login_manager\r\nfrom services.mail import mail\r\nfrom services.db import db\r\nfrom services.migrate import migrate\r\nfrom blueprints.api.api import api\r\nfrom blueprints.auth.auth import auth\r\nfrom blueprints.post.post import post\r\n\r\napp = Flask(__name__)\r\napp.config.from_object(settings)\r\n\r\n# Apps\r\nadmin.init_app(app)\r\nlogin_manager.init_app(app)\r\nmail.init_app(app)\r\ndb.init_app(app)\r\nmigrate.init_app(app, db)\r\n\r\n# Blueprints\r\napp.register_blueprint(auth, url_prefix='/auth')\r\napp.register_blueprint(post, url_prefix='/post')\r\napp.register_blueprint(api, url_prefix='/api')" } ]
16
johntango/flaskBasicMiniProject007
https://github.com/johntango/flaskBasicMiniProject007
b53057032be20b21e00754e5d2fdfa2055f2ef96
ff0ac5b2c39d10fccbc9f288a0d3b97b0abe54e7
1050fbb02665c2cbe051f256e0af29f1e5ffc11c
refs/heads/master
2023-04-29T15:05:04.911041
2021-05-20T20:50:23
2021-05-20T20:50:23
369,334,874
0
1
null
null
null
null
null
[ { "alpha_fraction": 0.5906746983528137, "alphanum_fraction": 0.5969058871269226, "avg_line_length": 27.036144256591797, "blob_id": "ab2bf19c96dfd8a11e360caf1f84be34d8cc2b69", "content_id": "3689b15fbad9562302db86dddff71e1bee397319", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4654, "license_type": "no_license", "max_line_length": 79, "num_lines": 166, "path": "/app.py", "repo_name": "johntango/flaskBasicMiniProject007", "src_encoding": "UTF-8", "text": "from flask import Flask, request, render_template, session\nfrom flask import redirect, make_response, jsonify\nfrom functools import wraps\nimport os\n\nfrom flask_restful import Resource, Api\nfrom flask_jwt_extended import create_access_token\nfrom flask_jwt_extended import jwt_required, verify_jwt_in_request\nfrom flask_jwt_extended import JWTManager, get_jwt_identity, get_jwt_claims\nfrom flask_jwt_extended import set_access_cookies\n\n\napp = Flask(__name__)\napp.config[\"JWT_SECRET_KEY\"] = \"secretkey\"\napp.config[\"JWT_TOKEN_LOCATION\"] = [\"cookies\"]\napp.config[\"JWT_COOKIE_SECURE\"] = False\njwt = JWTManager(app)\njwt.init_app(app)\napp = Flask(__name__)\napp.secret_key = \"secretkey\"\napp.config[\"UPLOADED_PHOTOS_DEST\"] = \"static\"\napp.config[\"JWT_SECRET_KEY\"] = \"secretkey\"\napp.config[\"JWT_TOKEN_LOCATION\"] = [\"cookies\"]\napp.config[\"JWT_COOKIE_SECURE\"] = False\napp.config[\"JWT_COOKIE_CSRF_PROTECT\"] = False\n\njwt = JWTManager(app)\njwt.init_app(app)\n\nbooks = [\n {\n \"id\": 1,\n \"author\": \"Eric Reis\",\n \"country\": \"USA\",\n \"language\": \"English\",\n \"title\": \"Lean Startup\",\n \"year\": 2011,\n },\n {\n \"id\": 2,\n \"author\": \"Mark Schwartz\",\n \"country\": \"USA\",\n \"language\": \"English\",\n \"title\": \"A Seat at the Table\",\n \"year\": 2017,\n },\n {\n \"id\": 3,\n \"author\": \"James Womak\",\n \"country\": \"USA\",\n \"language\": \"English\",\n \"title\": \"Lean Thinking\",\n \"year\": 1996,\n },\n]\n\nusers = [\n {\"username\": \"testuser\", \"password\": \"testuser\", \"role\": \"admin\"},\n {\"username\": \"John\", \"password\": \"John\", \"role\": \"reader\"},\n {\"username\": \"Anne\", \"password\": \"Anne\", \"role\": \"admin\"}\n]\n\n\n# def admin_required(fn):\n# @wraps(fn)\n# def wrapper(*args, **kwargs):\n\n# return fn(*args, **kwargs)\n# return wrapper\n\n\ndef checkUser(username, password):\n for user in users:\n if username in user[\"username\"] and password in user[\"password\"]:\n return {\"username\": user[\"username\"], \"role\": user[\"role\"]}\n return None\n\n\[email protected](\"/\", methods=[\"GET\"])\ndef firstRoute():\n return render_template(\"register.html\")\n\n\[email protected](\"/login\", methods=[\"GET\", \"POST\"])\ndef login():\n if request.method == \"POST\":\n username = request.form[\"username\"]\n password = request.form[\"password\"]\n validUser = checkUser(username, password)\n if validUser != None:\n # set JWT token\n\n user_claims = {\"role\": validUser[\"roles\"]}\n access_token = create_access_token(\n username, user_claims=user_claims)\n\n response = make_response(\n render_template(\n \"index.html\", title=\"books\", username=username, books=books\n )\n )\n response.status_code = 200\n # add jwt-token to response headers\n # response.headers.extend({\"jwt-token\": access_token})\n set_access_cookies(response, access_token)\n return response\n\n return render_template(\"register.html\")\n\n\[email protected](\"/logout\")\ndef logout():\n # invalidate the JWT token\n\n 
return \"Logged Out of Books\"\n\n\[email protected](\"/books\", methods=[\"GET\"])\n@jwt_required\ndef getBooks():\n try:\n username = get_jwt_identity()\n return render_template('books.html', username=username, books=books)\n except:\n return render_template(\"register.html\")\n\n\[email protected](\"/addbook\", methods=[\"GET\", \"POST\"])\n@jwt_required\n# @admin_required\ndef addBook():\n username = get_jwt_identity()\n if request.method == \"GET\":\n return render_template(\"addBook.html\", username=username)\n if request.method == \"POST\":\n # expects pure json with quotes everywheree\n author = request.form.get(\"author\")\n title = request.form.get(\"title\")\n newbook = {\"author\": author, \"title\": title}\n books.append(newbook)\n return render_template(\n \"books.html\", books=books, username=username, title=\"books\"\n )\n else:\n return 400\n\n\[email protected](\"/addimage\", methods=[\"GET\", \"POST\"])\n@jwt_required\n@admin_required\ndef addimage():\n if request.method == \"GET\":\n return render_template(\"addimage.html\")\n elif request.method == \"POST\":\n image = request.files[\"image\"]\n id = request.form.get(\"number\") # use id to number the image\n imagename = \"image\" + id + \".png\"\n image.save(os.path.join(app.config[\"UPLOADED_PHOTOS_DEST\"], imagename))\n print(image.filename)\n return \"image loaded\"\n\n return \"all done\"\n\n\nif __name__ == \"__main__\":\n app.run(debug=True, host=\"0.0.0.0\", port=5000)\n" } ]
1
JayCax/CS-493
https://github.com/JayCax/CS-493
4a9e1ad52a0ca312f9f0c2cea723795541437712
03847775b235b1c0c9264d3f2c07d402926f64b5
137d4008f72f3c7762871cabcb1810aea7416c01
refs/heads/main
2023-03-15T20:04:05.981748
2021-01-08T20:27:57
2021-01-08T20:27:57
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6204718351364136, "alphanum_fraction": 0.6375626921653748, "avg_line_length": 37.73381423950195, "blob_id": "4ac4ab4c98e3d4c0f101140f585197c42e8ceae2", "content_id": "a4b54f3f435dff4311089918c6f013fe1552cd7e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5383, "license_type": "no_license", "max_line_length": 182, "num_lines": 139, "path": "/assignment-8/main.py", "repo_name": "JayCax/CS-493", "src_encoding": "UTF-8", "text": "'''\n\tHarinder Gakhal\n\tCS493 - Assignment 3: Build a Restful API\n\t10/20/2020\n'''\n\nfrom google.cloud import datastore\nfrom flask import Flask, request, jsonify\nimport json\n\napp = Flask(__name__)\nclient = datastore.Client()\n\[email protected]('/')\ndef index():\n\treturn \"Please refer to the API Spec to use this API.\"\\\n\[email protected]('/boats', methods=['POST','GET'])\ndef boats_get_post():\n\tif request.method == 'POST':\n\t\tcontent = request.get_json()\n\t\tif len(content) != 3:\n\t\t\treturn (jsonify({\"Error\": \"The request object is missing at least one of the required attributes\"}), 400)\n\t\tnew_boat = datastore.entity.Entity(key=client.key(\"boats\"))\n\t\tnew_boat.update({\"name\": content[\"name\"], \"type\": content[\"type\"], \"length\": content[\"length\"]})\n\t\tclient.put(new_boat)\n\t\treturn (jsonify({\"id\": new_boat.key.id, \"name\": content[\"name\"], \"type\": content[\"type\"], \"length\": content[\"length\"], \"self\": str(request.url) + \"/\" + str(new_boat.key.id)}), 201)\n\telif request.method == 'GET':\n\t\tquery = client.query(kind=\"boats\")\n\t\tresults = list(query.fetch())\n\t\tfor entity in results:\n\t\t\tentity[\"id\"] = entity.key.id\n\t\t\tentity[\"self\"] = str(request.url) + \"/\" + str(entity.key.id)\n\t\treturn (jsonify(results), 200)\n\telse:\n\t\treturn 'Method not recogonized'\n\[email protected]('/boats/<id>', methods=['PUT','DELETE','GET','PATCH'])\ndef boats_put_delete(id):\n\tif request.method == 'PUT' or request.method == 'PATCH':\n\t\tcontent = request.get_json()\n\t\tif len(content) != 3:\n\t\t\treturn (jsonify({\"Error\": \"The request object is missing at least one of the required attributes\"}), 400)\n\t\tboat_key = client.key(\"boats\", int(id))\n\t\tboat = client.get(key=boat_key)\n\t\tif boat == None:\n\t\t\treturn (jsonify({\"Error\": \"No boat with this boat_id exists\"}), 404)\n\t\tboat.update({\"name\": content[\"name\"], \"type\": content[\"type\"], \"length\": content[\"length\"]})\n\t\tclient.put(boat)\n\t\treturn (jsonify({\"id\": boat.key.id, \"name\": content[\"name\"], \"type\": content[\"type\"], \"length\": content[\"length\"], \"self\": str(request.url)}), 200)\n\telif request.method == 'DELETE':\n\t\tboat_key = client.key(\"boats\", int(id))\n\t\tquery = client.query(kind=\"slips\")\n\t\tquery.add_filter('current_boat', '=', int(id))\n\t\tresult = list(query.fetch())\n\t\tif len(result) > 0:\n\t\t\tresult[0][\"current_boat\"] = None\n\t\t\tclient.put(result[0])\n\t\tif client.get(key=boat_key) == None:\n\t\t\treturn (jsonify({\"Error\": \"No boat with this boat_id exists\"}), 404)\n\t\tclient.delete(boat_key)\n\t\treturn ('',204)\n\telif request.method == 'GET':\n\t\tcontent = request.get_json()\n\t\tboat_key = client.key(\"boats\", int(id))\n\t\tboat = client.get(key=boat_key)\n\t\tif boat == None:\n\t\t\treturn (jsonify({\"Error\": \"No boat with this boat_id exists\"}), 404)\n\t\tboat[\"id\"] = id\n\t\tboat[\"self\"] = str(request.url)\n\t\treturn (jsonify(boat), 200)\n\telse:\n\t\treturn 
'Method not recogonized'\n\n# ======================================= SLIPS =======================================\n\[email protected]('/slips', methods=['POST','GET'])\ndef slips_get_post():\n\tif request.method == 'POST':\n\t\tcontent = request.get_json()\n\t\tif len(content) != 1:\n\t\t\treturn (jsonify({\"Error\": \"The request object is missing the required number\"}), 400)\n\t\tnew_slip = datastore.entity.Entity(key=client.key(\"slips\"))\n\t\tnew_slip.update({\"number\": content[\"number\"], \"current_boat\": None})\n\t\tclient.put(new_slip)\n\t\treturn (jsonify({\"id\": new_slip.key.id, \"number\": content[\"number\"], \"current_boat\": None, \"self\": str(request.url) + \"/\" + str(new_slip.key.id)}), 201)\n\telif request.method == 'GET':\n\t\tquery = client.query(kind=\"slips\")\n\t\tresults = list(query.fetch())\n\t\tfor entity in results:\n\t\t\tentity[\"id\"] = entity.key.id\n\t\t\tentity[\"self\"] = str(request.url) + \"/\" + str(entity.key.id)\n\t\treturn (jsonify(results), 200)\n\telse:\n\t\treturn 'Method not recogonized'\n\[email protected]('/slips/<id>', methods=['DELETE','GET',])\ndef slips_put_delete(id):\n\tif request.method == 'DELETE':\n\t\tslip_key = client.key(\"slips\", int(id))\n\t\tif client.get(key=slip_key) == None:\n\t\t\treturn (jsonify({\"Error\": \"No slip with this slip_id exists\"}), 404)\n\t\tclient.delete(slip_key)\n\t\treturn ('',204)\n\telif request.method == 'GET':\n\t\tcontent = request.get_json()\n\t\tslip_key = client.key(\"slips\", int(id))\n\t\tslip = client.get(key=slip_key)\n\t\tif slip == None:\n\t\t\treturn (jsonify({\"Error\": \"No slip with this slip_id exists\"}), 404)\n\t\tslip[\"id\"] = id\n\t\tslip[\"self\"] = str(request.url)\n\t\treturn (jsonify(slip), 200)\n\telse:\n\t\treturn 'Method not recogonized'\n\[email protected]('/slips/<slip_id>/<boat_id>', methods=['PUT', 'DELETE'])\ndef slips_boats_put(slip_id, boat_id):\n\tif request.method == 'PUT':\n\t\tslip = client.get(key=client.key(\"slips\", int(slip_id)))\n\t\tboat = client.get(key=client.key(\"boats\", int(boat_id)))\n\t\tif boat == None or slip == None:\n\t\t\treturn(jsonify({\"Error\": \"The specified boat and/or slip does not exist\"}), 404)\n\t\telif slip[\"current_boat\"] != None:\n\t\t\treturn(jsonify({\"Error\": \"The slip is not empty\"}), 403)\n\t\tslip.update({\"current_boat\": boat.key.id})\n\t\tclient.put(slip)\n\t\treturn('', 204)\n\telif request.method == 'DELETE':\n\t\tslip = client.get(key=client.key(\"slips\", int(slip_id)))\n\t\tboat = client.get(key=client.key(\"boats\", int(boat_id)))\n\t\tif boat == None or slip == None or int(slip[\"current_boat\"]) != int(boat_id):\n\t\t\treturn(jsonify({\"Error\": \"No boat with this boat_id is at the slip with this slip_id\"}), 404)\n\t\tslip.update({\"current_boat\": None})\n\t\tclient.put(slip)\n\t\treturn('', 204)\n\t\nif __name__ == '__main__':\n\tapp.run(host='0.0.0.0', port=8000, debug=True)" }, { "alpha_fraction": 0.8045112490653992, "alphanum_fraction": 0.829573929309845, "avg_line_length": 79, "blob_id": "eb0a7fe1b1189f3357dcef156771fc593572445d", "content_id": "4dd742105cf7500c0f9bdadaeecf576ec9ec3174", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 399, "license_type": "no_license", "max_line_length": 202, "num_lines": 5, "path": "/README.md", "repo_name": "JayCax/CS-493", "src_encoding": "UTF-8", "text": "# CS 493 Cloud Application \nThis repository is an archive of all the programming assignments for CS 493 at Oregon State university taken fall 
quarter 2020, taught by Nauman Chaudhry.\n\n## Course Description\nCovers developing RESTful cloud services, an approach based on representational state transfer technology, an architectural style and approach to communications used in modern cloud services development." }, { "alpha_fraction": 0.6808042526245117, "alphanum_fraction": 0.6938422918319702, "avg_line_length": 34.372222900390625, "blob_id": "af918fc5657e101d5606e68bc0aa2d69d4c96134", "content_id": "763e2cacba28234a3003f5acf33c7ecc38fdebc4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6366, "license_type": "no_license", "max_line_length": 174, "num_lines": 180, "path": "/assignment-7/main.py", "repo_name": "JayCax/CS-493", "src_encoding": "UTF-8", "text": "# CS493 - Assignment 7: More Authentication and Authorization\n# Harinder Gakhal\nfrom google.cloud import datastore\nfrom flask import Flask, request, jsonify\nfrom requests_oauthlib import OAuth2Session\nimport json\nfrom google.oauth2 import id_token\nfrom google.auth import crypt\nfrom google.auth import jwt\nfrom google.auth.transport import requests\nimport constants\n\n# This disables the requirement to use HTTPS so that you can test locally.\nimport os \nos.environ['OAUTHLIB_INSECURE_TRANSPORT'] = '1'\n\napp = Flask(__name__)\nclient = datastore.Client()\n\n# These should be copied from an OAuth2 Credential section at\n# https://console.cloud.google.com/apis/credentials\nclient_id = constants.CLIENT_ID\nclient_secret = constants.CLIENT_SECRET\n\n# This is the page that you will use to decode and collect the info from\n# the Google authentication flow\nredirect_uri = 'https://cs493-a7-hg.wl.r.appspot.com/oauth'\n\n# These let us get basic info to identify a user and not much else\n# they are part of the Google People API\nscope = ['https://www.googleapis.com/auth/userinfo.email', 'https://www.googleapis.com/auth/userinfo.profile', 'openid']\noauth = OAuth2Session(client_id, redirect_uri=redirect_uri, scope=scope)\n\n# This link will redirect users to begin the OAuth flow with Google\[email protected]('/')\ndef index():\n\tauthorization_url, state = oauth.authorization_url(\n\t\t'https://accounts.google.com/o/oauth2/auth',\n\t\t# access_type and prompt are Google specific extra\n\t\t# parameters.\n\t\taccess_type=\"offline\", prompt=\"select_account\")\n\treturn '<h1>Welcome</h1>\\n <p>Click <a href=%s>here</a> to get your JWT.</p>' % authorization_url\n\n# This is where users will be redirected back to and where you can collect\n# the JWT for use in future requests\[email protected]('/oauth')\ndef oauthroute():\n\ttoken = oauth.fetch_token('https://accounts.google.com/o/oauth2/token', authorization_response=request.url, client_secret=client_secret)\n\treq = requests.Request()\n\tid_info = id_token.verify_oauth2_token(token['id_token'], req, client_id)\n\n\t# return \"Your JWT is: <p style=\\\"font-size:12px;\\\">%s</p>\" % token['id_token']\n\treturn (jsonify({\"jwt\": token['id_token']}), 200)\n\n\n# This page demonstrates verifying a JWT. id_info['email'] contains\n# the user's email address and can be used to identify them\n# this is the code that could prefix any API call that needs to be\n# tied to a specific user by checking that the email in the verified\n# JWT matches the email associated to the resource being accessed.\[email protected]('/verify-jwt')\ndef verify():\n\treq = requests.Request()\n\n\ttry:\n\t\tid_info = id_token.verify_oauth2_token(request.args['jwt'], req, client_id)\n\texcept:\n\t\treturn('Could not verify JWT!', 401)\n\n\treturn repr(id_info) + \"<br><br> the user is: \" + id_info['email']\n\[email protected]('/boats', methods=['POST','GET'])\ndef boats_get_post():\n\tif request.method == 'POST':\n\t\t# Get JWT from Authorization header\n\t\treq = requests.Request()\n\t\tjwt_token = request.headers.get('Authorization')\n\t\tif jwt_token:\n\t\t\tjwt_token = jwt_token.split(\" \")[1]\n\t\t\t# Check to see if JWT is valid\n\t\t\ttry:\n\t\t\t\tjwt_sub = id_token.verify_oauth2_token(jwt_token, req, client_id)['sub']\n\t\t\texcept:\n\t\t\t\treturn('Could not verify JWT!\\n', 401)\n\t\telse:\n\t\t\t# Return 401 if no JWT is given\n\t\t\treturn (jsonify('JWT was not given!'), 401)\n\n\t\t# Grab content from request body\n\t\tcontent = request.get_json()\n\n\t\t# Check to see if all properties are given. - No need to validate\n\t\tif len(content) != 4:\n\t\t\treturn (jsonify({\"Error\": \"The request object is missing at least one of the required attributes\"}), 400)\n\n\t\t# Create a new boat entity\n\t\tnew_boat = datastore.entity.Entity(key=client.key(\"boats\"))\n\t\tnew_boat.update({\"name\": content[\"name\"], \"type\": content[\"type\"], \"length\": content[\"length\"], \"public\": content[\"public\"], \"owner\": jwt_sub})\n\t\tclient.put(new_boat) # Upload boat object to Datastore\n\n\t\t# Return boat object\n\t\treturn (jsonify({\"id\": new_boat.key.id, \"name\": content[\"name\"], \"type\": content[\"type\"], \"length\": content[\"length\"], \"public\": content[\"public\"], \"owner\": jwt_sub}), 201)\n\telif request.method == 'GET':\n\t\tlist_public = False\n\n\t\t# Get JWT from Authorization header\n\t\treq = requests.Request()\n\t\tjwt_token = request.headers.get('Authorization')\n\t\t# Check if JWT is valid/exists - if not list public boats\n\t\tif jwt_token:\n\t\t\tjwt_token = jwt_token.split(\" \")[1]\n\t\t\ttry:\n\t\t\t\tjwt_sub = id_token.verify_oauth2_token(jwt_token, req, client_id)['sub']\n\t\t\texcept:\n\t\t\t\tlist_public = True\n\t\telse:\n\t\t\tlist_public = True\n\n\t\t# Run query on database - public or owned by current user\n\t\tquery = client.query(kind=\"boats\")\n\t\tif list_public:\n\t\t\tquery.add_filter(\"public\", \"=\", True)\n\t\telse:\n\t\t\tquery.add_filter(\"owner\", \"=\", jwt_sub)\n\t\tresults = list(query.fetch())\n\n\t\t# Add respective id to each boat\n\t\tfor entity in results:\n\t\t\tentity[\"id\"] = entity.key.id\n\n\t\treturn (jsonify(results), 200)\n\telse:\n\t\treturn 'Method not recognized'\n\[email protected]('/owners/<owner_id>/boats', methods=['GET'])\ndef boats_get(owner_id):\n\tif request.method == 'GET':\n\t\t# Search the database for all boats with the owner_id and that are public\n\t\tquery = client.query(kind=\"boats\")\n\t\tquery.add_filter(\"public\", \"=\", True)\n\t\tquery.add_filter(\"owner\", \"=\", owner_id)\n\t\tresults = list(query.fetch())\n\n\t\t# Add respective id to each boat\n\t\tfor entity in results:\n\t\t\tentity[\"id\"] = entity.key.id\n\n\t\treturn (jsonify(results), 200)\n\[email protected]('/boats/<boat_id>', 
methods=['DELETE'])\ndef delete_boat(boat_id):\n\tif request.method == 'DELETE':\n\t\t# Get JWT from Authorization header\n\t\treq = requests.Request()\n\t\tjwt_token = request.headers.get('Authorization')\n\t\tif jwt_token:\n\t\t\tjwt_token = jwt_token.split(\" \")[1]\n\t\t\t# Check to see if JWT is valid\n\t\t\ttry:\n\t\t\t\tjwt_sub = id_token.verify_oauth2_token(jwt_token, req, client_id)['sub']\n\t\t\texcept:\n\t\t\t\treturn('Could not verify JWT!\\n', 401)\n\t\telse:\n\t\t\t# Return 401 if no JWT is given\n\t\t\treturn (jsonify('JWT was not given!'), 401)\n\t\t\n\t\t# Find the boat using boat_id\n\t\tboat_key = client.key(\"boats\", int(boat_id))\n\t\tboat = client.get(key=boat_key)\n\t\tif boat == None:\n\t\t\treturn (jsonify({'Error': 'No boat with this boat_id exists'}), 403)\n\t\telif boat['owner'] != jwt_sub:\n\t\t\treturn (jsonify({'Error': 'This boat is owned by someone else!'}), 403)\n\n\t\t# Delete boat from Datastore\n\t\tclient.delete(boat_key)\n\t\treturn (jsonify(''), 204)\n\nif __name__ == '__main__':\n\tapp.run(host='127.0.0.1', port=8080, debug=True)" }, { "alpha_fraction": 0.6257529854774475, "alphanum_fraction": 0.6388607621192932, "avg_line_length": 35.27797317504883, "blob_id": "3b2a2f010bc5d8b24debf9ea9891378df6b40bc6", "content_id": "7453b202c116ecdfd8a779adcd995dc53cb90b25", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 20751, "license_type": "no_license", "max_line_length": 165, "num_lines": 572, "path": "/final/main.py", "repo_name": "JayCax/CS-493", "src_encoding": "UTF-8", "text": "# CS493 - Portfolio Assignment: Final Project\n# Harinder Gakhal\n# SET ENV VAR FOR GCLOUD\n\nfrom google.cloud import datastore\nfrom flask import Flask, request, jsonify\nfrom requests_oauthlib import OAuth2Session\nfrom google.oauth2 import id_token\nfrom google.auth import crypt, jwt\nfrom google.auth.transport import requests\nimport requests as reqq\nimport constants\n\n\n# This disables the requirement to use HTTPS so that you can test locally.\nimport os \nos.environ['OAUTHLIB_INSECURE_TRANSPORT'] = '1'\n\napp = Flask(__name__)\nclient = datastore.Client()\n\n# OAuth2 Credentials\nclient_id = constants.CLIENT_ID\nclient_secret = constants.CLIENT_SECRET\n\n# This is the page that you will use to decode and collect the info from\n# the Google authentication flow\nredirect_uri = 'https://cs493-final-hg.wl.r.appspot.com/profile'\n\n# These let us get basic info to identify a user and not much else\n# they are part of the Google People API\nscope = ['https://www.googleapis.com/auth/userinfo.email', 'https://www.googleapis.com/auth/userinfo.profile', 'openid']\noauth = OAuth2Session(client_id, redirect_uri=redirect_uri, scope=scope)\n\n# This link will redirect users to begin the OAuth flow with Google\[email protected]('/')\ndef index():\n\tauthorization_url, state = oauth.authorization_url('https://accounts.google.com/o/oauth2/auth',\taccess_type=\"offline\", prompt=\"select_account\")\n\treturn '<h1>Welcome</h1>\\n <p>Click <a href=%s>here</a> to log in or create a new account.</p>' % authorization_url\n\n# This is where users will be redirected back to and where you can collect\n# the JWT for use in future requests\[email protected]('/profile')\ndef oauthroute():\n\ttoken = oauth.fetch_token('https://accounts.google.com/o/oauth2/token', authorization_response=request.url, client_secret=client_secret)\n\treq = requests.Request()\n\tid_info = id_token.verify_oauth2_token(token['id_token'], req, client_id)\n\n\t# 
Search database for user\n\tquery = client.query(kind=\"users\")\n\tquery.add_filter(\"sub\", \"=\", id_info['sub'])\n\tresult = list(query.fetch())\n\n\t# Create a new user if they don't exist in the database\n\tif len(result) == 0:\n\t\tnew_user = datastore.entity.Entity(key=client.key('users'))\n\t\tnew_user.update({'email': id_info['email'], 'sub': id_info['sub']})\n\t\tclient.put(new_user)\n\t\treturn ((\"<h1>Account has been created</h1>\\n\t<p>JWT: %s</p>\\n\t<p>Unique ID (sub): %s</p>\\n\" % (token['id_token'], id_info['sub'])), 201)\n\telif len(result) == 1:\n\t\treturn ((\"<h1>Welcome back!</h1>\\n\t<p>JWT: %s</p>\\n\t<p>Unique ID (sub): %s</p>\\n\" % (token['id_token'], id_info['sub'])), 200)\n\[email protected]('/users', methods=['GET'])\ndef get_users():\n\tif request.method == 'GET':\n\t\tquery = client.query(kind=\"users\")\n\t\tresults = list(query.fetch())\n\n\t\t# Add respective id to each object\n\t\tfor entity in results:\n\t\t\tentity[\"id\"] = entity.key.id\n\t\t\tentity[\"self\"] = request.url + \"/\" + str(entity.key.id)\n\n\t\treturn (jsonify(results), 200)\n\[email protected]('/users/<uid>', methods=['GET'])\ndef get_user_id(uid):\n\tif request.method == 'GET':\n\n\t\tjwt_sub = verifyJWT()\n\t\tif jwt_sub == 'fail':\n\t\t\treturn(jsonify({'Error': 'Could not verify JWT!'}), 401)\n\t\telif jwt_sub == 'nojwt':\n\t\t\treturn (jsonify({'Error': 'JWT was not given!'}), 401)\n\t\t\n\t\t# Verify that the request is authorized to access user\n\t\tif jwt_sub != uid:\n\t\t\treturn(jsonify({'Error': 'You do not have access to this user!'}), 401)\n\t\t\t\n\t\t# Find user with sub\n\t\tquery = client.query(kind=\"users\")\n\t\tquery.add_filter(\"sub\", \"=\", uid)\n\t\tresults = list(query.fetch())\n\n\t\t# Throw error if user does not exist in database\n\t\tif len(results) == 0:\n\t\t\treturn(jsonify({'Error': 'This user does not exist!\\n'}), 401)\n\n\t\t# Add respective id to each object\n\t\tfor entity in results:\n\t\t\tentity[\"id\"] = entity.key.id\n\t\t\tentity[\"self\"] = request.url\n\n\t\treturn (jsonify(results), 200)\n\[email protected]('/projects', methods=['POST', 'GET'])\ndef projects_post_get():\n\tif request.method == 'POST':\n\t\tjwt_sub = verifyJWT()\n\t\tif jwt_sub == 'fail':\n\t\t\treturn(jsonify({'Error': 'Could not verify JWT!'}), 401)\n\t\telif jwt_sub == 'nojwt':\n\t\t\treturn (jsonify({'Error': 'JWT was not given!'}), 401)\n\n\t\t# Grab content from request body\n\t\tcontent = request.get_json()\n\t\t# Check to see if all properties are given. 
- No need to validate\n\t\tif len(content) != 3:\n\t\t\treturn (jsonify({\"Error\": \"The request object is missing at least one of the required attributes\"}), 400)\n\t\t\n\t\tnew_project = datastore.entity.Entity(key=client.key(\"projects\"))\n\t\tnew_project.update({\"title\": content[\"title\"], \"due_by\": content[\"due_by\"], \"notes\": content[\"notes\"], \"owner\": jwt_sub, \"tasks\": [], \"completed_tasks\": []})\n\t\tclient.put(new_project)\n\n\t\t# Return object\n\t\tnew_project.update({'id': new_project.key.id, 'self': request.url + \"/\" + str(new_project.key.id)})\n\t\treturn (jsonify(new_project), 201)\n\telif request.method == 'GET':\n\t\tjwt_sub = verifyJWT()\n\t\tif jwt_sub == 'fail':\n\t\t\treturn(jsonify({'Error': 'Could not verify JWT!'}), 401)\n\t\telif jwt_sub == 'nojwt':\n\t\t\treturn (jsonify({'Error': 'JWT was not given!'}), 401)\n\n\t\tquery = client.query(kind=\"projects\")\n\t\tquery.add_filter('owner', '=', jwt_sub)\n\t\tq_limit = int(request.args.get('limit', '5'))\n\t\tq_offset = int(request.args.get('offset', '0'))\n\t\tl_iterator = query.fetch(limit= q_limit, offset=q_offset)\n\t\tpages = l_iterator.pages\n\t\tresults = list(next(pages))\n\t\tif l_iterator.next_page_token:\n\t\t\tnext_offset = q_offset + q_limit\n\t\t\tnext_url = request.base_url + \"?limit=\" + str(q_limit) + \"&offset=\" + str(next_offset)\n\t\telse:\n\t\t\tnext_url = None\n\t\tfor e in results:\n\t\t\te[\"id\"] = e.key.id\n\t\t\te[\"self\"] = request.url_root + 'projects/' + str(e.key.id)\n\t\t\tif e['tasks']:\n\t\t\t\tfor single_task in e['tasks']:\n\t\t\t\t\tgtask_key = client.key(\"tasks\", single_task['id'])\n\t\t\t\t\tgtask = client.get(key=gtask_key)\n\t\t\t\t\tsingle_task['title'] = gtask['title']\n\t\t\t\t\tsingle_task['due_by'] = gtask['due_by']\n\t\t\t\t\tsingle_task['description'] = gtask['description']\n\t\t\t\t\tsingle_task['completed'] = gtask['completed']\n\t\t\t\t\tsingle_task[\"self\"] = request.url_root + \"tasks/\" + str(single_task['id'])\n\t\t\tif e['completed_tasks']:\n\t\t\t\tfor ctask in e['completed_tasks']:\n\t\t\t\t\tgtask_key = client.key(\"tasks\", ctask['id'])\n\t\t\t\t\tgtask = client.get(key=gtask_key)\n\t\t\t\t\tctask['title'] = gtask['title']\n\t\t\t\t\tctask['due_by'] = gtask['due_by']\n\t\t\t\t\tctask['description'] = gtask['description']\n\t\t\t\t\tctask['completed'] = gtask['completed']\n\t\t\t\t\tctask[\"self\"] = request.url_root + \"tasks/\" + str(ctask['id'])\n\n\t\toutput = {\"projects\": results}\n\t\tif next_url:\n\t\t\toutput[\"next\"] = next_url\n\t\toutput['total'] = len(list(query.fetch()))\n\t\treturn (jsonify(output), 200)\n\[email protected]('/tasks', methods=['POST', 'GET'])\ndef tasks_post_get():\n\tif request.method == 'POST':\n\t\tjwt_sub = verifyJWT()\n\t\tif jwt_sub == 'fail':\n\t\t\treturn(jsonify({'Error': 'Could not verify JWT!'}), 401)\n\t\telif jwt_sub == 'nojwt':\n\t\t\treturn (jsonify({'Error': 'JWT was not given!'}), 401)\n\n\t\t# Grab content from request body\n\t\tcontent = request.get_json()\n\t\t# Check to see if all properties are given. 
- No need to validate\n\t\tif len(content) != 3:\n\t\t\treturn (jsonify({\"Error\": \"The request object is missing at least one of the required attributes\"}), 400)\n\t\t\n\t\tnew_task = datastore.entity.Entity(key=client.key(\"tasks\"))\n\t\tnew_task.update({'title': content['title'], 'pid': None, 'completed': False, \"description\": content[\"description\"], \"due_by\": content[\"due_by\"], \"owner\": jwt_sub})\n\t\tclient.put(new_task)\n\n\t\t# Return object\n\t\tnew_task.update({'id': new_task.key.id, 'self': request.url + \"/\" + str(new_task.key.id)})\n\t\treturn (jsonify(new_task), 201)\n\telif request.method == 'GET':\n\t\tjwt_sub = verifyJWT()\n\t\tif jwt_sub == 'fail':\n\t\t\treturn(jsonify({'Error': 'Could not verify JWT!'}), 401)\n\t\telif jwt_sub == 'nojwt':\n\t\t\treturn (jsonify({'Error': 'JWT was not given!'}), 401)\n\n\t\tquery = client.query(kind=\"tasks\")\n\t\tquery.add_filter('owner', '=', jwt_sub)\n\t\tq_limit = int(request.args.get('limit', '5'))\n\t\tq_offset = int(request.args.get('offset', '0'))\n\t\tl_iterator = query.fetch(limit= q_limit, offset=q_offset)\n\t\tpages = l_iterator.pages\n\t\tresults = list(next(pages))\n\t\tif l_iterator.next_page_token:\n\t\t\tnext_offset = q_offset + q_limit\n\t\t\tnext_url = request.base_url + \"?limit=\" + str(q_limit) + \"&offset=\" + str(next_offset)\n\t\telse:\n\t\t\tnext_url = None\n\t\tfor e in results:\n\t\t\te[\"id\"] = e.key.id\n\t\t\te[\"self\"] = request.url_root + 'tasks/' + str(e.key.id)\n\t\toutput = {\"tasks\": results}\n\t\tif next_url:\n\t\t\toutput[\"next\"] = next_url\n\t\toutput['total'] = len(list(query.fetch()))\n\t\treturn (jsonify(output), 200)\n\[email protected]('/projects/<pid>/tasks', methods=['POST', 'GET'])\ndef tasks_post_projects(pid):\n\tif request.method == 'GET':\n\t\tjwt_sub = verifyJWT()\n\t\tif jwt_sub == 'fail':\n\t\t\treturn(jsonify({'Error': 'Could not verify JWT!'}), 401)\n\t\telif jwt_sub == 'nojwt':\n\t\t\treturn (jsonify({'Error': 'JWT was not given!'}), 401)\n\n\t\tproject_key = client.key(\"projects\", int(pid))\n\t\tproject = client.get(key=project_key)\n\n\t\tif project == None:\n\t\t\treturn (jsonify({'Error': 'This project does not exist!'}), 401)\n\n\t\tif project['owner'] != jwt_sub:\n\t\t\treturn (jsonify({'Error': 'You do not own this project!'}), 401)\n\n\t\tquery = client.query(kind=\"tasks\")\n\t\tquery.add_filter(\"pid\", \"=\", pid)\n\t\tquery.add_filter(\"completed\", \"=\", False)\n\t\ttasks = list(query.fetch())\n\n\t\tquery = client.query(kind=\"tasks\")\n\t\tquery.add_filter(\"pid\", \"=\", pid)\n\t\tquery.add_filter(\"completed\", \"=\", True)\n\t\tctasks = list(query.fetch())\n\n\t\tfor entity in tasks:\n\t\t\tentity[\"id\"] = entity.key.id\n\t\t\tentity[\"self\"] = request.url_root + \"tasks/\" + str(entity.key.id)\n\t\tfor entity in ctasks:\n\t\t\tentity[\"id\"] = entity.key.id\n\t\t\tentity[\"self\"] = request.url_root + \"tasks/\" + str(entity.key.id)\n\n\t\treturn(jsonify({'tasks': tasks, 'completed_tasks': ctasks}), 200)\n\t\t\n\telif request.method == 'POST':\n\t\tjwt_sub = verifyJWT()\n\t\tif jwt_sub == 'fail':\n\t\t\treturn(jsonify({'Error': 'Could not verify JWT!'}), 401)\n\t\telif jwt_sub == 'nojwt':\n\t\t\treturn (jsonify({'Error': 'JWT was not given!'}), 401)\n\n\t\tproject_key = client.key(\"projects\", int(pid))\n\t\tproject = client.get(key=project_key)\n\n\t\tif project == None:\n\t\t\treturn (jsonify({'Error': 'This project does not exist!'}), 401)\n\n\t\tif project['owner'] != jwt_sub:\n\t\t\treturn (jsonify({'Error': 'You do not own this 
project!'}), 401)\n\n\t\t# Grab content from request body\n\t\tcontent = request.get_json()\n\t\t# Check to see if all properties are given. - No need to validate\n\t\tif len(content) != 3:\n\t\t\treturn (jsonify({\"Error\": \"The request object is missing at least one of the required attributes\"}), 400)\n\t\t\n\t\tnew_task = datastore.entity.Entity(key=client.key(\"tasks\"))\n\t\tnew_task.update({'title': content['title'], 'pid': pid, 'completed': False, \"description\": content[\"description\"], \"due_by\": content[\"due_by\"], \"owner\": jwt_sub})\n\t\tclient.put(new_task)\n\n\t\t# Put task in project\n\t\tproject['tasks'].append({'id': new_task.key.id, 'owner': jwt_sub, 'pid': pid})\n\t\tclient.put(project)\n\n\t\t# Return object\n\t\tproject.update({'id': project.key.id, 'self': request.url_root + \"projects/\" + str(project.key.id)})\n\t\tfor task in project['tasks']:\n\t\t\ttask['self'] = request.url_root + \"tasks/\" + str(task['id'])\n\t\treturn (jsonify(project), 201)\n\[email protected]('/projects/<pid>/tasks/<tid>', methods=['PUT'])\ndef put_task_project(pid, tid):\n\tif request.method == 'PUT':\n\t\tjwt_sub = verifyJWT()\n\t\tif jwt_sub == 'fail':\n\t\t\treturn(jsonify({'Error': 'Could not verify JWT!'}), 401)\n\t\telif jwt_sub == 'nojwt':\n\t\t\treturn (jsonify({'Error': 'JWT was not given!'}), 401)\n\n\t\tproject_key = client.key(\"projects\", int(pid))\n\t\tproject = client.get(key=project_key)\n\t\ttask_key = client.key(\"tasks\", int(tid))\n\t\ttask = client.get(key=task_key)\n\n\t\tif project == None:\n\t\t\treturn (jsonify({\"Error\": \"This project does not exist!\"}), 404)\n\t\tif task == None:\n\t\t\treturn (jsonify({\"Error\": \"This task does not exist!\"}), 404)\n\n\t\tif project['owner'] != jwt_sub:\n\t\t\treturn (jsonify({'Error': 'You do not own this project!'}), 401)\n\t\telif task['owner'] != jwt_sub:\n\t\t\treturn (jsonify({'Error': 'You do not own this task!'}), 401)\n\n\t\tfor ptask in project['tasks']:\n\t\t\tif ptask['id'] == int(tid):\n\t\t\t\treturn (jsonify({'Error': 'This task is already in the project!'}), 403)\n\t\t\n\t\tproject['tasks'].append({'id': task.key.id, 'owner': task['owner'], 'pid': pid})\n\t\tclient.put(project)\n\t\ttask['pid'] = pid\n\t\tclient.put(task)\n\n\t\treturn(jsonify(''), 204)\n\[email protected]('/tasks/<tid>', methods=['GET', 'DELETE', 'PUT', 'PATCH'])\ndef tasks_get_delete_put_patch(tid):\n\tif request.method == 'PUT' or request.method == 'PATCH':\n\t\tjwt_sub = verifyJWT()\n\t\tif jwt_sub == 'fail':\n\t\t\treturn(jsonify({'Error': 'Could not verify JWT!'}), 401)\n\t\telif jwt_sub == 'nojwt':\n\t\t\treturn (jsonify({'Error': 'JWT was not given!'}), 401)\n\n\t\ttask_key = client.key(\"tasks\", int(tid))\n\t\ttask = client.get(key=task_key)\n\n\t\tif task == None:\n\t\t\treturn (jsonify({'Error': 'This task does not exist!'}), 401)\n\t\t\n\t\tif task['owner'] != jwt_sub:\n\t\t\treturn (jsonify({'Error': 'You do not own this task!'}), 401)\n\t\t\n\t\t# Grab content from request body\n\t\tcontent = request.get_json()\n\t\tif len(content) == 0:\n\t\t\treturn (jsonify({\"Error\": \"The request object is missing!\"}), 400)\n\n\t\t# Validate content\n\t\tfor prop in content:\n\t\t\tif prop == 'completed' and type(content.get(prop)) == bool:\n\t\t\t\tif task[\"pid\"]:\n\t\t\t\t\tproject_key = client.key(\"projects\", int(task['pid']))\n\t\t\t\t\tproject = client.get(key=project_key)\n\t\t\t\t\tif task['completed'] == False and content.get(prop) == True:\n\t\t\t\t\t\tproject['tasks'].remove({'id': task.key.id, 'owner': jwt_sub, 
'pid': str(project.key.id)})\n\t\t\t\t\t\tproject['completed_tasks'].append({'id': task.key.id, 'owner': jwt_sub, 'pid': str(project.key.id)})\n\t\t\t\t\telif task['completed'] == True and content.get(prop) == False:\n\t\t\t\t\t\tproject['completed_tasks'].remove({'id': task.key.id, 'owner': jwt_sub, 'pid': str(project.key.id)})\n\t\t\t\t\t\tproject['tasks'].append({'id': task.key.id, 'owner': jwt_sub, 'pid': str(project.key.id)})\n\t\t\t\t\tclient.put(project)\n\t\t\t\ttask[\"completed\"] = content.get(prop)\n\t\t\telif prop == 'title' and type(content.get(prop)) == str:\n\t\t\t\ttask[\"title\"] = content.get(prop)\n\t\t\telif prop == 'description' and type(content.get(prop)) == str:\n\t\t\t\ttask[\"description\"] = content.get(prop)\n\t\t\telif prop == 'due_by' and type(content.get(prop)) == str:\n\t\t\t\ttask[\"due_by\"] = content.get(prop)\n\t\t\telse:\n\t\t\t\treturn (jsonify({\"Error\": \"Invalid content!\"}), 400)\n\n\t\tclient.put(task)\n\t\ttask['id'] = task.key.id\n\t\ttask['self'] = request.url\n\t\treturn(jsonify(task), 201)\n\telif request.method == 'GET':\n\t\tjwt_sub = verifyJWT()\n\t\tif jwt_sub == 'fail':\n\t\t\treturn(jsonify({'Error': 'Could not verify JWT!'}), 401)\n\t\telif jwt_sub == 'nojwt':\n\t\t\treturn (jsonify({'Error': 'JWT was not given!'}), 401)\n\n\t\ttask_key = client.key(\"tasks\", int(tid))\n\t\ttask = client.get(key=task_key)\n\n\t\tif task == None:\n\t\t\treturn (jsonify({'Error': 'This task does not exist!'}), 401)\n\n\t\tif task['owner'] != jwt_sub:\n\t\t\treturn (jsonify({'Error': 'You do not own this task!'}), 401)\n\n\t\ttask['id'] = task.key.id\n\t\ttask['self'] = request.url\n\t\treturn(jsonify(task), 200)\n\telif request.method == 'DELETE':\n\t\tjwt_sub = verifyJWT()\n\t\tif jwt_sub == 'fail':\n\t\t\treturn(jsonify({'Error': 'Could not verify JWT!'}), 401)\n\t\telif jwt_sub == 'nojwt':\n\t\t\treturn (jsonify({'Error': 'JWT was not given!'}), 401)\n\n\t\ttask_key = client.key(\"tasks\", int(tid))\n\t\ttask = client.get(key=task_key)\n\n\t\tif task == None:\n\t\t\treturn (jsonify({\"Error\": \"No task with this task_id exists\"}), 404)\n\t\telif task['owner'] != jwt_sub:\n\t\t\treturn (jsonify({'Error': 'You do not own this task!'}), 401)\n\t\t\n\t\tif task['pid']:\n\t\t\tproject_key = client.key(\"projects\", int(task['pid']))\n\t\t\tproject = client.get(key=project_key)\n\t\t\tif task['completed']:\n\t\t\t\tproject['completed_tasks'].remove({'id': task.key.id, 'owner': jwt_sub, 'pid': str(project.key.id)})\n\t\t\telse:\n\t\t\t\tproject['tasks'].remove({'id': task.key.id, 'owner': jwt_sub, 'pid': str(project.key.id)})\n\t\t\tclient.put(project)\n\t\t\n\t\tclient.delete(task)\t\t\t\n\n\t\treturn(jsonify(''), 204)\n\[email protected]('/projects/<pid>', methods=['GET', 'DELETE', 'PUT', 'PATCH'])\ndef projects_get_delete(pid):\n\tif request.method == 'PUT' or request.method == 'PATCH':\n\t\tjwt_sub = verifyJWT()\n\t\tif jwt_sub == 'fail':\n\t\t\treturn(jsonify({'Error': 'Could not verify JWT!'}), 401)\n\t\telif jwt_sub == 'nojwt':\n\t\t\treturn (jsonify({'Error': 'JWT was not given!'}), 401)\n\n\t\tproject_key = client.key(\"projects\", int(pid))\n\t\tproject = client.get(key=project_key)\n\n\t\tif project == None:\n\t\t\treturn (jsonify({'Error': 'This project does not exist!'}), 401)\n\t\t\n\t\tif project['owner'] != jwt_sub:\n\t\t\treturn (jsonify({'Error': 'You do not own this project!'}), 401)\n\t\t\n\t\t# Grab content from request body\n\t\tcontent = request.get_json()\n\t\tif len(content) == 0:\n\t\t\treturn (jsonify({\"Error\": \"The request 
object is missing!\"}), 400)\n\n\t\t# Validate content\n\t\tfor prop in content:\n\t\t\tif prop == 'title' and type(content.get(prop)) == str:\n\t\t\t\tproject[\"title\"] = content.get(prop)\n\t\t\telif prop == 'notes' and type(content.get(prop)) == str:\n\t\t\t\tproject[\"notes\"] = content.get(prop)\n\t\t\telif prop == 'due_by' and type(content.get(prop)) == str:\n\t\t\t\tproject[\"due_by\"] = content.get(prop)\n\t\t\telse:\n\t\t\t\treturn (jsonify({\"Error\": \"Invalid content!\"}), 400)\n\n\t\tclient.put(project)\n\t\tproject['id'] = project.key.id\n\t\tproject['self'] = request.url\n\t\t\n\t\tif project['tasks']:\n\t\t\tfor single_task in project['tasks']:\n\t\t\t\tgtask_key = client.key(\"tasks\", single_task['id'])\n\t\t\t\tgtask = client.get(key=gtask_key)\n\t\t\t\tsingle_task['title'] = gtask['title']\n\t\t\t\tsingle_task['due_by'] = gtask['due_by']\n\t\t\t\tsingle_task['description'] = gtask['description']\n\t\t\t\tsingle_task['completed'] = gtask['completed']\n\t\t\t\tsingle_task[\"self\"] = request.url_root + \"tasks/\" + str(single_task['id'])\n\t\tif project['completed_tasks']:\n\t\t\tfor ctask in project['completed_tasks']:\n\t\t\t\tgtask_key = client.key(\"tasks\", ctask['id'])\n\t\t\t\tgtask = client.get(key=gtask_key)\n\t\t\t\tctask['title'] = gtask['title']\n\t\t\t\tctask['due_by'] = gtask['due_by']\n\t\t\t\tctask['description'] = gtask['description']\n\t\t\t\tctask['completed'] = gtask['completed']\n\t\t\t\tctask[\"self\"] = request.url_root + \"tasks/\" + str(ctask['id'])\n\t\treturn(jsonify(project), 201)\n\telif request.method == 'GET':\n\t\tjwt_sub = verifyJWT()\n\t\tif jwt_sub == 'fail':\n\t\t\treturn(jsonify({'Error': 'Could not verify JWT!'}), 401)\n\t\telif jwt_sub == 'nojwt':\n\t\t\treturn (jsonify({'Error': 'JWT was not given!'}), 401)\n\n\t\tproject_key = client.key(\"projects\", int(pid))\n\t\tproject = client.get(key=project_key)\n\n\t\tif project == None:\n\t\t\treturn (jsonify({\"Error\": \"This project does not exist!\"}), 404)\n\n\t\tif project['owner'] != jwt_sub:\n\t\t\treturn (jsonify({'Error': 'You do not own this project!'}), 401)\n\n\t\tproject['id'] = project.key.id\n\t\tproject['self'] = request.url\n\t\tif project['tasks']:\n\t\t\tfor task in project['tasks']:\n\t\t\t\tgtask_key = client.key(\"tasks\", task['id'])\n\t\t\t\tgtask = client.get(key=gtask_key)\n\t\t\t\ttask['title'] = gtask['title']\n\t\t\t\ttask['due_by'] = gtask['due_by']\n\t\t\t\ttask['description'] = gtask['description']\n\t\t\t\ttask['completed'] = gtask['completed']\n\t\t\t\ttask['self'] = request.url_root + \"tasks/\" + str(task['id'])\n\t\tif project['completed_tasks']:\n\t\t\tfor ctask in project['completed_tasks']:\n\t\t\t\tgtask_key = client.key(\"tasks\", ctask['id'])\n\t\t\t\tgtask = client.get(key=gtask_key)\n\t\t\t\tctask['title'] = gtask['title']\n\t\t\t\tctask['due_by'] = gtask['due_by']\n\t\t\t\tctask['description'] = gtask['description']\n\t\t\t\tctask['completed'] = gtask['completed']\n\t\treturn(jsonify(project), 200)\n\telif request.method == 'DELETE':\n\t\tjwt_sub = verifyJWT()\n\t\tif jwt_sub == 'fail':\n\t\t\treturn(jsonify({'Error': 'Could not verify JWT!'}), 401)\n\t\telif jwt_sub == 'nojwt':\n\t\t\treturn (jsonify({'Error': 'JWT was not given!'}), 401)\n\n\t\tproject_key = client.key(\"projects\", int(pid))\n\t\tproject = client.get(key=project_key)\n\n\t\tif project == None:\n\t\t\treturn (jsonify({'Error': 'This project does not exist!'}), 401)\n\t\t\n\t\tif project['owner'] != jwt_sub:\n\t\t\treturn (jsonify({'Error': 'You do not own this project!'}), 
401)\n\t\t\n\t\tif project['tasks']:\n\t\t\tfor task in project['tasks']:\n\t\t\t\tgtask_key = client.key(\"tasks\", task['id'])\n\t\t\t\tgtask = client.get(key=gtask_key)\n\t\t\t\tgtask['pid'] = None\n\t\t\t\tclient.put(gtask)\n\t\tif project['completed_tasks']:\n\t\t\tfor task in project['completed_tasks']:\n\t\t\t\tgtask_key = client.key(\"tasks\", task['id'])\n\t\t\t\tgtask = client.get(key=gtask_key)\n\t\t\t\tgtask['pid'] = None\n\t\t\t\tclient.put(gtask)\n\t\t\n\t\tclient.delete(project)\n\t\treturn(jsonify(''), 204)\n\ndef verifyJWT():\n\t# Get JWT from Authorization header\n\treq = requests.Request()\n\tjwt_token = request.headers.get('Authorization')\n\tif jwt_token:\n\t\tjwt_token = jwt_token.split(\" \")[1]\n\t\t# Check to see if JWT is valid\n\t\ttry:\n\t\t\tjwt_sub = id_token.verify_oauth2_token(jwt_token, req, client_id)['sub']\n\t\texcept:\n\t\t\treturn 'fail'\n\telse:\n\t\t# Return 401 if no JWT is given\n\t\treturn 'nojwt'\n\treturn jwt_sub\n\nif __name__ == '__main__':\n\tapp.run(host='127.0.0.1', port=8080, debug=True)\n" }, { "alpha_fraction": 0.6891220211982727, "alphanum_fraction": 0.7090005278587341, "avg_line_length": 30.771930694580078, "blob_id": "d1326fa4ab8d4806f117c7544c98eec74bf5c994", "content_id": "e3bec77bd7862b26c10c6cd8bb5d737ee5501900", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1811, "license_type": "no_license", "max_line_length": 109, "num_lines": 57, "path": "/assignment-6/main.py", "repo_name": "JayCax/CS-493", "src_encoding": "UTF-8", "text": "'''\n\tHarinder Gakhal\n\tCS493 - Assignment 6: OAuth 2.0 Implementation\n\t10/27/2020\n'''\n\nfrom google.cloud import datastore\nfrom flask import Flask, request, jsonify, render_template, redirect, session\nimport flask\nimport requests\nimport json\nimport uuid\nimport string\nimport random\nimport constants # This file will hold the client id and client secret\n\napp = flask.Flask(__name__)\napp.secret_key = str(uuid.uuid4())\nclient = datastore.Client()\n\nENDPOINT = \"https://accounts.google.com/o/oauth2/v2/auth?\"\nSCOPE = \"scope=https://www.googleapis.com/auth/userinfo.profile&\"\nACCESS_TYPE = \"access_type=offline&\"\nRESPONSE_TYPE = \"response_type=code&\"\nREDIRECT_URI = \"redirect_uri=https://assignment1-gae-hgg.wl.r.appspot.com/oauth&\"\nCLIENT_ID = \"client_id=\" + constants.clientID + \"&\"\n\[email protected]('/')\ndef index():\n\treturn render_template('index.html')\n\[email protected]('/oauth')\ndef oauth():\n\told_state = flask.session['state']\n\tif old_state != request.args.get('state'):\n\t\treturn (\"State does not match!\", 404)\n\tdata = {\n\t\t'code': request.args.get('code'),\n\t\t'client_id': constants.clientID,\n\t\t'client_secret': constants.clientSecret,\n\t\t'redirect_uri': \"https://assignment1-gae-hgg.wl.r.appspot.com/oauth\",\n\t\t'grant_type': 'authorization_code'\n\t}\n\tres = requests.post('https://oauth2.googleapis.com/token', data=data).json()\n\ttoken = res['access_token']\n\tperson = requests.get(\"https://www.googleapis.com/oauth2/v1/userinfo?alt=json&access_token=\" + token).json()\n\treturn person['name']\n\[email protected]('/gauth')\ndef gauth():\n\tletters = string.ascii_lowercase\n\tSTATE = ''.join(random.choice(letters) for i in range(8))\n\tflask.session['state'] = STATE\n\treturn redirect(f'{ENDPOINT}{SCOPE}{ACCESS_TYPE}{RESPONSE_TYPE}{REDIRECT_URI}{CLIENT_ID}state={STATE}')\n\nif __name__ == '__main__':\n\tapp.run(host='127.0.0.1', port=8080, debug=True)\n" }, { "alpha_fraction": 
0.606419563293457, "alphanum_fraction": 0.6201757788658142, "avg_line_length": 33.84931564331055, "blob_id": "81c0e60b1504e542db38896622c1c0fd5f60a5b7", "content_id": "9174af3b26fe01bf249dcf0e79143e83dcb16726", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2617, "license_type": "no_license", "max_line_length": 139, "num_lines": 73, "path": "/assignment-4/load.py", "repo_name": "JayCax/CS-493", "src_encoding": "UTF-8", "text": "'''\r\n\tHarinder Gakhal\r\n\tCS493 - Assignment 4: Intermediate Rest API\r\n\t10/27/2020\r\n'''\r\n\r\nfrom flask import Blueprint, request, jsonify\r\nfrom google.cloud import datastore\r\nimport json\r\n\r\nclient = datastore.Client()\r\n\r\nbp = Blueprint('load', __name__, url_prefix='/loads')\r\n\r\[email protected]('', methods=['POST','GET'])\r\ndef loads_get_post():\r\n\tif request.method == 'POST':\r\n\t\tcontent = request.get_json()\r\n\t\tif len(content) != 3:\r\n\t\t\treturn (jsonify({\"Error\": \"The request object is missing at least one of the required attributes\"}), 400)\r\n\t\tnew_load = datastore.entity.Entity(key=client.key(\"loads\"))\r\n\t\tnew_load.update({\"weight\": content[\"weight\"], 'carrier': None, 'content': content['content'], 'delivery_date': content['delivery_date']})\r\n\t\tclient.put(new_load)\r\n\t\tnew_load['id'] = new_load.key.id\r\n\t\tnew_load['self'] = request.url + '/' + str(new_load.key.id)\r\n\t\treturn (jsonify(new_load), 201)\r\n\telif request.method == 'GET':\r\n\t\tquery = client.query(kind=\"loads\")\r\n\t\tq_limit = int(request.args.get('limit', '3'))\r\n\t\tq_offset = int(request.args.get('offset', '0'))\r\n\t\tg_iterator = query.fetch(limit= q_limit, offset=q_offset)\r\n\t\tpages = g_iterator.pages\r\n\t\tresults = list(next(pages))\r\n\t\tif g_iterator.next_page_token:\r\n\t\t\tnext_offset = q_offset + q_limit\r\n\t\t\tnext_url = request.base_url + \"?limit=\" + str(q_limit) + \"&offset=\" + str(next_offset)\r\n\t\telse:\r\n\t\t\tnext_url = None\r\n\t\tfor e in results:\r\n\t\t\te[\"id\"] = e.key.id\r\n\t\t\te[\"self\"] = request.url_root + \"loads/\" + str(e.key.id)\r\n\t\t\tif e[\"carrier\"] != None:\r\n\t\t\t\te['carrier']['self'] = request.url_root + \"boats/\" + str(e['carrier']['id'])\r\n\t\toutput = {\"loads\": results}\r\n\t\tif next_url:\r\n\t\t\toutput[\"next\"] = next_url\r\n\t\treturn (jsonify(output), 200)\r\n\r\[email protected]('/<id>', methods=['DELETE','GET'])\r\ndef loads_get_delete(id):\r\n\tif request.method == 'DELETE':\r\n\t\tkey = client.key(\"loads\", int(id))\r\n\t\tload = client.get(key=key)\r\n\t\tif load == None:\r\n\t\t\treturn (jsonify({\"Error\": \"No load with this load_id exists\"}), 404)\r\n\t\tif load['carrier'] != None:\r\n\t\t\tboat = client.get(key=client.key(\"boats\", load['carrier']['id']))\r\n\t\t\tboat[\"loads\"].remove({'id': load.key.id})\r\n\t\t\tclient.put(boat)\r\n\t\tclient.delete(key)\r\n\t\treturn (jsonify(''),204)\r\n\telif request.method == 'GET':\r\n\t\tload_key = client.key(\"loads\", int(id))\r\n\t\tload = client.get(key=load_key)\r\n\t\tif load == None:\r\n\t\t\treturn (jsonify({\"Error\": \"No load with this load_id exists\"}), 404)\r\n\t\tif load[\"carrier\"]:\r\n\t\t\tload[\"carrier\"][\"self\"] = request.url_root + \"boats/\" + str(load[\"carrier\"][\"id\"])\r\n\t\tload[\"id\"] = id\r\n\t\tload[\"self\"] = request.url\r\n\t\treturn (jsonify(load), 200)\r\n\telse:\r\n\t\treturn 'Method not recogonized'\r\n" }, { "alpha_fraction": 0.6726457476615906, "alphanum_fraction": 0.7040358781814575, 
"avg_line_length": 22.77777862548828, "blob_id": "96fa45fa996d22dfe05a6fcb7e8923ffd7828cba", "content_id": "badec2aba87e55a3d3f65f91505a4cd3aa218eff", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Dockerfile", "length_bytes": 223, "license_type": "no_license", "max_line_length": 48, "num_lines": 9, "path": "/assignment-8/Dockerfile", "repo_name": "JayCax/CS-493", "src_encoding": "UTF-8", "text": "FROM python:3.8\r\nWORKDIR /usr/src/app\r\nCOPY requirements.txt ./\r\nRUN pip install -r requirements.txt\r\nCOPY . .\r\nENV PORT=8000\r\nEXPOSE ${PORT}\r\nENV GOOGLE_APPLICATION_CREDENTIALS=\"my-key.json\"\r\nCMD [ \"python3\", \"main.py\" ]\r\n" } ]
7
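The JayCax/CS-493 record above guards its routes with verifyJWT(), which reads the Authorization header and takes the token via split(" ")[1], so it expects the form "Bearer <token>". Below is a minimal sketch of calling the confirmed GET /tasks/<tid> route against the local run configuration from the record's __main__ block (127.0.0.1:8080). It is an illustration only: the task id and token value are hypothetical placeholders, and a real Google-issued ID token would be required.

import requests

ID_TOKEN = "<google-id-token>"  # hypothetical placeholder, not a value from the record
TASK_ID = "123"                 # hypothetical task id, for illustration only

resp = requests.get(
    "http://127.0.0.1:8080/tasks/" + TASK_ID,
    # header format expected by verifyJWT(): "Bearer <token>"
    headers={"Authorization": "Bearer " + ID_TOKEN},
)
print(resp.status_code, resp.json())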
canevaa/CapstoneProject
https://github.com/canevaa/CapstoneProject
e8727c636b5d1966f02a21f225580e9d55d16318
5eeb9e65013a82bcf8fd060a005065f4de23f874
fa908cd9b5680af86ce2c8b8e208f5590ac424f8
refs/heads/master
2021-01-10T13:00:17.544063
2016-05-06T00:39:33
2016-05-06T00:39:33
50,085,971
0
2
null
null
null
null
null
[ { "alpha_fraction": 0.6608833074569702, "alphanum_fraction": 0.6715299487113953, "avg_line_length": 29.10089111328125, "blob_id": "f8d73230d84656f650716ef0b63e5125e66c4e8f", "content_id": "bfd12f4d328a229d4b2f41122872886751d12ab9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10144, "license_type": "no_license", "max_line_length": 83, "num_lines": 337, "path": "/Client-5000.py", "repo_name": "canevaa/CapstoneProject", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n##################\nimport os\nimport socket\nimport pickle\nimport readline\nimport sys\nfrom threading import Thread\nfrom Crypto import Random\nfrom Crypto.Cipher import AES\nfrom Crypto.PublicKey import RSA\nfrom Crypto.Hash import MD5\n\n#Global variables\nlogfile = \"\"\nClientA = \"\"\nClientB = \"\"\nClient = \"\"\nCS = \"\"\ncipher1 = \"\"\ncipher2 = \"\"\nquit = False\n\n# Gets the local IP Address of the computer\ndef get_local_ip():\n \ts = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n \ts.connect((\"gmail.com\",80))\n \tAddress = (s.getsockname()[0])\n \tAddress = str(Address)\n \ts.close()\n \treturn Address\n \t# end get_local_address\n\n# check for hermes directory, create it if it doesnt exist\ndef check_log_dir():\n\thermespath = os.path.expanduser('~') + '/.hermes'\n\tif not os.path.exists(hermespath):\n\t\tprint '~/.hermes not found, creating directory\\n'\n\t\tos.makedirs(hermespath)\n\treturn\n\t# done check_log_dir()\n\n# Checks for the RSA key and returns whether it already exists or not\ndef check_key():\n\tglobal logfile, ClientA\n\tlogfile.write('Checking For Key\\n')\n\tkeypath = os.path.expanduser('~') + '/.hermes/' + ClientA + '.key'\n\tpubkeypath = os.path.expanduser('~') + '/.hermes/' + ClientA + '.pubkey'\n\tserverkeypath = os.path.expanduser('~') + '/.hermes/server.pub'\n\tlogfile.write('Full Key Path: ')\n\tlogfile.write(keypath)\n\tlogfile.write('\\n')\n\tlogfile.write('Server Key Path: ')\n\tlogfile.write(serverkeypath)\n\tlogfile.write('\\n')\n\tkeyfound = True\n\tif not os.path.exists(keypath):\n\t\tlogfile.write('Full Key Not Found\\n')\n\t\tfilekey = open(keypath, 'a+')\n\t\tkeyfound = False\n\telse:\n\t\tlogfile.write('Found Full Key\\n')\n\tif not os.path.exists(pubkeypath):\n\t\tlogfile.write('Full Key Not Found\\n')\n\t\tfilekey = open(pubkeypath, 'a+')\n\t\tkeyfound = False\n\telse:\n\t\tlogfile.write('Found Full Key\\n')\n\tif not os.path.exists(serverkeypath):\n\t\tlogfile.write('Server Key Not Found\\n')\n\t\tfilekey = open(serverkeypath, 'a+')\n\t\tkeyfound = False\n\telse:\n\t\tlogfile.write('Found Server Key\\n')\n\treturn keyfound\n\t# done check_key()\n\n# Creates the client key with a password and writes the server public key to a file\ndef create_key(server_key, s):\n\tglobal ClientA\n\tkeypath = os.path.expanduser('~') + '/.hermes/' + ClientA + '.key'\n\tpubkeypath = os.path.expanduser('~') + '/.hermes/' + ClientA + '.pubkey'\n\tserverkeypath = os.path.expanduser('~') + '/.hermes/server.pub'\n\t# Creates and writes client key\n\tprint \"New Password For Key: \"\n\tpassword = raw_input()\n\tkey = RSA.generate(2048)\n\texport_key = key.exportKey('PEM', password, pkcs=1)\n\tdumpfile = open(keypath, 'w')\n\tdumpfile.write(export_key)\n\tdumpfile.close\n\t# Writes the pubkey to a file\n\tpubkey = key.publickey().exportKey()\n\tdumpfile = open(pubkeypath, 'w')\n\tdumpfile.write(pubkey)\n\tdumpfile.close\n\t# Writes server key public ket to file\n\tprint \"The following Server Public Key will be created: 
\\n\"\n\tprint server_key, \"\\n\"\n\tprint \"Please Type 'Yes' to accept this key: \"\n\tflag = raw_input()\n\tif flag == 'Yes':\n\t\tdumpfile = open(serverkeypath, 'w')\n\t\tdumpfile.write(server_key)\n\t\tdumpfile.close\n#causes server to close, not that good of a solution\n\telse:\n\t\tprint \"Key not accepted.\"\n\t\tlogfile.write(\"You did not accept the key.\\n\")\n\t\ts.close()\n\t\tlogfile.write(\"Socket to server closed.\\n\")\n\t\tlogfile.close()\n\t\tsys.exit(1)\n\treturn pubkey\n\t# end create_key()\n\n# Loads the key into a variable for use with the proper password\ndef load_key():\n\tglobal ClientA\n\tkeypath = os.path.expanduser('~') + '/.hermes/' + ClientA + '.key'\n\timportfile = open(keypath, 'r')\n\tprint \"Password For \" + ClientA + \" Key: \"\n\tpassword = raw_input()\n\tkey = RSA.importKey(importfile.read(), passphrase=password)\n\timportfile.close()\n\treturn key\n\t# end load_key()\n\n# Loads the pubkey into a variable\ndef load_pub_key():\n\tglobal ClientA\n\tkeypath = os.path.expanduser('~') + '/.hermes/' + ClientA + '.pubkey'\n\timportfile = open(keypath, 'r')\n\tkey = importfile.read()\n\timportfile.close()\n\treturn key\n\t\n# Loads the server key into a variable for use\ndef load_server_key(passed_key):\n\tserverkeypath = os.path.expanduser('~') + '/.hermes/server.pub'\n\timportfile = open(serverkeypath, 'r')\n\tkey = importfile.read()\n\tif passed_key == key:\n\t\tkey = RSA.importKey(key)\n\telse:\n\t\tprint \"Error: Server Key does not match! Possible Man in the Middle!\"\n\timportfile.close()\n\treturn key\n\t# end load_server_key()\n\n# Creates a socket and connects to the other computer who became the server\ndef client_socket(Server_Address, Server_Port):\n\tglobal logfile \n\tlogfile.write('Creating Client Socket\\n')\n\ts = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n\tlogfile.write('Client Socket Created\\n')\n\tlogfile.write('Connecting to Server\\n')\n\ts.connect((Server_Address, Server_Port))\n\tlogfile.write('Connection Created\\n')\n\treturn s\n\t# end create_socket()\n\n# Creates a socket and waits for the other computer to connect\ndef server_socket(Port):\n\tglobal logfile\n\tlogfile.write('Creating Server Socket\\n')\n\tServerS = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n\tlogfile.write('Socket built succesfully\\n')\n\tAddress = get_local_ip()\n\tServerS.bind((Address, Port))\n\tlogfile.write('Waiting for a Connection\\n')\n\tServerS.listen(5)\n\treturn ServerS\n\t# end server_socket()\n\t\n# Used from the threading statment. 
It will continuously wait for a message\ndef recv_thread(mssg):\n\tglobal ClientB, ClientA, CS, Client, cipher1, cipher2, quit\n\tdata_enc = \"\"\n\tdata_unenc = \"\"\n\tprompt = \"[\" + ClientA + \"]: \"\n\tif (mssg == 1):\n\t\twhile quit == False:\n\t\t\tdata_enc = Client.recv(1024)\n\t\t\tdata_unenc = cipher2.decrypt(data_enc)\n\t\t\tif (data_unenc == \"quit()\"):\n\t\t\t\tquit = True\n\t\t\t\tbreak\n\t\t\tsys.stdout.write('\\r'+' '*(len(readline.get_line_buffer())+2)+'\\r')\n\t\t\t#print \"[\" + ClientB + \" Encrypted]: \" + data_enc\n\t\t\tprint \"[\" + ClientB + \"]: \" + data_unenc\n\t\t\tsys.stdout.write(prompt + readline.get_line_buffer())\n\t\t\tsys.stdout.flush()\n\t\t\t\n\tif (mssg == 2):\n\t\twhile quit == False:\n\t\t\tdata_enc = CS.recv(1024)\n\t\t\tdata_unenc = cipher1.decrypt(data_enc)\n\t\t\tif (data_unenc == \"quit()\"):\n\t\t\t\tquit = True\n\t\t\t\tbreak\n\t\t\tsys.stdout.write('\\r'+' '*(len(readline.get_line_buffer())+2)+'\\r')\n\t\t\t#print \"[\" + ClientB + \" Encrypted]: \" + data_enc\n\t\t\tprint \"[\" + ClientB + \"]: \" + data_unenc\n\t\t\tsys.stdout.write(prompt + readline.get_line_buffer())\n\t\t\tsys.stdout.flush()\n\t# end recv_thread()\n\n# Used from the threading statement. It will send whenever it gets a message\ndef send_thread(mssg):\n\tglobal ClientA, CS, Client, ServerS, cipher1, cipher2, quit\n\tdata = \"\"\n\tprompt = \"[\" + ClientA + \"]: \"\n\tif (mssg == 1):\n\t\twhile quit == False:\n\t\t\tdata = raw_input(prompt)\n\t\t\tClient.send(cipher1.encrypt(data))\n\t\t\tif (data == \"quit()\"):\n\t\t\t\tquit = True\n\tif (mssg == 2):\n\t\twhile quit == False:\n\t\t\tdata = raw_input(prompt)\n\t\t\tCS.send(cipher2.encrypt(data))\n\t\t\tif (data == \"quit()\"):\n\t\t\t\tquit = True\n\t# end send_thread()\n\n# Handles the Server Portion of the messenger \n# | Server | ip | port | IV | Key | \ndef mess_server(sessionlist):\n\tglobal logfile, Client\n\tServerS = server_socket(sessionlist[2])\n\tprint \"Waiting for Connection\\n\"\n\tClient, ClientAddr = ServerS.accept()\n\tlogfile.write(\"Connected to: \")\n\tlogfile.write(repr(ClientAddr))\n\tlogfile.write(\"\\n\")\n\t# Create threads for sending and recv messages\n\tClient.send(\"Connection Established\")\n\tprint \"Connection Established\"\n\tlogfile.write(\"Starting Threads\\n\")\n\tt1 = Thread(target=recv_thread, args=(1,))\n\tt2 = Thread(target=send_thread, args=(1,))\n\tt1.start()\n\tt2.start()\n\tt1.join()\n\tt2.join()\n\tServerS.close()\n\tlogfile.write(\"Server Socket Closed\\n\")\n\t# end mess_server()\n\n# Handles the Client Portion of the messenger\n# | Server | ip | port | IV | Key | \t\ndef mess_client(sessionlist):\n\tglobal logfile, CS\n\tCS = client_socket(sessionlist[1], sessionlist[2])\n\tprint CS.recv(1024)\n\t# Create threads for sending and recv messages\n\tt1 = Thread(target=recv_thread, args=(2,))\n\tt2 = Thread(target=send_thread, args=(2,))\n\tt1.start()\n\tt2.start()\n\tt1.join()\n\tt2.join()\n\tCS.close()\n\tlogfile.write(\"All sockets Closed\\n\")\n\t# end mess_client()\n\n# Exchange between server and clients\ndef server_exchange(ServerAddr):\n\tglobal logfile, ClientA, ClientB, cipher1, cipher2\n\t# order is UserA, UserB, ClientIP\n\tsenddata = [\"\" for x in range(3)]\n\tprint \"Enter Your User Name:\"\n\tsenddata[0] = raw_input('>')\n\tClientA = senddata[0]\n\tprint \"Enter User to Connect To:\"\n\tsenddata[1] = raw_input('>')\n\tClientB = senddata[1]\n\tsenddata[2] = get_local_ip()\n\t# Creating socket to Server\n\ts = client_socket(ServerAddr, 5000)\n\t# recv 
server_pub_key\n\tserver_pub_key = s.recv(4096)\n\tif not check_key():\n\t\tsend_data_tmp2 = create_key(server_pub_key, s)\n\t# Loading private key to variable\n\tkey = load_key()\n\t# Loading server public key to variable\n\tserverkey = load_server_key(server_pub_key)\n\tsend_data_tmp2 = load_pub_key()\n\t# Sending array with connection info\n\ts.send(pickle.dumps(serverkey.encrypt(pickle.dumps(senddata),32)))\n\t# ACK from server\n\ttmp = s.recv(1024)\n\t# Sending our public key\n\ts.send(send_data_tmp2)\n\t# | Server | ip | port | IV | Key |\n\trecvdata = [\"\" for x in range(5)]\n\trecv_data_tmp = s.recv(4096) \n\trecvdata = pickle.loads(key.decrypt(pickle.loads(recv_data_tmp)))\n\t# creating ciphers\n\tcipher1 = AES.new(recvdata[4], AES.MODE_CFB, recvdata[3])\n\t# hashing the key and iv so the second cipher is different and cannot be decrypted\n\thashkey = MD5.new(recvdata[4]).digest()\n\thashiv = MD5.new(recvdata[3]).digest()\n\tcipher2 = AES.new(hashkey, AES.MODE_CFB, hashiv)\n\t# if recvdata[0] true: mess_server() else: mess_client()\n\tlogfile.write(str(recvdata))\n\tlogfile.write(\"\\n\")\n\tif recvdata[0] == True:\n\t\tmess_server(recvdata)\n\tif recvdata[0] == False:\n\t\tmess_client(recvdata)\n\ts.close()\n\t# end server_exchange()\n\n##############################################\n# #\n# Start Main, calling functions #\n# #\n##############################################\n\ndef main():\n\tglobal logfile\n\tcheck_log_dir()\n\tlogfile = open(os.path.expanduser('~') + '/.hermes/connection.log', 'a+')\n\tlogfile.write(\"***File Opened***\\n\")\n\tServerAddr = \"67.241.38.178\"\n\tserver_exchange(ServerAddr)\n\tlogfile.write(\"***File Closed***\\n\")\n\tlogfile.close()\n\t#end main()\n\n\nif __name__ == '__main__':\n\tmain()\n" }, { "alpha_fraction": 0.5952380895614624, "alphanum_fraction": 0.5952380895614624, "avg_line_length": 9.5, "blob_id": "84f34513ebdb7d061614b88183fed30bd2da7c80", "content_id": "c7ce5c4cf26086324e91b9e519bc03835f6bb33d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 42, "license_type": "no_license", "max_line_length": 15, "num_lines": 4, "path": "/ChangePermissions.sh", "repo_name": "canevaa/CapstoneProject", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\nfiles=./*.py\nchmod +x $files\n" }, { "alpha_fraction": 0.6629445552825928, "alphanum_fraction": 0.6754893660545349, "avg_line_length": 28.97520637512207, "blob_id": "c537063bdba7828353e6cadd4d58ebb8120cc40a", "content_id": "b38fb038574783a92cece43b93395a4ddf148a01", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7254, "license_type": "no_license", "max_line_length": 80, "num_lines": 242, "path": "/Server-5000.py", "repo_name": "canevaa/CapstoneProject", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os\nimport socket\nimport pickle\nimport sys\nfrom Crypto import Random\nfrom Crypto.Cipher import AES\nfrom Crypto.PublicKey import RSA\n\n# global Variables\ni = 0\nlistlength = 5\nnextopenspot = 0\nlogfile = \"\"\n\n# gets the local IP address of the server\ndef get_local_ip():\n\ts = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n\ts.connect((\"gmail.com\",80))\n\tAddress = (s.getsockname()[0])\n\ts.close()\n\treturn Address\n\t# end get_local_ip()\n\n# check for zeus directory, create it if it doesn't exist\ndef check_log_dir():\n\tglobal logfile\n\tif not os.path.exists(os.path.expanduser('~') + '/.zeus'):\n\t\tprint 'zeus not found, creating 
directory\\n'\n\t\tos.makedirs(os.path.expanduser('~') + '/.zeus')\n\t\tprint 'Directory created\\n'\n\treturn\n\t# done check_log_dir()\n\n# Checks for the RSA key and returns whether it already exists or not\ndef check_key():\n\tglobal logfile\n\tlogfile.write('Checking For Key\\n')\n\tserverkeypath = os.path.expanduser('~') + '/.zeus/server.key'\n\tkeyfound = True\n\tif not os.path.exists(serverkeypath):\n\t\tlogfile.write('Server Key Not Found\\n')\n\t\tkeyfound = False\n\telse:\n\t\tlogfile.write('Found Server Key\\n')\n\treturn keyfound\n\t# end check_key()\n\n# checks to see if client key exists, if it doesn't then write it\ndef check_client_key(ClientA, exportedkey):\n\tglobal logfile\n\tkeypath = os.path.expanduser('~') + '/.zeus/' + ClientA + '.pub'\n\tkeyfound = True\n\tif not os.path.exists(keypath):\n\t\tprint \"in check client key\"\n\t\tlogfile.write('Public Key Not Found\\n')\n\t\tdumpfile = open(keypath, 'a+')\n\t\tlogfile.write('Public Key Added for: ')\n\t\tlogfile.write(ClientA)\n\t\tlogfile.write('\\n')\n\t\tdumpfile.write(exportedkey)\n\t\tdumpfile.close()\n\treturn\n\t# end check_client_key()\n\n# if key does not exist it is created\ndef create_key(ServerPass):\n\tserverkeypath = os.path.expanduser('~') + '/.zeus/server.key'\n\tdumpfile = open(serverkeypath, 'w')\n\tkey = RSA.generate(2048)\n\tserver_key = key.exportKey('PEM', passphrase=ServerPass, pkcs=1) \n\tdumpfile.write(server_key)\n\tdumpfile.close()\n\treturn\n\t# end create_key()\n\n# loads the server private key\ndef load_key(ServerPass):\n\tserverkeypath = os.path.expanduser('~') + '/.zeus/server.key'\n\tloadfile = open(serverkeypath, 'a+')\n\tserverkey = loadfile.read()\n\tkey = RSA.importKey(serverkey, ServerPass)\n\tloadfile.close()\n\treturn key\n\t# end load_key()\n\n# loads the client public key\ndef load_client_key(ClientA):\n\tkeypath = os.path.expanduser('~') + '/.zeus/' + ClientA + '.pub'\n\tloadfile = open(keypath, 'a+')\n\tclientkey = loadfile.read()\n\tkey = RSA.importKey(clientkey)\n\tloadfile.close()\n\treturn key\n\t# end load_client_key()\n\t\n# creates the server socket and leaves it in the listening state\ndef create_server_connection():\n\tglobal logfile\n\tlogfile.write('Creating Server Socket\\n')\n\tServerS = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n\tlogfile.write('Socket built successfully\\n')\n\tPort = 5000\n\tAddress = get_local_ip()\n\tServerS.bind((Address, Port))\n\tServerS.listen(5)\n\treturn ServerS\n\t# done create_server_connection()\n\n# Searches the list for the username\ndef search_list(mylist, namea, nameb):\n\tglobal logfile, listlength, i, nextopenspot\n\tflag = False\n\tx = 0\n\twhile x < listlength:\n\t\tif namea == mylist[x][1]:\n\t\t\tif nameb == mylist[x][0]:\n\t\t\t\tflag = True\n\t\t\t\ti = x\n\t\t\t\tlogfile.write(namea)\n\t\t\t\tlogfile.write(' found in list with ')\n\t\t\t\tlogfile.write(nameb)\n\t\t\t\tlogfile.write('\\n')\n\t\tx = x + 1\n\treturn flag\n\t# done search_list()\n\n# find next empty spot in list\ndef find_next_open(mylist):\n\t# nextopenspot is module-level state read by client_exchange(), so it must\n\t# be declared global here; otherwise the assignment below only creates a\n\t# local and the free slot is never recorded\n\tglobal nextopenspot\n\tx = 0 \n\tflag = True\n\twhile x < listlength and flag == True:\n\t\tif mylist[x][0] == '*':\n\t\t\tnextopenspot = x\n\t\t\tflag = False\n\t\tx = x + 1\n\treturn\n\t# done find_next_open()\n\n# this function creates the session for the users and calls most other functions\ndef client_exchange(sessionlist, ServerS, ServerPass):\n\tglobal logfile, i\n\tif not check_key():\n\t\tprint \"going to create_key\"\n\t\tcreate_key(ServerPass)\n\tserverkey = load_key(ServerPass)\n\t#Accept a client\n\tprint \"Waiting for 
Connection\\n\"\n\tClient, ClientAddr = ServerS.accept()\n\tlogfile.write(\"\\nConnected to: \")\n\tlogfile.write(repr(ClientAddr))\n\tlogfile.write(\"\\n\")\n\tpubkey = serverkey.publickey().exportKey()\n\tClient.send(pubkey)\n\t#getting info from Client\n\t#order is UserA, UserB, ClientIP\n\trecvdata = [\"\" for x in range(3)]\n\trecv_data_tmp = Client.recv(4096)\n\tClient.send(\"1738\")\n\trecvkey = Client.recv(4096)\n\trecvdata = pickle.loads(serverkey.decrypt(pickle.loads(recv_data_tmp)))\n\t#print '\\n', recvkey, '\\n'\n\tprint \"Received Key From \" + recvdata[0]\n\tcheck_client_key(recvdata[0], recvkey)\n\tclientkey = load_client_key(recvdata[0])\n\t#pass client name and sessionlist to search. \n\t#return False if not in list\n\t#return True if in list\n\tcheck = search_list(sessionlist, recvdata[0], recvdata[1])\n\t#if client is in connection list\n\t#send False for server, ip to connect on, port to connect to, iv, key\n\tif (check == True):\n\t\t#info to be sent to client\n\t\t#| False for server | ip to connect to | port | iv | key |\t\t\n\t\toutdata = [\"\" for x in range(5)]\n\t\toutdata[0] = False\n\t\toutdata[1] = sessionlist[i][2]\n\t\toutdata[2] = sessionlist[i][3]\n\t\toutdata[3] = sessionlist[i][4]\n\t\toutdata[4] = sessionlist[i][5]\n#\t\tout_data = clientkey.encrypt(pickle.dumps(outdata),32)\n\t\tClient.send(pickle.dumps(clientkey.encrypt(pickle.dumps(outdata),32)))\n\t\tsessionlist[i][0] = \"*\"\n\t\tsessionlist[i][1] = \"*\"\n\t\tsessionlist[i][2] = \"*\"\n\t\tsessionlist[i][3] = \"*\"\n\t\tsessionlist[i][4] = \"*\"\n\t\tsessionlist[i][5] = \"*\"\n\t\tlogfile.write(str(sessionlist))\n\t\t#remove from list b/c session was created\n\t#if client is not in connection list\n\t#send True for server, port, iv, key\n\tif (check == False):\n\t\tfind_next_open(sessionlist)\n\t\t#add to list in sessionlist[nextopenspot][0...]\n\t\tsessionlist[nextopenspot][0] = recvdata[0]\n\t\tsessionlist[nextopenspot][1] = recvdata[1]\n\t\tsessionlist[nextopenspot][2] = recvdata[2]\n\t\tsessionlist[nextopenspot][3] = 5005 + i\n\t\tsessionlist[nextopenspot][4] = Random.new().read(16)\n\t\tsessionlist[nextopenspot][5] = Random.new().read(16)\n\t\tlogfile.write(str(sessionlist))\n\t\t#info to be sent to client\n\t\t#| True for server | filler | port | iv | key |\n\t\toutdata = [\"\" for x in range(5)]\n\t\toutdata[0] = True\n\t\toutdata[1] = \"filler\"\n\t\toutdata[2] = sessionlist[nextopenspot][3]\n\t\toutdata[3] = sessionlist[nextopenspot][4]\n\t\toutdata[4] = sessionlist[nextopenspot][5]\n\t\tClient.send(pickle.dumps(clientkey.encrypt(pickle.dumps(outdata),32)))\n\tClient.close()\n\treturn sessionlist\n\t#done client_exchange()\n\n##############################################\n# #\n# Start Main, calling functions #\n# #\n##############################################\n\ndef main():\n\tglobal listlength, logfile\n\tprint \"Enter Password for Key: \"\n\tServerPass = raw_input()\n\tcheck_log_dir()\n\tlogfile = open(os.path.expanduser('~') + '/.zeus/connection.log', 'a+')\n\tlogfile.write(\"***File Opened***\\n\")\n\t# | ClientA(server) | ClientB(Client) | ClientAIP | Port | IV | Key\n\tsessionlist = [[\"*\" for x in range(6)] for x in range(listlength)]\n\n\t#Start Program\n\tServerS = create_server_connection()\n\twhile True:\n\t\tsessionlist = client_exchange(sessionlist, ServerS, ServerPass)\n\t\t#done while\n\tlogfile.write(\"***Closing File***\\n\")\n\tlogfile.close()\n\t#done main()\n\nif __name__ == '__main__':\n\tmain()\n" }, { "alpha_fraction": 0.7819032073020935, "alphanum_fraction": 
0.7876787781715393, "avg_line_length": 226, "blob_id": "62e5324ca17cd421163f3e89e7881ef55df99999", "content_id": "f60b00020f4b0f2bf4abbc101dd6d420ca864463", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3690, "license_type": "no_license", "max_line_length": 1490, "num_lines": 16, "path": "/README.md", "repo_name": "canevaa/CapstoneProject", "src_encoding": "UTF-8", "text": "Goal:\n\nAt a very high level, the idea of this program is simple. In one interaction we will have the the server, which we will just call “Server”, and two clients, called “Client A” and “Client B”. Client A will contact the server and say it wants to talk to Client B. The server will verify the identity of Client A and if everything checks out, it will send Client A a port to wait for a connection on and a symmetric key to use for encrypting the messages. When Client B contacts the Server asking to talk to Client A, the server will verify Client B as well, and then send it the IP, port and symmetric key. Client B can then connect to Client A and they can message each other securely. \n\n\nInstructions:\n\nTo be able to use our project, you will need some prerequisites. The first is the operating system. Because of the way sockets are handled and used, a Unix like operating system will need to be used. We have successfully used our project on the Computer Science department’s “fang” server, on Ubuntu 14.04.1 and on Mac with some extra configuration. Ultimately, any distribution should work as long as you are able to get the other prerequisites. The next will be python 2.7. We chose python as our language for our program because of its portability. Unfortunately, because of how sockets are handled in windows vs python, we decided to fixate on the Unix like operating systems, so the interoperability was not useful for our project. The last two prerequisite are pycrypto and git. This is a library that may not come with the normal packages included in python 2.7. Because of this you may need to install it separately. On mac you will need to install xcode to get pycrypto, while Ubuntu it works easily with the following command: “apt-get install python-crypto”. Git will be used to install and update our program. “Git clone https://github.com/ canevaa/CapstoneProject.git“ will be used to download the programs. “git pull in the directory will update it if there are any updates. Also, due to the point to point connection and NAT problem (covered in future works), the clients will be required to be on the same network. Otherwise they will not be able to connect to each other. \n\nAfter the prerequisites and the install, you are ready to start the program. First we will start with the server. We have a dedicated computer at Louie’s house which will act as our server. We port forwarded port 5000 to this server so it can receive connections from the internet. We will start running the server script by typing “python Server-5000.py” into the terminal in the “CapstonProject” directory. It will create the ~/.zeus directory which will hold all of our keys and log files. The server script will ask you to create a password for the server key, or ask you to enter the key if it has already been created. \n\nWith the server started and waiting for connections, we will move to ClientA. ClientA will install the program the same way and start the client code by entering “python Client-5000.py” into the terminal in the “CapstoneProject” directory. 
The program will ask for the current clients user name, which will be “ClientA”, and the user name of the client to connect to, which will be “ClientB”. If a key and password has not been created it will ask the user to create a password, and it will also ask the user to accept the server key. This is an important point of trust, just like OpenSSH has. Client B goes through the same steps on their side and after the connection they should be able to communicate freely. For screenshots please refer to the Results section. \n\nSecurity Notation:\n\nFuture Works:\n\n\n\n\n" }, { "alpha_fraction": 0.5743944644927979, "alphanum_fraction": 0.5951557159423828, "avg_line_length": 18.266666412353516, "blob_id": "97a6efb244f62a3c01df8540752be4f6ec4fa320", "content_id": "c142518e8bd1de2c5b08aa6159b2e9dd295e2acb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 289, "license_type": "no_license", "max_line_length": 56, "num_lines": 15, "path": "/ddos.py", "repo_name": "canevaa/CapstoneProject", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n#####################\nimport socket, sys, os \n\na=0\n\nwhile True: \n try: \n\ts = socket.socket(socket.AF_INET, socket.SOCK_STREAM) \n\ts.connect((\"lougreen.ddns.net\", 5000))\n\ts.send(\"Attack!!\") \n\tprint \"Attack Successful!\"\n\ts.close()\n except socket.error:\n\ta=a+1\n" } ]
5
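The Client-5000.py and Server-5000.py records above derive two AES-CFB session ciphers from a single 16-byte key and IV: cipher1 uses them directly for one direction of the chat, and cipher2 uses their MD5 digests so the reverse direction is keyed differently. Below is a minimal standalone sketch of that derivation, assuming Python 2 with pycrypto installed, as in the records; the key, IV, and sample plaintext here are local stand-ins, not values from the records.

from Crypto import Random
from Crypto.Cipher import AES
from Crypto.Hash import MD5

# stand-ins for the two values the rendezvous server creates with Random.new().read(16)
key = Random.new().read(16)
iv = Random.new().read(16)

# one long-lived stateful cipher per direction, as the clients in the record hold them
cipher1 = AES.new(key, AES.MODE_CFB, iv)
cipher2 = AES.new(MD5.new(key).digest(), AES.MODE_CFB, MD5.new(iv).digest())

ct = cipher1.encrypt("hello over the wire")
# a peer holding the same key/IV decrypts with its own matching cipher object
peer = AES.new(key, AES.MODE_CFB, iv)
print peer.decrypt(ct)

Hashing the forward key with MD5 keeps the two directions distinct, but by modern standards a dedicated key-derivation function (for example HKDF) and an authenticated mode would be the safer design.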
edadasko/space_invaders_cv
https://github.com/edadasko/space_invaders_cv
02163bffc1d1b313b442b02a4aac7a5caa4ac863
65d11d60cdb7e89078826617bf173c11213b48e4
9c7435eeaada8d52fde98741591fe6828763b268
refs/heads/master
2020-05-17T19:19:13.458512
2019-05-24T13:55:41
2019-05-24T13:55:41
183,911,500
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.5431132316589355, "alphanum_fraction": 0.5488994717597961, "avg_line_length": 34.82926940917969, "blob_id": "1a17ee115d3d1fa051a4cb9f968467b137e97aed", "content_id": "52ff85b3f2fb2ba34cbab2a0188fe6ff67130377", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8814, "license_type": "no_license", "max_line_length": 102, "num_lines": 246, "path": "/game.py", "repo_name": "edadasko/space_invaders_cv", "src_encoding": "UTF-8", "text": "import cv2.cv2 as cv2\nimport pygame\nimport game_objects\nimport interface\nimport control\nimport random\nimport animations\n\n\nclass Game:\n FREQUENCY_OF_ENEMIES = 10\n FREQUENCY_OF_BULLETS = 3\n FPS = 60\n CHANGE_SCORE = 1000\n\n def __init__(self):\n self.username = \"\"\n pygame.init()\n pygame.mixer.init()\n pygame.mixer.music.load('sounds/background.mp3')\n self.control = control.MouseControl\n\n self.main_clock = pygame.time.Clock()\n self.game_window = pygame.display.set_mode((interface.WINDOW_SIZE_X, interface.WINDOW_SIZE_Y))\n self.player = None\n self.background = game_objects.Background(self.game_window)\n\n self.health_indicator = interface.HealthIndicator(self.game_window)\n self.points_indicator = interface.PointsIndicator(self.game_window)\n\n self.enemies = []\n self.bullets = []\n self.explosions = []\n\n self.score = 0\n self.change_score = 0\n self.difficulty = 0\n\n self.enemy_checker = 0\n self.bullet_checker = 0\n self.boss_bullet_checker = 0\n\n self.is_boss = False\n self.ufos = [game_objects.StandardUFO, game_objects.BossUFO, game_objects.SideUFO]\n self.quantity_of_level_types = 3\n self.current_level_type = 0\n self.ufo = game_objects.StandardUFO\n\n self.pause = False\n\n pygame.display.set_caption('Space Invaders')\n pygame.mouse.set_visible(True)\n\n def main_menu(self):\n pygame.mixer.music.stop()\n self.set_all_to_zero()\n self.background.show()\n interface.show_main_menu(self)\n\n def statistics_menu(self):\n self.background.show()\n interface.show_statistics_menu(self)\n\n def top_menu(self):\n self.background.show()\n interface.show_top_menu(self)\n\n def choose_player_menu(self):\n self.background.show()\n interface.show_choose_player_menu(self)\n\n def change_user(self, username):\n self.username = username\n self.player = game_objects.Player(self.control, self.game_window, self.username)\n self.main_menu()\n\n def start(self, current_control):\n self.set_all_to_zero()\n pygame.mixer.music.play(-1)\n self.control = current_control\n self.player.change_control(self.control)\n pygame.mouse.set_visible(False)\n\n repeat = self.update_game_window()\n while repeat:\n repeat = self.update_game_window()\n\n pygame.mixer.music.stop()\n interface.show_lose_menu(self)\n\n def update_game_window(self):\n self.background.update()\n\n self.add_game_points()\n self.change_ufo_type()\n self.create_ufo()\n self.move_objects()\n self.update_ufos()\n self.create_explosions()\n self.create_bullets()\n self.show_indicators()\n pygame.display.update()\n\n self.check_exit()\n self.main_clock.tick(self.FPS)\n return self.check_players_health()\n\n def add_game_points(self):\n self.score += 1\n self.change_score += 1\n self.enemy_checker += 1\n self.bullet_checker += 1\n self.boss_bullet_checker += 1\n\n def change_ufo_type(self):\n if self.change_score > self.CHANGE_SCORE and not self.is_boss:\n self.difficulty += 1\n new_level_type = self.current_level_type\n while new_level_type == self.current_level_type:\n new_level_type = random.randint(0, 
self.quantity_of_level_types - 1)\n self.ufo = self.ufos[new_level_type]\n self.change_score = 0\n self.current_level_type = new_level_type\n\n def create_ufo(self):\n if self.ufo is game_objects.BossUFO and not self.is_boss:\n enemy = self.ufo(self.game_window, self.difficulty)\n enemy.create()\n self.enemies.append(enemy)\n self.is_boss = True\n elif self.ufo is not game_objects.BossUFO:\n if self.enemy_checker >= self.FREQUENCY_OF_ENEMIES:\n self.enemy_checker = 0\n enemy = self.ufo(self.game_window, self.difficulty)\n enemy.create()\n self.enemies.append(enemy)\n\n def move_objects(self):\n self.player.move()\n for en in self.enemies:\n en.move()\n for b in self.bullets:\n b.move()\n for ex in self.explosions:\n if not ex.update():\n self.explosions.remove(ex)\n\n def update_ufos(self):\n for en in self.enemies:\n if type(en) == game_objects.BossUFO and\\\n self.boss_bullet_checker > game_objects.BossUFO.FREQUENCY_OF_BULLETS and\\\n en.rect.centery >= game_objects.BossUFO.POSITION_Y:\n self.bullets.extend(en.shoot())\n self.boss_bullet_checker = 0\n if en.rect.top > interface.WINDOW_SIZE_Y or en.health < en.MIN_SIZE:\n if en.health < en.MIN_SIZE:\n self.score += en.size\n self.change_score += en.size\n self.explosions.append(animations.ExplosionAnimation(animations.SMALL,\n en.rect.center,\n self.game_window))\n if self.is_boss and type(en) == game_objects.BossUFO:\n self.is_boss = False\n self.score += en.size * 5\n self.change_score = self.CHANGE_SCORE + 1\n self.player.add_health()\n self.explosions.append(animations.ExplosionAnimation(animations.LARGE,\n en.rect.center,\n self.game_window))\n self.player.add_killed_enemy()\n self.enemies.remove(en)\n\n def create_explosions(self):\n for b in self.bullets:\n if type(b) == game_objects.PlayerBullet and b.is_collision(self.enemies):\n ex = animations.ExplosionAnimation(animations.SMALL,\n b.rect.center,\n self.game_window)\n self.explosions.append(ex)\n self.bullets.remove(b)\n continue\n if type(b) == game_objects.BossBullet and b.is_collision(self.player):\n ex = animations.ExplosionAnimation(animations.LARGE,\n self.player.rect.center,\n self.game_window)\n self.explosions.append(ex)\n self.bullets.remove(b)\n continue\n if b.rect.top < 0 or b.rect.top > interface.WINDOW_SIZE_Y:\n self.bullets.remove(b)\n\n if self.player.is_collision(self.enemies):\n self.explosions.append(animations.ExplosionAnimation(animations.LARGE,\n self.player.rect.center,\n self.game_window))\n\n def create_bullets(self):\n pressed = pygame.mouse.get_pressed()\n if self.bullet_checker > self.FREQUENCY_OF_BULLETS and \\\n (pressed[0] or self.control is control.CameraControl):\n self.bullets.extend(list(self.player.shoot()))\n self.bullet_checker = 0\n\n def show_indicators(self):\n self.health_indicator.show(self.player.health)\n self.points_indicator.show(self.score, self.player)\n\n def check_players_health(self):\n if self.player.health < 1:\n self.player.add_played_game()\n self.player.update_statistics(self.score)\n return False\n return True\n\n def check_exit(self):\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n return False\n if event.type == pygame.KEYDOWN:\n if event.key == pygame.K_ESCAPE:\n interface.show_pause_menu(self)\n\n if self.control is control.CameraControl:\n k = cv2.waitKey(30) & 0xff\n if k == 27:\n return False\n\n def set_all_to_zero(self):\n self.score = 0\n self.change_score = 0\n self.enemy_checker = 0\n self.bullet_checker = 0\n self.boss_bullet_checker = 0\n self.is_boss = False\n self.ufo = 
game_objects.StandardUFO\n self.difficulty = 0\n self.enemies = []\n self.bullets = []\n self.explosions = []\n\n if self.control is control.CameraControl:\n self.player.control.destroy()\n\n\ngame = Game()\ngame.choose_player_menu()\n" }, { "alpha_fraction": 0.4675391614437103, "alphanum_fraction": 0.5015807747840881, "avg_line_length": 36.88579559326172, "blob_id": "299f59b7af86b2e307596c9762b833ea1ebf840a", "content_id": "967ae90c49df18710eab894913a986fab698ac55", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 13601, "license_type": "no_license", "max_line_length": 115, "num_lines": 359, "path": "/interface.py", "repo_name": "edadasko/space_invaders_cv", "src_encoding": "UTF-8", "text": "import pygame\nimport control\nimport sys\nimport game_objects\nimport database\npygame.init()\n\nWINDOW_SIZE_X = 1800\nWINDOW_SIZE_Y = 1440\n\nWHITE = (255, 255, 255)\nBLACK = (255, 255, 255)\nRED = (255, 50, 0)\nNICE_BLUE = (0, 125, 255)\nSKY_BLUE = (135, 206, 235)\n\n\nclass Button:\n def __init__(self, game_window, x, y, w, h, color, text, action=None, sender=None):\n self.text = text\n self.action = action\n self.sender = sender\n self.color = color\n self.game_window = game_window\n self.rect = pygame.Rect(x, y, w, h)\n\n def draw(self):\n pygame.draw.rect(self.game_window, self.color, self.rect)\n draw_text(self.text, self.game_window, int(self.rect.h / 2), WHITE,\n self.rect.x + self.rect.w / 10, self.rect.y + self.rect.h / 3)\n\n def is_clicked(self, x, y):\n if self.rect.collidepoint(x, y):\n if self.text == \"Play with Camera Control\":\n self.action(control.CameraControl)\n elif self.text == \"Play with Mouse Control\":\n self.action(control.MouseControl)\n elif self.text == \"Play again\":\n self.action(self.sender.control)\n elif self.text == \"Choose\":\n if self.sender.text:\n self.action(self.sender.text)\n elif self.text == \"Delete player\":\n self.action(self.sender.text)\n self.sender.erase()\n elif self.text == \"Continue\":\n return True\n else:\n self.action()\n return True\n\n\nclass InputBox:\n COLOR_INACTIVE = WHITE\n COLOR_ACTIVE = SKY_BLUE\n FONT_SIZE = 70\n FONT = pygame.font.Font(None, FONT_SIZE)\n\n def __init__(self, screen, x, y, w, h, text=''):\n self.screen = screen\n self.w = w\n self.rect = pygame.Rect(x, y, w, h)\n self.color = self.COLOR_INACTIVE\n self.text = text\n self.txt_surface = self.FONT.render(text, True, self.color)\n self.active = False\n\n def handle_event(self, event):\n if event.type == pygame.MOUSEBUTTONDOWN:\n if self.rect.collidepoint(event.pos[0], event.pos[1]):\n self.active = not self.active\n else:\n self.active = False\n self.color = self.COLOR_ACTIVE if self.active else self.COLOR_INACTIVE\n if event.type == pygame.KEYDOWN:\n if self.active:\n if event.key == pygame.K_BACKSPACE:\n self.text = self.text[:-1]\n else:\n self.text += event.unicode\n self.txt_surface = self.FONT.render(self.text, True, self.color)\n\n def erase(self):\n self.text = \"\"\n self.txt_surface = self.FONT.render(self.text, True, self.color)\n self.draw()\n\n def draw(self):\n width = max(self.w, self.txt_surface.get_width() + 10)\n self.rect.w = width\n self.screen.blit(self.txt_surface, (self.rect.x + 5, self.rect.y + 5))\n pygame.draw.rect(self.screen, self.color, self.rect, 2)\n\n\ndef show_choose_player_menu(game):\n input_box = InputBox(game.game_window,\n WINDOW_SIZE_X / 2,\n WINDOW_SIZE_Y / 2 - 150,\n 500,\n 70)\n\n choose_button = Button(game.game_window,\n WINDOW_SIZE_X / 2 - 670,\n WINDOW_SIZE_Y / 2 
+ 300,\n 600, 100, NICE_BLUE,\n \"Choose\",\n game.change_user,\n input_box)\n\n delete_button = Button(game.game_window,\n WINDOW_SIZE_X / 2 + 70,\n WINDOW_SIZE_Y / 2 + 300,\n 600, 100, NICE_BLUE,\n \"Delete player\",\n database.delete_user,\n input_box)\n\n buttons = [choose_button, delete_button]\n\n done = False\n while not done:\n game.background.show()\n\n draw_text('Choose player', game.game_window, 100, WHITE,\n (WINDOW_SIZE_X / 2 - 250), (WINDOW_SIZE_Y / 10))\n\n draw_text('Enter your name:', game.game_window, 70, WHITE,\n (WINDOW_SIZE_X / 2 - 500), (WINDOW_SIZE_Y / 2 - 150))\n\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n done = True\n input_box.handle_event(event)\n if event.type == pygame.MOUSEBUTTONDOWN:\n for button in buttons:\n button.is_clicked(event.pos[0], event.pos[1])\n for button in buttons:\n button.draw()\n input_box.draw()\n pygame.display.update()\n\n\nclass HealthIndicator:\n SIZE = 100\n image = pygame.transform.scale(pygame.image.load('game_pictures/heart.png'), (SIZE, SIZE))\n\n def __init__(self, game_window):\n self.game_window = game_window\n self.rects = [pygame.Rect(0, 0, self.SIZE, self.SIZE),\n pygame.Rect(self.SIZE, 0, self.SIZE, self.SIZE),\n pygame.Rect(self.SIZE * 2, 0, self.SIZE, self.SIZE),\n pygame.Rect(self.SIZE * 3, 0, self.SIZE, self.SIZE),\n pygame.Rect(self.SIZE * 4, 0, self.SIZE, self.SIZE)]\n\n def show(self, num):\n for r in range(num):\n self.game_window.blit(self.image, self.rects[r])\n\n\nclass PointsIndicator:\n SIZE = 50\n COLOR = WHITE\n\n def __init__(self, game_window):\n self.game_window = game_window\n\n def show(self, points, player):\n draw_text('Score: %s' % points, self.game_window, self.SIZE, self.COLOR, 10, 110)\n if player.get_high_score() > points:\n draw_text('Record: %s' % player.get_high_score(), self.game_window, self.SIZE, self.COLOR, 10, 150)\n else:\n draw_text('New Record!', self.game_window, self.SIZE, self.COLOR, 10, 150)\n\n\ndef draw_text(text, surface, font_size, color, x, y):\n text_obj = pygame.font.SysFont(None, font_size).render(text, 1, color)\n text_rect = text_obj.get_rect()\n text_rect.topleft = (x, y)\n surface.blit(text_obj, text_rect)\n\n\ndef show_main_menu(game):\n pygame.mouse.set_visible(True)\n\n draw_text('SPACE INVADERS', game.game_window, 200, WHITE,\n (WINDOW_SIZE_X / 2 - 600), (WINDOW_SIZE_Y / 6))\n\n draw_text('Hello, ' + game.username, game.game_window, 70, WHITE,\n 50, 50)\n\n image_size = 300\n player_image_path = game_objects.Player.image_path\n image = pygame.transform.scale(pygame.image.load(player_image_path),\n (image_size, image_size))\n rect = pygame.Rect(WINDOW_SIZE_X / 2 - image_size / 2,\n WINDOW_SIZE_Y / 2 - image_size / 2 - 50,\n image_size, image_size)\n\n game.game_window.blit(image, rect)\n\n cam_button = Button(game.game_window,\n WINDOW_SIZE_X / 2 - 670,\n WINDOW_SIZE_Y / 2 + 250,\n 600, 100, NICE_BLUE,\n \"Play with Camera Control\",\n game.start)\n\n mouse_button = Button(game.game_window,\n WINDOW_SIZE_X / 2 + 70,\n WINDOW_SIZE_Y / 2 + 250,\n 600, 100, NICE_BLUE,\n \"Play with Mouse Control\",\n game.start)\n\n statistics_button = Button(game.game_window,\n WINDOW_SIZE_X / 2 - 670,\n WINDOW_SIZE_Y / 2 + 400,\n 600, 100, NICE_BLUE,\n \"Statistics\",\n game.statistics_menu)\n\n choose_player_button = Button(game.game_window,\n WINDOW_SIZE_X / 2 + 70,\n WINDOW_SIZE_Y / 2 + 400,\n 600, 100, NICE_BLUE,\n \"Change Player\",\n game.choose_player_menu)\n\n clicks_checked(cam_button, mouse_button, statistics_button, choose_player_button)\n\n\ndef 
show_statistics_menu(game):\n stats = game.player.statistics\n pygame.mouse.set_visible(True)\n draw_text('Your Statistics', game.game_window, 150, NICE_BLUE,\n (WINDOW_SIZE_X / 3 - 100), (WINDOW_SIZE_Y / 15))\n draw_text('Records', game.game_window, 150, NICE_BLUE,\n (WINDOW_SIZE_X / 5), (WINDOW_SIZE_Y / 4))\n for i in range(stats.RECORDS_COUNT):\n draw_text(str(i + 1)+\". \"+str(stats.records[i]), game.game_window, 150, NICE_BLUE,\n (WINDOW_SIZE_X / 5), (WINDOW_SIZE_Y / 4 + (i + 1) * 110))\n\n image_heart = pygame.transform.scale(pygame.image.load('game_pictures/heart.png'), (200, 200))\n rect_heart = pygame.Rect(WINDOW_SIZE_X / 2 + 20, WINDOW_SIZE_Y / 2 - 300, 200, 200)\n draw_text(str(stats.played_games), game.game_window, 300, NICE_BLUE,\n (WINDOW_SIZE_X / 2 + 270), (WINDOW_SIZE_Y / 2 - 290))\n game.game_window.blit(image_heart, rect_heart)\n\n image_ufo = pygame.transform.scale(pygame.image.load('ufo_pictures/ufo_2.png'), (200, 200))\n rect_ufo = pygame.Rect(WINDOW_SIZE_X / 2 + 20, WINDOW_SIZE_Y / 2, 200, 200)\n draw_text(str(stats.killed_enemies), game.game_window, 300, NICE_BLUE,\n (WINDOW_SIZE_X / 2 + 270), (WINDOW_SIZE_Y / 2))\n game.game_window.blit(image_ufo, rect_ufo)\n\n top_button = Button(game.game_window,\n WINDOW_SIZE_X / 2 - 670,\n WINDOW_SIZE_Y / 2 + 400,\n 600, 100, NICE_BLUE,\n \"Top Players\",\n game.top_menu)\n menu_button = Button(game.game_window,\n WINDOW_SIZE_X / 2 + 70,\n WINDOW_SIZE_Y / 2 + 400,\n 600, 100, NICE_BLUE,\n \"Main Menu\",\n game.main_menu)\n\n clicks_checked(menu_button, top_button)\n\n\ndef show_top_menu(game):\n records = database.get_global_records()\n pygame.mouse.set_visible(True)\n draw_text('Top Players', game.game_window, 150, NICE_BLUE,\n (WINDOW_SIZE_X / 2 - 300), (WINDOW_SIZE_Y / 15))\n for i in range(game_objects.Statistics.RECORDS_COUNT):\n draw_text(str(i + 1)+\". 
\" + records[i][1] + \" (\" + str(records[i][0]) + \")\", game.game_window, 150, WHITE,\n (WINDOW_SIZE_X / 5), (WINDOW_SIZE_Y / 9 + (i + 1) * 150))\n\n statistics_button = Button(game.game_window,\n WINDOW_SIZE_X / 2 - 670,\n WINDOW_SIZE_Y / 2 + 400,\n 600, 100, NICE_BLUE,\n \"Your statistics\",\n game.statistics_menu)\n menu_button = Button(game.game_window,\n WINDOW_SIZE_X / 2 + 70,\n WINDOW_SIZE_Y / 2 + 400,\n 600, 100, NICE_BLUE,\n \"Main Menu\",\n game.main_menu)\n\n clicks_checked(menu_button, statistics_button)\n\n\ndef show_lose_menu(game):\n pygame.mouse.set_visible(True)\n draw_text('GAME OVER', game.game_window, 150, RED,\n (WINDOW_SIZE_X / 2 - 300), (WINDOW_SIZE_Y / 2 - 300))\n draw_text('Your score: ' + str(game.score), game.game_window, 150, RED,\n (WINDOW_SIZE_X / 2 - 370), (WINDOW_SIZE_Y / 2 - 150))\n\n again_button = Button(game.game_window,\n WINDOW_SIZE_X / 2 - 670,\n WINDOW_SIZE_Y / 2 + 200,\n 600, 100, NICE_BLUE,\n \"Play again\",\n game.start, game)\n menu_button = Button(game.game_window,\n WINDOW_SIZE_X / 2 + 70,\n WINDOW_SIZE_Y / 2 + 200,\n 600, 100, NICE_BLUE,\n \"Main Menu\",\n game.main_menu)\n\n clicks_checked(again_button, menu_button)\n\n\ndef show_pause_menu(game):\n pygame.mouse.set_visible(True)\n continue_button = Button(game.game_window,\n WINDOW_SIZE_X / 2 - 670,\n WINDOW_SIZE_Y / 2 + 200,\n 600, 100, NICE_BLUE,\n \"Continue\")\n menu_button = Button(game.game_window,\n WINDOW_SIZE_X / 2 + 70,\n WINDOW_SIZE_Y / 2 + 200,\n 600, 100, NICE_BLUE,\n \"Main Menu\",\n game.main_menu)\n\n buttons = [continue_button, menu_button]\n cont = False\n while not cont:\n for event in pygame.event.get():\n if event.type == pygame.MOUSEBUTTONDOWN:\n cont = continue_button.is_clicked(event.pos[0], event.pos[1])\n menu_button.is_clicked(event.pos[0], event.pos[1])\n if event.type == pygame.QUIT:\n pygame.quit()\n sys.exit()\n for b in buttons:\n b.draw()\n pygame.display.update()\n\n\ndef clicks_checked(*buttons):\n while True:\n for event in pygame.event.get():\n if event.type == pygame.MOUSEBUTTONDOWN:\n for b in buttons:\n b.is_clicked(event.pos[0], event.pos[1])\n if event.type == pygame.QUIT:\n pygame.quit()\n sys.exit()\n for b in buttons:\n b.draw()\n pygame.display.update()\n" }, { "alpha_fraction": 0.557233989238739, "alphanum_fraction": 0.574689507484436, "avg_line_length": 35.76543045043945, "blob_id": "3fa6c4079e8e1e5bbba44a00831e47a70b76c453", "content_id": "8ad026077bfd34fa89d08871a461063a46099a62", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2979, "license_type": "no_license", "max_line_length": 108, "num_lines": 81, "path": "/control.py", "repo_name": "edadasko/space_invaders_cv", "src_encoding": "UTF-8", "text": "import interface\nimport pygame\nimport cv2.cv2 as cv2\nfrom abc import ABC, abstractmethod\n\n\nclass Control(ABC):\n def __init__(self, rect):\n self.rect = rect\n\n @abstractmethod\n def move_object(self):\n pass\n\n\nclass CameraControl(Control):\n WEBCAM_SIZE_X = 600\n WEBCAM_SIZE_Y = 600\n HAAR_CASCADE_PATH = 'haar_cascades/spaceship_cascade_medium.xml'\n MIN_STEP = 5\n\n def __init__(self, rect):\n Control.__init__(self, rect)\n self.spaceship_cascade = cv2.CascadeClassifier(self.HAAR_CASCADE_PATH)\n self.capture = cv2.VideoCapture(0)\n self.is_object_initialized = False\n self.last_x = 0\n\n def move_object(self):\n ret, img = self.capture.read()\n gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\n\n if not self.is_object_initialized:\n spaceship_detector = 
self.spaceship_cascade.detectMultiScale(gray, 1.3, 5)\n for (x, y, w, h) in spaceship_detector:\n self.is_object_initialized = True\n self.last_x = x\n self.rect.center = ((self.WEBCAM_SIZE_X - x) / self.WEBCAM_SIZE_X * interface.WINDOW_SIZE_X,\n interface.WINDOW_SIZE_Y * 0.8)\n return\n self.rect.center = (interface.WINDOW_SIZE_X / 2, interface.WINDOW_SIZE_Y * 0.8)\n return\n\n spaceship_detector = self.spaceship_cascade.detectMultiScale(gray, 1.3, 5)\n for (x, y, w, h) in spaceship_detector:\n cv2.rectangle(img, (x, y), (x + w, y + h), (255, 255, 0), 2)\n cv2.imshow('Camera Detection', img)\n step = (self.last_x - x) / self.WEBCAM_SIZE_X * interface.WINDOW_SIZE_X\n if abs(step) < self.MIN_STEP or \\\n x / self.WEBCAM_SIZE_X * interface.WINDOW_SIZE_X < 0 or \\\n x / self.WEBCAM_SIZE_X * interface.WINDOW_SIZE_X > interface.WINDOW_SIZE_X:\n return\n self.rect.move_ip(step, 0)\n self.last_x = x\n return\n\n def destroy(self):\n self.capture.release()\n cv2.destroyAllWindows()\n\n\nclass MouseControl(Control):\n def __init__(self, rect):\n Control.__init__(self, rect)\n self.is_object_initialized = False\n\n def move_object(self):\n if not self.is_object_initialized:\n self.rect.center = (interface.WINDOW_SIZE_X / 2, interface.WINDOW_SIZE_Y * 0.8)\n pygame.mouse.set_pos(self.rect.centerx, self.rect.centery)\n self.is_object_initialized = True\n return\n\n events = pygame.event.get()\n for event in events:\n if event.type == pygame.MOUSEMOTION:\n if (event.pos[0] < 0 or event.pos[0] > interface.WINDOW_SIZE_X\n or event.pos[1] < 0 or event.pos[1] > interface.WINDOW_SIZE_Y):\n return\n pygame.mouse.set_pos(self.rect.centerx, self.rect.centery)\n self.rect.move_ip(event.pos[0] - self.rect.centerx, 0)\n\n" }, { "alpha_fraction": 0.5884325504302979, "alphanum_fraction": 0.6026822924613953, "avg_line_length": 32.13888931274414, "blob_id": "22d30da3a3385d72092d2aac3641b249eaf6c3f9", "content_id": "75f53e84c93c162f5cebfc49c632c12e311aab93", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1193, "license_type": "no_license", "max_line_length": 88, "num_lines": 36, "path": "/animations.py", "repo_name": "edadasko/space_invaders_cv", "src_encoding": "UTF-8", "text": "import pygame\n\nSMALL = \"small\"\nLARGE = \"large\"\n\n\nclass ExplosionAnimation:\n animation_images = {SMALL: [], LARGE: []}\n\n for i in range(9):\n img = pygame.image.load('explosion_animation/regularExplosion0{}.png'.format(i))\n img_large = pygame.transform.scale(img, (250, 250))\n animation_images[LARGE].append(img_large)\n img_small = pygame.transform.scale(img, (150, 150))\n animation_images[SMALL].append(img_small)\n\n def __init__(self, size, center, game_window):\n self.size = size\n self.game_window = game_window\n self.image = self.animation_images[size][0]\n self.rect = self.image.get_rect()\n self.center = center\n self.rect.center = center\n self.frame = 0\n self.game_window.blit(self.image, self.rect)\n\n def update(self):\n self.frame += 1\n if self.frame >= len(self.animation_images[self.size]):\n return False\n else:\n self.image = self.animation_images[self.size][self.frame]\n self.rect = self.image.get_rect()\n self.rect.center = self.center\n self.game_window.blit(self.image, self.rect)\n return True\n" }, { "alpha_fraction": 0.6013771891593933, "alphanum_fraction": 0.6074981093406677, "avg_line_length": 26.808509826660156, "blob_id": "8022eaddd28a79275f25c325d7b8c65ce76a1f36", "content_id": "331b5e04e817e34ac542cb4e20fec2144e7410d6", "detected_licenses": [], 
"is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1307, "license_type": "no_license", "max_line_length": 68, "num_lines": 47, "path": "/database.py", "repo_name": "edadasko/space_invaders_cv", "src_encoding": "UTF-8", "text": "import game_objects\nfrom pymongo import MongoClient\n\n\ndef connect():\n client = MongoClient('localhost', 27017)\n db = client.space_invaders.users\n return db\n\n\ndef upload_user(stats):\n db = connect()\n data = db.find_one({\"username\": stats.username})\n if data:\n stats.records = data[\"records\"]\n stats.killed_enemies = data[\"killed_enemies\"]\n stats.played_games = data[\"played_games\"]\n else:\n stats.reset_data()\n save_user(stats)\n\n\ndef save_user(stats):\n db = connect()\n db.update({'username': stats.username},\n {'username': stats.username, 'records': stats.records,\n 'killed_enemies': stats.killed_enemies,\n 'played_games': stats.played_games}, upsert=True)\n\n\ndef delete_user(username):\n db = connect()\n db.remove({'username': username}, True)\n\n\ndef get_global_records():\n db = connect()\n users = db.find()\n tops = []\n for user in users:\n for i in range(game_objects.Statistics.RECORDS_COUNT):\n tops.append([user[\"records\"][i], user[\"username\"]])\n tops.sort(key=lambda x: x[0], reverse=True)\n for i in range(game_objects.Statistics.RECORDS_COUNT):\n if tops[i][0] == 0:\n tops[i] = [\"-\", \"-\"]\n return tops[:game_objects.Statistics.RECORDS_COUNT]\n" }, { "alpha_fraction": 0.5702438354492188, "alphanum_fraction": 0.5850058197975159, "avg_line_length": 34.154518127441406, "blob_id": "98687e106c41a8712339becbe3b0d3c535ce8e52", "content_id": "de8a4f423d53e3a27621f3eb9b15ba2a79b21680", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12058, "license_type": "no_license", "max_line_length": 119, "num_lines": 343, "path": "/game_objects.py", "repo_name": "edadasko/space_invaders_cv", "src_encoding": "UTF-8", "text": "import random\nimport pygame\nimport interface\nimport database\nfrom abc import ABC, abstractmethod\npygame.mixer.init()\n\n\nclass Player:\n SIZE_X = 200\n SIZE_Y = 200\n MAX_HEALTH = 5\n health = 3\n\n image_path = 'game_pictures/player.png'\n image = pygame.transform.scale(pygame.image.load(image_path),\n (SIZE_X, SIZE_Y))\n\n def __init__(self, current_control, game_window, player_name):\n self.statistics = Statistics(player_name)\n self.shoot_sound = pygame.mixer.Sound(\"sounds/player_shoot.wav\")\n self.shoot_sound.set_volume(0.5)\n self.collision_sound = pygame.mixer.Sound(\"sounds/explosion_1.wav\")\n self.collision_sound.set_volume(0.5)\n self.game_window = game_window\n self.rect = self.image.get_rect()\n self.control = current_control(self.rect)\n\n def move(self):\n self.control.move_object()\n self.game_window.blit(self.image, self.rect)\n\n def is_collision(self, enemies):\n for en in enemies:\n if self.rect.colliderect(en.rect):\n self.collision_sound.play()\n self.health -= 1\n enemies.remove(en)\n return True\n return False\n\n def shoot(self):\n self.shoot_sound.play()\n bullet_1 = PlayerBullet(self.game_window, self.rect.centerx - self.SIZE_X / 4, self.rect.centery, self)\n bullet_2 = PlayerBullet(self.game_window, self.rect.centerx + self.SIZE_X / 4, self.rect.centery, self)\n bullet_1.create()\n bullet_2.create()\n return bullet_1, bullet_2\n\n def add_health(self):\n if self.health < self.MAX_HEALTH:\n self.health += 1\n\n def update_statistics(self, score):\n for i in range(self.statistics.RECORDS_COUNT):\n if 
score > self.statistics.records[i]:\n self.statistics.records[i+1:self.statistics.RECORDS_COUNT] \\\n = self.statistics.records[i:self.statistics.RECORDS_COUNT - 1]\n self.statistics.records[i] = score\n break\n self.statistics.save_user_to_db()\n\n def add_killed_enemy(self):\n self.statistics.killed_enemies += 1\n\n def add_played_game(self):\n self.statistics.played_games += 1\n\n def get_high_score(self):\n return self.statistics.records[0]\n\n def change_control(self, control):\n self.health = 3\n self.control = control(self.rect)\n\n def delete_statistics(self):\n database.delete_user(self.statistics.username)\n\n\nclass Bullet(ABC):\n DAMAGE = 15\n SPEED = 30\n SIZE_X = 50\n SIZE_Y = 150\n\n surface = rect = None\n\n def __init__(self, game_window, x, y, owner):\n self.owner = owner\n self.game_window = game_window\n self.x = x\n self.y = y\n\n def create(self):\n self.game_window.blit(self.surface, self.rect)\n\n @abstractmethod\n def move(self):\n pass\n\n @abstractmethod\n def is_collision(self, objects):\n pass\n\n\nclass PlayerBullet(Bullet):\n image = pygame.transform.rotate(pygame.image.load(\"bullets_pictures/bullet.png\"), 90)\n collision_sound = pygame.mixer.Sound(\"sounds/explosion_2.wav\")\n collision_sound.set_volume(0.5)\n\n def __init__(self, game_window, x, y, owner):\n Bullet.__init__(self, game_window, x, y, owner)\n self.surface = pygame.transform.scale(self.image, (self.SIZE_X, self.SIZE_Y))\n self.rect = pygame.Rect(x, y, self.SIZE_X * 0.8, self.SIZE_Y * 0.8)\n self.rect.center = (x, y)\n\n def move(self):\n self.rect.move_ip(0, - self.SPEED)\n self.game_window.blit(self.surface, self.rect)\n\n def is_collision(self, objects):\n for en in objects:\n if type(en) != Player and self.rect.colliderect(en.rect):\n self.collision_sound.play()\n if type(en) == BossUFO:\n en.health -= int(self.DAMAGE / 5)\n elif type(en) == SideUFO:\n en.health -= int(self.DAMAGE / 2)\n else:\n en.health -= self.DAMAGE\n return True\n return False\n\n\nclass BossBullet(Bullet):\n images = [pygame.transform.rotate(pygame.image.load(\"bullets_pictures/boss_bullet.png\"), 0),\n pygame.transform.rotate(pygame.image.load(\"bullets_pictures/boss_bullet.png\"), -45),\n pygame.transform.rotate(pygame.image.load(\"bullets_pictures/boss_bullet.png\"), 45)]\n quantity_of_types = 3\n\n collision_sound = pygame.mixer.Sound(\"sounds/explosion_1.wav\")\n collision_sound.set_volume(0.5)\n\n def __init__(self, game_window, x, y, owner):\n Bullet.__init__(self, game_window, x, y, owner)\n self.type = random.randint(0, self.quantity_of_types - 1)\n self.surface = pygame.transform.scale(self.images[self.type], (self.SIZE_X * 2, self.SIZE_Y * 2))\n self.rect = pygame.Rect(x, y, self.SIZE_X * 0.8, self.SIZE_Y * 0.8)\n self.rect.center = (x, y)\n\n def move(self):\n if self.type == 0:\n self.rect.move_ip(0, self.SPEED)\n elif self.type == 1:\n self.rect.move_ip(-self.SPEED, self.SPEED)\n elif self.type == 2:\n self.rect.move_ip(self.SPEED, self.SPEED)\n self.game_window.blit(self.surface, self.rect)\n\n def is_collision(self, player):\n if type(player) == Player and self.rect.colliderect(player.rect):\n self.collision_sound.play()\n player.health -= 1\n return True\n return False\n\n\nclass UFO(ABC):\n MIN_SIZE = 200\n MAX_SIZE = 500\n min_speed = 10\n max_speed = 30\n\n image = None\n surface = None\n\n def __init__(self, game_window, difficulty):\n self.game_window = game_window\n self.size = random.randint(self.MIN_SIZE, self.MAX_SIZE)\n self.rect = pygame.Rect(random.randint(0, interface.WINDOW_SIZE_X - 
self.size),\n 0 - self.size, self.size, self.size * 0.8)\n self.speed = random.randint(self.min_speed, self.max_speed)\n self.speed += difficulty\n self.health = self.size\n\n def create(self):\n self.game_window.blit(self.surface, self.rect)\n\n @abstractmethod\n def move(self):\n pass\n\n\nclass StandardUFO(UFO):\n images = [pygame.image.load('ufo_pictures/ufo_2.png'),\n pygame.image.load('ufo_pictures/ufo_3.png')]\n count_of_images = 2\n\n def __init__(self, game_window, difficulty):\n UFO.__init__(self, game_window, difficulty)\n self.image = self.images[random.randint(0, self.count_of_images - 1)]\n self.surface = pygame.transform.scale(self.image, (self.size, self.size))\n\n def move(self):\n self.rect.move_ip(0, self.speed)\n if self.health > 0:\n self.surface = pygame.transform.scale(self.image, (self.health, self.health))\n center = self.rect.centerx, self.rect.centery\n self.rect.size = self.health, self.health * 0.8\n self.rect.center = center\n self.game_window.blit(self.surface, self.rect)\n\n\nclass SideUFO(UFO):\n MIN_SIZE = 100\n MAX_SIZE = 250\n max_speed = 20\n\n images = [pygame.image.load('ufo_pictures/back_ufo.png'),\n pygame.transform.flip(pygame.image.load('ufo_pictures/back_ufo.png'), True, False)]\n\n def __init__(self, game_window, difficulty):\n UFO.__init__(self, game_window, difficulty)\n self.type = random.randint(0, 1)\n if self.type == 0:\n self.rect = pygame.Rect(interface.WINDOW_SIZE_X + self.size,\n random.randint(- interface.WINDOW_SIZE_Y / 3, interface.WINDOW_SIZE_Y / 5),\n self.size * 2, self.size)\n\n else:\n self.rect = pygame.Rect(- self.size * 2,\n random.randint(- interface.WINDOW_SIZE_Y / 3, interface.WINDOW_SIZE_Y / 5),\n self.size * 2, self.size)\n\n self.image = self.images[self.type]\n self.surface = pygame.transform.scale(self.image, (self.size * 2, self.size))\n\n def move(self):\n if self.type == 0:\n self.rect.move_ip(-self.speed * 2, self.speed)\n else:\n self.rect.move_ip(self.speed * 2, self.speed)\n if self.health > 0:\n self.surface = pygame.transform.scale(self.image, (self.health * 2, self.health))\n center = self.rect.centerx, self.rect.centery\n self.rect.size = self.health * 2, self.health\n self.rect.center = center\n self.game_window.blit(self.surface, self.rect)\n\n\nclass BossUFO(UFO):\n MIN_SIZE = 300\n FREQUENCY_OF_BULLETS = 6\n POSITION_Y = interface.WINDOW_SIZE_Y / 7\n images = [pygame.image.load('ufo_pictures/boss_1.png'),\n pygame.image.load('ufo_pictures/boss_2.png')]\n count_of_images = 2\n\n shoot_sound = pygame.mixer.Sound(\"sounds/player_shoot.wav\")\n shoot_sound.set_volume(0.5)\n\n def __init__(self, game_window, difficulty):\n UFO.__init__(self, game_window, difficulty)\n pygame.mixer.init()\n self.difficulty = difficulty\n self.size = 800\n self.health = self.size\n self.speed = 10\n self.image = self.images[random.randint(0, self.count_of_images - 1)]\n self.surface = pygame.transform.scale(self.image, (self.size * 2, self.size))\n self.rect = pygame.Rect(interface.WINDOW_SIZE_X / 2 - self.size, 0 - self.size, self.size * 2, self.size * 0.6)\n\n def move(self):\n if self.rect.centery < self.POSITION_Y:\n self.rect.move_ip(0, self.speed)\n if self.health > 0:\n self.surface = pygame.transform.scale(self.image, (self.health * 2, self.health))\n center = self.rect.centerx, self.rect.centery\n self.rect.size = self.health * 2, self.health * 0.6\n self.rect.center = center\n self.game_window.blit(self.surface, self.rect)\n\n def shoot(self):\n self.shoot_sound.play()\n bullets = []\n for i in 
range(int(self.difficulty / 5) + 1):\n bullet = BossBullet(self.game_window,\n random.randint(self.rect.centerx - int(self.size / 3),\n self.rect.centerx + int(self.size / 3)),\n self.rect.centery + 100, self)\n bullet.create()\n bullets.append(bullet)\n return bullets\n\n\nclass Background:\n speed = 10\n image = pygame.transform.scale(pygame.image.load(\"game_pictures/space.png\"),\n (interface.WINDOW_SIZE_X, interface.WINDOW_SIZE_Y))\n\n def __init__(self, game_window):\n self.rects = [self.image.get_rect(), self.image.get_rect()]\n self.rects[0].center = interface.WINDOW_SIZE_X / 2, interface.WINDOW_SIZE_Y / 2\n self.rects[1].center = interface.WINDOW_SIZE_X / 2, - interface.WINDOW_SIZE_Y / 2\n self.game_window = game_window\n\n def show(self):\n self.game_window.blit(self.image, self.rects[0])\n self.game_window.blit(self.image, self.rects[1])\n\n def update(self):\n for r in self.rects:\n if r.centery > interface.WINDOW_SIZE_Y / 2 * 3:\n self.rects.remove(r)\n new_rect = self.image.get_rect()\n new_rect.center = interface.WINDOW_SIZE_X / 2, - interface.WINDOW_SIZE_Y / 2\n self.rects.append(new_rect)\n r.move_ip(0, self.speed)\n self.game_window.blit(self.image, r)\n\n\nclass Statistics:\n RECORDS_COUNT = 5\n killed_enemies = 0\n played_games = 0\n\n def __init__(self, username):\n self.username = username\n self.records = 0\n self.upload_user_from_db()\n\n def upload_user_from_db(self):\n database.upload_user(self)\n\n def save_user_to_db(self):\n database.save_user(self)\n\n def reset_data(self):\n self.records = []\n for i in range(self.RECORDS_COUNT):\n self.records.append(0)\n self.killed_enemies = 0\n self.played_games = 0\n" } ]
6
saketh1506/CODES
https://github.com/saketh1506/CODES
899a9c5510bd12280326c3e224c928c43d4f3ac2
7483daa973a47238775e96a2166819d6b17dc285
ae9cd89c18dab1792105c8ebb0327986d8d336cf
refs/heads/master
2022-04-26T13:28:24.488305
2020-04-19T13:38:41
2020-04-19T13:38:41
241,308,347
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.3741610646247864, "alphanum_fraction": 0.481543630361557, "avg_line_length": 24.913043975830078, "blob_id": "e01a31b3093b944b651cfd4d915d336898c924b3", "content_id": "9b7703af0bc0239ad2308a7cf44f3f3e9ab0e96d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 596, "license_type": "no_license", "max_line_length": 104, "num_lines": 23, "path": "/countprimes.py", "repo_name": "saketh1506/CODES", "src_encoding": "UTF-8", "text": "#COUNT NO:OF PRIMES INCLUDING THE NUMBER?\n\ndef countprimes(num):\n if num < 2: #for numbers 0,1\n return 0\n primes=[2] # for number 2\n x=3 #for number greater than 2 \n while x <= num:\n for y in range(3,x,2): #checks number is prime or not\n if x%y == 0:\n x += 2\n break\n else:\n primes.append(x)\n x += 2\n print(primes)\n return(len(primes))\n \n countprimes(100)\n \n #output:\n [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97]\n 25\n" }, { "alpha_fraction": 0.5234296917915344, "alphanum_fraction": 0.5613160729408264, "avg_line_length": 39.119998931884766, "blob_id": "a349d3c9db8c5aae5b517bc5f739a1368780a84b", "content_id": "ba64ec0f79ad1c56a1ec373a45ea568224e11aad", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1003, "license_type": "no_license", "max_line_length": 110, "num_lines": 25, "path": "/NUT_BUTTER_SALES CSV FILE CODE(ML).py", "repo_name": "saketh1506/CODES", "src_encoding": "UTF-8", "text": "#IMPORTING THE TOOLS REQUIRED TO CREATE FILES BY PANDAS N NUMPY\nimport pandas as pd\nimport numpy as np\n\n#CREATING THE SALES_AMOUNT ARRAY TO CREATE THE WEEKLY_SALES DATA FRAME\nsales_amounts = np.array([ [2,7,1],\n [9,4,16],\n [11,14,18],\n [13,13,16],\n [15,18,19], \n [10,8,12]])\n \n#CREATING WEEKLY_SALES DATA FRAME\nweekly_sales = pd.DataFrame(sales_amounts,\n index=['MONDAY','TUESDAY','WEDNESDAY','THURSDAY','FRIDAY','PRICES OF EACH ITEM'],\n columns=['ALMONDS BUTTER','PEANUT BUTTER','CASHEW BUTTER']) \n#CREATING THE PRICES ARRAY\nprices = np.array([10,8,12])\n\n#CALCULATING TOTAL_SALES FOR A WEEK WITH TOTAL COST OF EACH ITEM\ntotal_sales = prices.dot(sales_amounts.T)\ntotal_sales[5]=30\n\n#ADDING TOTAL COLUMN AND ITS VALUES TO THE WEEKLY_SALES DATAFRAME\nweekly_sales['TOTAL($)'] = total_sales.T\n" }, { "alpha_fraction": 0.6000000238418579, "alphanum_fraction": 0.6000000238418579, "avg_line_length": 20.66666603088379, "blob_id": "cc151818f1382ecb2a1bd911ad7d9e2c94d7df0b", "content_id": "6afa2d67bf3de5daef949e24af33ff5d0f38099f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 65, "license_type": "no_license", "max_line_length": 31, "num_lines": 3, "path": "/HWFiles.py", "repo_name": "saketh1506/CODES", "src_encoding": "UTF-8", "text": "x = open('test.txt',mode = 'w')\nx.write('Hello World')\nx.close()\n" }, { "alpha_fraction": 0.5056179761886597, "alphanum_fraction": 0.5056179761886597, "avg_line_length": 16.799999237060547, "blob_id": "0a42068e879bd13bd9bea0778c7b23380f69d922", "content_id": "941906aea7eb7c0d8eddb3023b112efb8a626283", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 89, "license_type": "no_license", "max_line_length": 20, "num_lines": 5, "path": "/Greater.py", "repo_name": "saketh1506/CODES", "src_encoding": "UTF-8", "text": "def is_greater(a,b):\n if a>b:\n return True\n elif a<=b:\n return False\n" }, { "alpha_fraction": 0.4429347813129425, 
"alphanum_fraction": 0.5244565010070801, "avg_line_length": 20.647058486938477, "blob_id": "753fe36ee419117c053c0d928700963e8fa1af19", "content_id": "26c4603f301d34b34237044260641c3d934b62c1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 368, "license_type": "no_license", "max_line_length": 83, "num_lines": 17, "path": "/SPY GAME.py", "repo_name": "saketh1506/CODES", "src_encoding": "UTF-8", "text": "#Write the program if the list of integers has 007 in order return TRUE else FALSE?\n\ndef spygame(nums):\n code = [0,0,7,'x']\n for num in nums:\n if num == code[0]:\n code.pop(0)\n return len(code) == 1\n \n spygame([1,2,4,0,0,7,5])\n spygame([1,0,2,4,0,5,7])\n spygame([1,7,2,0,4,5,0])\n \n #output: \n TRUE\n TRUE\n FALSE\n" }, { "alpha_fraction": 0.550000011920929, "alphanum_fraction": 0.5833333134651184, "avg_line_length": 29, "blob_id": "fad50d137caefe24dfe5dd3e20bd31d0d6cbc77f", "content_id": "b73f97c8efb49d4a16d07ccf469a32cfc69be2db", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 60, "license_type": "no_license", "max_line_length": 40, "num_lines": 2, "path": "/Even by args.py", "repo_name": "saketh1506/CODES", "src_encoding": "UTF-8", "text": "def myfunc(*args):\n return [n for n in args if n%2 == 0]\n" }, { "alpha_fraction": 0.38999998569488525, "alphanum_fraction": 0.4099999964237213, "avg_line_length": 19, "blob_id": "78aadd9b40bba71be958ebe83d562a59d10a0833", "content_id": "fdc3c7e8e099479a4cdf7458fcebeaa19cb78a8d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 100, "license_type": "no_license", "max_line_length": 28, "num_lines": 5, "path": "/EO.py", "repo_name": "saketh1506/CODES", "src_encoding": "UTF-8", "text": "for i in items:\n if i%2 == 0:\n print(f'even : {i}')\n else:\n print(f'odd : {i}')\n" } ]
7
Zatonskikh/RecognitionSearch
https://github.com/Zatonskikh/RecognitionSearch
d514bade4d2656c4c32bdff5715a532c97fb59d2
3ce6a0dce84e6cf11b3026b8d510b32e88c6949a
225cf72ef2a3177259d67111f26eb68e049937cc
refs/heads/master
2020-04-11T05:54:52.873767
2018-12-13T01:21:05
2018-12-13T01:21:05
161,563,971
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.800000011920929, "alphanum_fraction": 0.800000011920929, "avg_line_length": 45, "blob_id": "b0796779bd91db836c331315de81197c08d56a17", "content_id": "8459200226c7f9b1a55d231bc261d087eff30d2f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 45, "license_type": "no_license", "max_line_length": 45, "num_lines": 1, "path": "/blueprints/products_endpoint/__init__.py", "repo_name": "Zatonskikh/RecognitionSearch", "src_encoding": "UTF-8", "text": "from .init_blueprint import blueprint # noqa" }, { "alpha_fraction": 0.6960651278495789, "alphanum_fraction": 0.6974219679832458, "avg_line_length": 23.600000381469727, "blob_id": "2f2f1c8f747128932ea72886bff212a3068a5561", "content_id": "81d0a3f31244c8e1f2563e2bbf4fb4330247b63d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 737, "license_type": "no_license", "max_line_length": 80, "num_lines": 30, "path": "/app.py", "repo_name": "Zatonskikh/RecognitionSearch", "src_encoding": "UTF-8", "text": "from flask import Flask\nfrom logging import getLogger\n\nimport blueprints\nfrom extensions import cors, deep_detect_extension\nfrom settings import Config\n\nlogger = getLogger(__name__)\n\ndef create_app(config=Config):\n \"\"\"Create and set up app.\"\"\"\n app = Flask(__name__.split('.')[0])\n app.config.from_object(config)\n\n register_extensions(app)\n register_blueprints(app)\n\n logger.info(app.config)\n\n return app\n\ndef register_extensions(app):\n \"\"\"Register extensions.\"\"\"\n cors.init_app(app, resources={r'/*': {'origins': '*'}})\n deep_detect_extension.init_app(app)\n\n\ndef register_blueprints(app):\n \"\"\"Register Flask Blueprints.\"\"\"\n app.register_blueprint(blueprints.products_endpoint, url_prefix='/products')" }, { "alpha_fraction": 0.6134913563728333, "alphanum_fraction": 0.6216955184936523, "avg_line_length": 27.153846740722656, "blob_id": "861a30ad5d1b5f4d52f5a0812a559fb0ca53518e", "content_id": "0e3f4c4b975caf53dd7f7a911d27abe99c1aac89", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1097, "license_type": "no_license", "max_line_length": 76, "num_lines": 39, "path": "/blueprints/products_endpoint/helpers.py", "repo_name": "Zatonskikh/RecognitionSearch", "src_encoding": "UTF-8", "text": "import base64\nfrom flask import current_app as app\nfrom logging import getLogger\n\nlogger = getLogger(__name__)\n\ndef get_tags(image_bytes: bytes) -> list:\n \"\"\"Get tags according to image.\n\n Args:\n image_bytes:\n\n Returns:\n\n \"\"\"\n encoded_image = base64.b64encode(image_bytes)\n parameters_output = {\n 'best': app.config.get('RECOGNITION_SERVICE_TAG_AMOUNT')\n }\n data = [encoded_image.decode()]\n recognition_service_name = app.config.get('RECOGNITION_SERVICE_NAME')\n post_predict_result = app.deep_detect.post_predict(\n recognition_service_name, data, {}, {}, parameters_output)\n classes = []\n try:\n results_classes = post_predict_result['body']['predictions'][0][\n 'classes']\n except (KeyError, IndexError):\n logger.warning(f'Invalid answer from server: {post_predict_result}')\n return classes\n\n for i in results_classes:\n try:\n # delete class number\n classes.append(i['cat'].split(' ', 1)[1])\n except IndexError:\n classes.append(i['cat'])\n\n return classes" }, { "alpha_fraction": 0.747706413269043, "alphanum_fraction": 0.752293586730957, "avg_line_length": 23.22222137451172, "blob_id": 
"c68788257d518872376c6dbfab766b058a56d353", "content_id": "42c1f613e15d643d32f949493ecfe2009c9edacb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 218, "license_type": "no_license", "max_line_length": 44, "num_lines": 9, "path": "/extensions/extensions_initialization.py", "repo_name": "Zatonskikh/RecognitionSearch", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom flask_cors import CORS\nfrom flask_wtf import CSRFProtect\n\nfrom .flask_dd_client import FlaskDeepDetect\n\ncsrf_protect = CSRFProtect()\ncors = CORS()\ndeep_detect_extension = FlaskDeepDetect()\n" }, { "alpha_fraction": 0.675000011920929, "alphanum_fraction": 0.6833333373069763, "avg_line_length": 39, "blob_id": "263bc2210bc4ce966d9497307cb51601512e2d38", "content_id": "2522b2f5bc12f51dd1b32aec4c8021b05c613492", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 120, "license_type": "no_license", "max_line_length": 48, "num_lines": 3, "path": "/extensions/__init__.py", "repo_name": "Zatonskikh/RecognitionSearch", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom .extensions_initialization import ( # noqa\n csrf_protect, cors, deep_detect_extension)\n" }, { "alpha_fraction": 0.6281754970550537, "alphanum_fraction": 0.6289453506469727, "avg_line_length": 26.659574508666992, "blob_id": "3efbb578f2d7653cb62057bd02dcc7ca05638313", "content_id": "02f4ec9a546dd2b41b1bb30297cac2dbcfd61571", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1299, "license_type": "no_license", "max_line_length": 77, "num_lines": 47, "path": "/blueprints/products_endpoint/views.py", "repo_name": "Zatonskikh/RecognitionSearch", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n# ThirdParty Library\nimport imghdr\n# Standard Library\nfrom logging import getLogger\n\nfrom flask import current_app as app\nfrom flask import request\nfrom flask_restful import Resource\nfrom webargs import fields\nfrom webargs.flaskparser import use_args\nfrom werkzeug.exceptions import BadRequest\n\nfrom .helpers import get_tags\n\nlogger = getLogger(__name__)\n\n\nclass GetProducts(Resource):\n def post(self):\n result = []\n available_types = app.config[\"AVAILABLE_IMAGE_TYPES\"]\n if 'file' not in request.files:\n raise BadRequest(\"Invalid multiple key for files\")\n for image in request.files.getlist(\"file\"):\n if not bool(image.filename) or imghdr.what(\n image) not in available_types:\n continue\n image_bytes = image.read()\n tags = get_tags(image_bytes)\n part_result = {'tags': tags}\n # TODO: add api call for some service\n result.append(part_result)\n return result\n\n\n# example with args\n# class GetProducts(Resource):\n# args = {\n# \"type\":\n# fields.Str(required=True, validate=lambda x: x in Config.SOME_LIST)\n# }\n#\n# @use_args(args, locations=(\"query\",))\n# def post(self, args):\n# *method_body*" }, { "alpha_fraction": 0.7706422209739685, "alphanum_fraction": 0.7706422209739685, "avg_line_length": 20.799999237060547, "blob_id": "6fea8a8b1610b5818a20665851673a753191af4b", "content_id": "5dd336b097e86e5c3d21bcaf45f33648e8975944", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 218, "license_type": "no_license", "max_line_length": 52, "num_lines": 10, "path": "/blueprints/products_endpoint/init_blueprint.py", "repo_name": "Zatonskikh/RecognitionSearch", "src_encoding": "UTF-8", "text": "from 
flask import Blueprint\nfrom flask_restful import Api\n\nfrom .views import GetProducts\n\nblueprint = Blueprint('products_endpoint', __name__)\n\napi = Api(blueprint)\n\napi.add_resource(GetProducts, '/suggest_products')\n" }, { "alpha_fraction": 0.5459533333778381, "alphanum_fraction": 0.5528120994567871, "avg_line_length": 39.55555725097656, "blob_id": "304feba5d9401f80974ea62d05c5556cee30344f", "content_id": "9a9a6ff0348f53e9e0d4f0d25a38f4d7996f8ff7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 729, "license_type": "no_license", "max_line_length": 77, "num_lines": 18, "path": "/settings.py", "repo_name": "Zatonskikh/RecognitionSearch", "src_encoding": "UTF-8", "text": "import os\n\n\nclass Config(object):\n DD_ENDPOINT = ''\n API_ENDPOINT = ''\n AVAILABLE_IMAGE_TYPES = ['jpg', 'jpeg', 'png']\n\n RECOGNITION_SERVICE_HOST = os.environ.get('RECOGNITION_SERVICE_HOST',\n '%SERVICE_IP%')\n RECOGNITION_SERVICE_PORT = int(\n os.environ.get('RECOGNITION_SERVICE_PORT', 8049))\n RECOGNITION_SERVICE_SCHEME = os.environ.get('RECOGNITION_SERVICE_SCHEME',\n 'https')\n RECOGNITION_SERVICE_TAG_AMOUNT = int(\n os.environ.get('RECOGNITION_SERVICE_TAG_AMOUNT', 3))\n RECOGNITION_SERVICE_NAME = os.environ.get('RECOGNITION_SERVICE_NAME',\n 'imageserv')" }, { "alpha_fraction": 0.6507936716079712, "alphanum_fraction": 0.6560846567153931, "avg_line_length": 16.272727966308594, "blob_id": "db67522dac0d68afeacc5b10e9f28745afe05336", "content_id": "feba8149b3acbe90f9dd0450becc8321b7ca2f12", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 189, "license_type": "no_license", "max_line_length": 40, "num_lines": 11, "path": "/wsgi.py", "repo_name": "Zatonskikh/RecognitionSearch", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom flask.helpers import get_debug_flag\n\nfrom app import create_app\nfrom settings import Config\n\napp = create_app(Config)\n\nif __name__ == \"__main__\":\n app.run()" }, { "alpha_fraction": 0.8260869383811951, "alphanum_fraction": 0.8260869383811951, "avg_line_length": 69, "blob_id": "7bd90cdd867fa1393c7de4cb51376b8a2988b037", "content_id": "f8c01cc845ce4890a17ff01922eeb0016c38a77b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 69, "license_type": "no_license", "max_line_length": 69, "num_lines": 1, "path": "/blueprints/__init__.py", "repo_name": "Zatonskikh/RecognitionSearch", "src_encoding": "UTF-8", "text": "from .products_endpoint import blueprint as products_endpoint # noqa" }, { "alpha_fraction": 0.5750315189361572, "alphanum_fraction": 0.5800756812095642, "avg_line_length": 27.321428298950195, "blob_id": "6056628898fa9873092330f667bdc30d1f870cc4", "content_id": "add3171a1d8f1f9344d2ab4785ff802b84335f81", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 793, "license_type": "no_license", "max_line_length": 61, "num_lines": 28, "path": "/extensions/flask_dd_client/__init__.py", "repo_name": "Zatonskikh/RecognitionSearch", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom flask import Flask\nfrom .dd_client import DD\n\n\nclass FlaskDeepDetect(object):\n \"\"\"Work with deepdetect.\"\"\"\n\n def __init__(self, app: Flask = None):\n \"\"\"Create instance of the class.\"\"\"\n\n if app is not None:\n self.init_app(app)\n\n def init_app(self, app):\n \"\"\"Initialization of deep detect api.\"\"\"\n\n host = 
app.config.get('RECOGNITION_SERVICE_HOST')\n port = app.config.get('RECOGNITION_SERVICE_PORT')\n scheme = app.config.get('RECOGNITION_SERVICE_SCHEME')\n\n # deep detect client - proto == 0 -> http, else https\n proto = 0\n if scheme == 'https':\n proto = 1\n dd = DD(host, port=port, proto=proto)\n dd.set_return_format(dd.RETURN_PYTHON)\n app.deep_detect = dd\n" } ]
11
doberan/rebarseSample
https://github.com/doberan/rebarseSample
ccbf69d533d80e2f6095c06dd068d69704807f9d
21998140a3f2c18bc32c125f2cc1e247994baec8
fea9802eab610241dcb0aafbffba1b37c3be1562
refs/heads/master
2021-07-02T06:44:40.082507
2017-09-17T14:23:39
2017-09-17T14:23:39
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6623376607894897, "alphanum_fraction": 0.7142857313156128, "avg_line_length": 16.22222137451172, "blob_id": "75742c604539c3cc2a085604b1015c3183bab09d", "content_id": "320494b876a84fd7ca9d3fbe2a848c52c24c55e9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 154, "license_type": "no_license", "max_line_length": 44, "num_lines": 9, "path": "/chapter1/printf.py", "repo_name": "doberan/rebarseSample", "src_encoding": "UTF-8", "text": "'''\nCreated on 2017/09/04\n\n'''\nfrom ctypes import cdll\n\nmsvcrt = cdll.msvcrt\nmessage_string = \"hello world\\n\"\nmsvcrt.printf(\"Testing: %s\", message_string)" }, { "alpha_fraction": 0.6423841118812561, "alphanum_fraction": 0.7086092829704285, "avg_line_length": 15.777777671813965, "blob_id": "59a26f34074a2d9bbb94391ea2faeb7d5c9d289e", "content_id": "1bf71a4791d5924228f84a980657b6b897ab6b34", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 151, "license_type": "no_license", "max_line_length": 48, "num_lines": 9, "path": "/chapter1/my_test.py", "repo_name": "doberan/rebarseSample", "src_encoding": "UTF-8", "text": "'''\nCreated on 2017/09/04\n\n@author: doberan\n'''\nimport my_debugger\n\ndebugger = my_debugger.debugger()\ndebugger.load(\"C:\\\\WINDOWS\\\\system32\\\\calc.exe\")\n" } ]
2
flipdot/sopel-modules
https://github.com/flipdot/sopel-modules
a8a87179116b318ed59661bda850e87f09093863
268460760ee6157ed5b52c568b8e7deefd8fd4db
925f92792bd5da75bc80304c27524031b177f585
refs/heads/master
2021-12-15T03:38:40.772612
2021-11-13T15:46:22
2021-11-14T11:02:25
44,775,100
2
1
null
2015-10-22T21:46:08
2020-10-23T15:32:04
2021-11-14T11:02:26
Python
[ { "alpha_fraction": 0.6656656861305237, "alphanum_fraction": 0.6716716885566711, "avg_line_length": 20.717391967773438, "blob_id": "6c00c236c26a9d6753f69008330b9c23b6eada91", "content_id": "4f5c7b7c6863cf9f3530867ddd8d42635f21693a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 999, "license_type": "no_license", "max_line_length": 61, "num_lines": 46, "path": "/mqtt/__init__.py", "repo_name": "flipdot/sopel-modules", "src_encoding": "UTF-8", "text": "import json\nfrom threading import Thread\n\nimport sopel\nimport paho.mqtt.client as mqtt\n\nMQTT_HOST = \"power-pi.fd\"\nMQTT_TOPIC = \"actors/all/flipbot_send\"\n\n# For more information\n# https://github.com/myano/jenni/wiki/IRC-String-Formatting\nCOLOR_IOT = \"\\x0307\" # orange\nCOLOR_RESET = \"\\x0F\"\nCOLOR_PREFIX = \"[{}iot{}]\".format(COLOR_IOT, COLOR_RESET)\n\nbot = None\n\n\ndef on_mqtt_connect(client, userdata, flags, result):\n client.subscribe(MQTT_TOPIC)\n\n\ndef on_mqtt_message(client, userdata, msg):\n msg_obj = json.loads(msg.payload.decode(\"utf-8\"))\n msg = \"{} {}\".format(COLOR_PREFIX, msg_obj[\"content\"])\n\n for c in bot.config.core.channels:\n bot.msg(c, msg)\n\n\ndef mqtt_main():\n client = mqtt.Client()\n client.on_connect = on_mqtt_connect\n client.on_message = on_mqtt_message\n\n client.connect(MQTT_HOST)\n client.loop_forever()\n\n\ndef setup(b):\n global bot\n bot = b\n\n mqtt_thread = Thread(target=mqtt_main)\n mqtt_thread.daemon = True\n mqtt_thread.start()\n" }, { "alpha_fraction": 0.5975136160850525, "alphanum_fraction": 0.6068376302719116, "avg_line_length": 30.909090042114258, "blob_id": "8954eb1f39e702510ef02b5422120b585787edbb", "content_id": "baca0408f920f84d25a7872cc2a3ec1b0cb0d729", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3861, "license_type": "no_license", "max_line_length": 122, "num_lines": 121, "path": "/grafana/__init__.py", "repo_name": "flipdot/sopel-modules", "src_encoding": "UTF-8", "text": "\"\"\"Grafana alerts webhook module for sopel\n\nAccepts authorized webhook alert requests and forms them to messages.\n\"\"\"\n\nimport base64\nimport threading\nimport sys\nfrom flask import Flask, abort\n\nfrom sopel import module\nfrom sopel.config.types import FilenameAttribute, StaticSection, ValidatedAttribute\n\n# Colored prefix\n# \\x03AA,BB\n# AA = foreground color\n# BB = background color\n# ,BB can be omitted\n#\n# For more information\n# https://github.com/myano/jenni/wiki/IRC-String-Formatting\n# http://www.mirc.co.uk/colors.html\nCOLOR_GRAFANA = '\\x0303' # green\nCOLOR_BOLD = '\\x02'\nCOLOR_RESET = '\\x0F'\nCOLOR_PREFIX = '[%sstats%s]' % (COLOR_GRAFANA, COLOR_RESET)\n\napp = Flask(__name__)\nbot_global = None\nflask_grafana_started = False\n\n\nclass GrafanaSection(StaticSection):\n announce_channel = ValidatedAttribute('announce_channel', default='#flipdot')\n webhook_user = ValidatedAttribute('webhook_user', default='CHANGEUSER')\n webhook_pass = ValidatedAttribute('webhook_pass', default='CHANGEPASS')\n webhook_port = ValidatedAttribute('webhook_port', int, default=4444)\n\n\ndef setup(bot):\n global app, bot_global, flask_grafana_started\n bot.config.define_section('grafana', GrafanaSection)\n bot_global = bot\n if not flask_grafana_started:\n threading.Thread(target=app.run,\n args=(),\n kwargs={'port': bot.config.grafana.webhook_port},\n ).start()\n flask_grafana_started = True\n\n\ndef shutdown(bot):\n func = 
None\n try:\n # 'request' is only bound inside an active request context\n from flask import request\n func = request.environ.get('werkzeug.server.shutdown')\n except RuntimeError:\n pass\n if func is not None:\n func()\n\n\[email protected]('/', methods=['POST'])\ndef webhook():\n global bot_global\n from flask import request\n\n # Authentication\n try:\n auth = request.headers.get('Authorization').split(' ')\n auth_type = auth[0]\n if auth_type != 'Basic':\n raise ValueError('Only basic auth (user/pass) is allowed!\\n')\n auth_login = base64.b64decode(auth[1]).decode('utf-8').split(':')\n auth_user = auth_login[0]\n auth_pass = auth_login[1]\n if auth_user != bot_global.config.grafana.webhook_user or auth_pass != bot_global.config.grafana.webhook_pass:\n raise ValueError('Wrong credentials! ({} / {})\\n'.format(auth_user, auth_pass))\n except Exception as e:\n sys.stdout.write(\"\\n\\nERROR:\\n{}\\n\\n\".format(e))\n sys.stdout.flush()\n abort(403)\n\n # Debug\n # sys.stdout.write(\"\\n\\nREQUEST:\\n{}\\n\\nHEADERS:\\n{}\\n\\nJSON:\\n{}\\n\\n\".format(request, request.headers, request.json))\n\n # Create IRC message\n try:\n json = request.json\n msgs = [\"{} {}{}{}: {}\".format(COLOR_PREFIX, COLOR_BOLD, json.get('ruleName'), COLOR_RESET, json.get('message'))]\n\n # Only show alerts, not \"OK\"s and not \"no data\"s\n # if json.get('state') == 'ok':\n if json.get('state') != 'alerting':\n abort(500)\n\n # Add numeric reason if available\n if json.get('evalMatches'):\n matches = ''\n for item in json.get('evalMatches'):\n if len(matches) > 1:\n matches += ', '\n matches += '{}: {}'.format(item.get('metric'), item.get('value'))\n msgs[0] = \"{} ({})\".format(msgs[0], matches)\n\n # Add image URL if available\n if json.get('imageUrl'):\n msgs.append(\"{}{}\".format(\" \", json.get('imageUrl')))\n except Exception as e:\n sys.stdout.write(\"\\n\\nERROR:\\n{}\\n\\n\".format(e))\n sys.stdout.flush()\n abort(500)\n\n for msg in msgs:\n # sys.stdout.write(\"\\n\\nMESSAGE:\\n{}\\n\\n\".format(msg))\n # sys.stdout.flush()\n bot_say(msg)\n return \"OK\\n\"\n\n\n\n\ndef bot_say(msg):\n global bot_global\n bot_global.say(msg, bot_global.config.grafana.announce_channel)\n" }, { "alpha_fraction": 0.5328533053398132, "alphanum_fraction": 0.5598559975624084, "avg_line_length": 33.44961166381836, "blob_id": "1f0f0b8dbaa7fafc4363996f5aecc86481d8507f", "content_id": "1af44a6bb8059b7c365f7d43d6a9bb4fd2797a25", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4444, "license_type": "no_license", "max_line_length": 307, "num_lines": 129, "path": "/chanlogs-display/__init__.py", "repo_name": "flipdot/sopel-modules", "src_encoding": "UTF-8", "text": "# coding=utf8\n\n# flask module\nimport sopel\nfrom sopel import module\nfrom flask import Flask, abort, request\nimport threading\n\n# display\nimport colorsys\nimport hashlib\nimport re\nimport pprint\nimport dateutil.parser\nimport os\n\napp = Flask(__name__)\nlocal_bot = None\n\ndef setup(bot):\n global local_bot\n global app\n local_bot = bot\n #start_new_thread(app.run,(),{'port': 9999})\n threading.Thread(target=app.run,\n args=(),\n kwargs={'host': '0.0.0.0', 'port': 11111},\n ).start()\n\n\ndef shutdown(bot):\n func = None\n try:\n # only available while a request context is active\n func = request.environ.get('werkzeug.server.shutdown')\n except RuntimeError:\n pass\n if func is not None:\n func()\n\[email protected]('/', methods=['GET'])\ndef flipdot_log():\n css = 
\"::-webkit-scrollbar,::-webkit-scrollbar-button,::-webkit-scrollbar-track,::-webkit-scrollbar-track-piece,::-webkit-scrollbar-thumb,::-webkit-scrollbar-corner,::-webkit-resizer{ background-color:red; };}\"\n ret = \"<html><head><meta http-equiv=\\\"refresh\\\" content='10; URL=/#end'> </head><body onLoad='setTimeout(function() { window.scrollTo(0, document.body.scrollHeight || document.documentElement.scrollHeight) }, 1);' style='font-family:monospace; background: black; color: white; font-size:38px; \"+css+\"'>\"\n\n chanlogs = getattr(local_bot.config.chanlogs, \"dir\", None)\n if chanlogs is None:\n raise ConfigurationError(\"Channel logs needs a 'dir' set.\")\n\n\n with open(\"%s/flipdot.log\" % chanlogs, \"r\") as f:\n f.seek (0, 2) # Seek @ EOF\n fsize = f.tell() # Get Size\n f.seek (max (fsize-2048, 0), 0) # Set pos @ last n chars\n lines = f.readlines() # Read to end\n\n lines = lines[-22:] # Get last 22 lines\n\n #stdin,stdout = os.popen2(\"tail -n 13 \"+chanlogs+\"/flipdot.log\")\n #stdin.close()\n #lines = stdout.readlines()\n lastname = \"\"\n backlog = []\n names = []\n for l in lines:\n pl = process_line(l)\n if pl is \"\":\n continue\n print(l, pl, len(pl))\n print(pl[0],pl[1],pl[2],pl[3])\n for p in pl:\n if p is not None:\n print(p)\n date, name, color, text = process_line(l)\n backlog.append((date, name, color, text))\n if not name in names:\n names.append(name)\n #stdout.close()\n for l in backlog:\n date, name, color, text = l\n if name == lastname:\n name = \"-\"\n else:\n lastname = name\n # highlight nicks in messages\n for n in names:\n text = text.replace(n, \"<font color=\\\"#%s\\\">%s</font>\" % (get_color(n), n))\n # highlight urls in messages\n text = re.sub(r'([a-z]+://[^ ]+/?)', '<font color=\"#14cc75\"><u>\\\\1</u></font>', text)\n\n ret += \"%s <font color=\\\"#%s\\\">%s</font> %s<br>\" % (date, color, name, text)\n if name == \"ERROR\":\n break\n\n ret += \"<div id=\\\"end\\\"></div></body></html>\"\n return ret\n\ndef get_color(string, n=76):\n n = float(n)\n color = colorsys.hsv_to_rgb((abs(hash(string)) % n) / n, .9, .8)\n return \"%x%x%x\" % (color[0]*255, color[1]*255, color[2]*255)\n\ndef process_line(line):\n try:\n # divide by: date user text\n # 1999-04-01T23:42:59+00:00 <nick_name> hello, world!\n # 2016-05-19T07:17:14+00:00 *** flipbot has joined #flipdot\n regex = re.compile(\"([^<]+) [<*]+ ?([^> ]*)>? (.*)?\")\n search = regex.search(line)\n if not search:\n return \"\"\n date = dateutil.parser.parse(search.group(1))\n datestr = date.strftime(\"<font color=\\\"#555\\\">%H:%M</font>\")\n name = search.group(2)\n #text = re.escape(search.group(3))\n text = search.group(3)\n text = re.sub(r\"&\",\"&amp;\",text)\n text = re.sub(r\"\\\"\",\"&quot;\",text)\n text = re.sub(r\"<\",\"&lt;\",text)\n text = re.sub(r\">\",\"&gt;\",text)\n color = get_color(name)\n return (datestr, name, color, text)\n except Exception as e:\n\t#for i in dir(e):\n\t#\tprint(i, getattr(e, i))\n try:\n if \"nothing to repeat\" in e.message:\n return(\"\", \"ERROR\", \"FF0000\", \"Wrong Python 2 version. 
Please update to at least version 2.7.9.\")\n return(\"\", \"ERROR\", \"FF0000\", e.message)\n except AttributeError as e2:\n return(\"\", \"ERROR\", \"FF0000\", \"LOG FILE IS BR&Ouml;KEN<br>%s<br>%s<br>%s\" % (e.args, e.with_traceback, dir(e)))\n" }, { "alpha_fraction": 0.6783919334411621, "alphanum_fraction": 0.6814070343971252, "avg_line_length": 25.83783721923828, "blob_id": "112aea4f7e25ef1ce8aaca79f7b66b5d9fcb2eb6", "content_id": "cb4a0f0caca2ef305e4e0fa7afa08fb936c130cf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 995, "license_type": "no_license", "max_line_length": 100, "num_lines": 37, "path": "/autovoice/__init__.py", "repo_name": "flipdot/sopel-modules", "src_encoding": "UTF-8", "text": "import sopel\nfrom time import sleep\nfrom sopel.module import interval, OP, VOICE\nimport threading\n\nINTERVAL = 300\nCHANNEL = '#flipdot'\n\nauto_voice_threads = {}\n\n\ndef set_voice(bot, nick):\n global auto_voice_threads\n bot.write(['MODE', CHANNEL, '+v', nick])\n if nick in auto_voice_threads.keys():\n auto_voice_threads.pop(nick)\n\n\n\[email protected]('.*')\[email protected](\"JOIN\")\ndef auto_voice_join(bot, trigger):\n global auto_voice_threads\n if trigger.host.startswith(\"gateway/shell/matrix.org\"):\n set_voice(bot, trigger.nick)\n else:\n auto_voice_threads[trigger.nick] = threading.Timer(INTERVAL, set_voice, [bot, trigger.nick])\n auto_voice_threads[trigger.nick].start()\n\n\[email protected]('.*')\[email protected](\"PART\", \"QUIT\")\ndef auto_voice_quit(bot, trigger):\n global auto_voice_threads\n if trigger.nick in auto_voice_threads.keys():\n auto_voice_threads[trigger.nick].cancel()\n auto_voice_threads.pop(trigger.nick)\n\n\n" }, { "alpha_fraction": 0.570576548576355, "alphanum_fraction": 0.5864810943603516, "avg_line_length": 17, "blob_id": "c20c2f109ee7abd393bcbaa1f4828560764b8a35", "content_id": "9a714ea66817daaaa8ff57131d0b4c9d490b82f0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 503, "license_type": "no_license", "max_line_length": 52, "num_lines": 28, "path": "/spacestatus/webserver.py", "repo_name": "flipdot/sopel-modules", "src_encoding": "UTF-8", "text": "import json\n\nfrom flask import Flask\nfrom flask import request\n\napp = Flask(__name__)\n\nglobal bot\n\[email protected](\"/msg\", methods=['POST'])\ndef hello():\n global bot\n\n # get_json(silent=True) returns None on malformed JSON, so guard before subscripting\n data = request.get_json(force=True, silent=True) or {}\n msg = data.get('msg')\n if msg and bot:\n for c in bot.config.core.channels:\n bot.msg(c, str(msg))\n\n return '{ \"status\": \"ok\" }'\n\ndef run_server(b):\n global bot\n bot = b\n app.run(host='0.0.0.0', port=7645)\n\nif __name__ == \"__main__\":\n run_server(None)" }, { "alpha_fraction": 0.6423841118812561, "alphanum_fraction": 0.6429967284202576, "avg_line_length": 28.509614944458008, "blob_id": "d2cdbe4124d03452514b80c27b19e12d06f09d2a", "content_id": "590524b37300453658b86112827625cbb843235b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3070, "license_type": "no_license", "max_line_length": 107, "num_lines": 104, "path": "/rss/__init__.py", "repo_name": "flipdot/sopel-modules", "src_encoding": "UTF-8", "text": "\"\"\"RSS module for sopel\n\nPrint information on changes of an RSS feed by polling and parsing it.\n\"\"\"\n\nimport re\nimport requests\nimport time\n\nfrom sopel import module\nfrom sopel.module import interval\nfrom sopel.config.types import StaticSection, 
ValidatedAttribute\nfrom email.utils import mktime_tz, parsedate_tz\n\ntry:\n import xml.etree.cElementTree as et\nexcept ImportError:\n print(\"cElementTree not found. Using slower Python implementation 'ElementTree' instead.\")\n import xml.etree.ElementTree as et\n\n\n# Time in seconds, that the bot reloads network metrics\nINTERVAL_UPDATE = 60\n\n# Colored prefix\n# \\x03AA,BB\n# AA = foreground color\n# BB = background color\n# ,BB can be omitted\n#\n# For more information\n# https://github.com/myano/jenni/wiki/IRC-String-Formatting\n# http://www.mirc.co.uk/colors.html\nCOLOR_NETWORK = '\\x0309' # light green\nCOLOR_BOLD = '\\x02'\nCOLOR_RESET = '\\x0F'\nCOLOR_PREFIX = '[%sblg%s]' % (COLOR_NETWORK, COLOR_RESET)\n\n\nclass RssSection(StaticSection):\n rss_url = ValidatedAttribute('rss_url', default='https://flipdot.org/blog/index.php?/feeds/index.rss2')\n announce_channel = ValidatedAttribute('announce_channel', default='#flipdot')\n\n\ndef setup(bot):\n bot.config.define_section('rss', RssSection)\n\n\n###@interval(INTERVAL_UPDATE)\ndef check_recent_changes(bot, force=False):\n \"\"\"Download recent changes xml file and print on diff with local cache\"\"\"\n announce_channel = bot.config.rss.announce_channel\n\n r = requests.get(bot.config.rss.rss_url)\n if r.status_code != 200:\n bot.say(\"{} Could not download recent entries\".format(COLOR_PREFIX), announce_channel)\n return\n\n rss = r.text.encode('utf-8')\n timestamp = bot.db.get_channel_value(bot.config.rss.announce_channel, 'rss_timestamp') or 0\n items = parse_xml(rss)\n\n for item in items:\n # pubDate is an RFC 822 date string; convert it to epoch seconds before comparing\n if mktime_tz(parsedate_tz(item['date'])) < timestamp:\n continue\n bot.say(\"{} {}{}{} blogged by {}:\".format(COLOR_PREFIX,\n COLOR_BOLD,\n item['title'],\n COLOR_RESET,\n item['author']), announce_channel)\n bot.say(\" {}\".format(item['url']), announce_channel)\n\n #timestamp = bot.db.set_channel_value(bot.config.rss.announce_channel, 'rss_timestamp', time.time())\n\n# Parses MoinMoin's RSS XML structure and gives back a list of dicts with the\n# following elements:\n# author\n# date\n# title\n# url\ndef parse_xml(xml_string):\n tree = et.fromstring(xml_string)\n items = []\n\n for item in tree.findall(\"item\"):\n author = item.find(\"author\").text\n date = item.find(\"pubDate\").text\n title = item.find(\"title\").text\n url = item.find(\"link\").text\n\n #author = re.sub(r\"Self:(.+)\", r\"\\1\", author)\n\n items.append({\"author\":author,\n \"date\":date,\n \"title\":title,\n \"url\":url})\n\n return items\n\nif __name__ == '__main__':\n # manual smoke test against a locally saved copy of the feed\n with open('/tmp/time.time/index.rss', 'r') as xml_file:\n xml_content = xml_file.read()\n print(parse_xml(xml_content))\n" }, { "alpha_fraction": 0.5983873009681702, "alphanum_fraction": 0.6088578701019287, "avg_line_length": 26.976430892944336, "blob_id": "9eaf3d8db21b4edcb7ff3d33554f852115d7ff06", "content_id": "8191e306832c1ea02183373005c761106e1eb1a7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8337, "license_type": "no_license", "max_line_length": 115, "num_lines": 297, "path": "/spacestatus/__init__.py", "repo_name": "flipdot/sopel-modules", "src_encoding": "UTF-8", "text": "# coding=utf8\nfrom __future__ import absolute_import\nfrom typing import Optional, Mapping\nimport sopel\nfrom sopel.module import commands, interval\nfrom threading import Thread\nimport paho.mqtt.client as mqtt\n\nfrom .webserver import run_server\n\nfrom sopel import module\n# from socketIO_client import SocketIO\n\nimport logging\nimport time\nimport requests\nimport 
json\nimport os\nimport sys\nimport sqlite3\nimport datetime\n\nINTERVAL = 60\nspace_status = None\nlast_motion = None\nmqtt_client = None\n\nlogger = logging.getLogger(__name__)\n\nCO2 = 3600\n\ndef setup(bot):\n global space_status\n global app\n global mqtt_client\n\n webserver_thread = Thread(target=run_server, args=(bot,))\n webserver_thread.daemon = True\n webserver_thread.start()\n\n mqtt_client = mqtt.Client()\n mqtt_client.connect('power-pi.fd')\n mqtt_client.loop_start()\n\n space_status = update_space_status()\n\n\ndef update_space_status() -> Mapping:\n global space_status\n\n try:\n r = requests.get('https://api.flipdot.org', timeout=5)\n if r.status_code == 200 and r.json().get('api', '0') == '0.13':\n return r.json()\n else:\n return space_status\n except:\n logger.exception('Failed to fetch spaceapi')\n return space_status\n\ndef get_sensor_val(name: str, field='value') -> Optional[float]:\n global space_status\n try:\n return space_status['state']['sensors'][name][0][field]\n except:\n return None\n\n\ndef get_sensor_location(sensor_type: str, location: str, state=None) -> Optional[Mapping]:\n global space_status\n if state is None:\n state = space_status\n\n locations = state['state']['sensors'][sensor_type]\n for obj in locations:\n if obj['location'] == location:\n return obj\n return None\n\n@interval(INTERVAL)\ndef update(bot, force=False) -> None:\n global space_status\n\n new_state = update_space_status()\n if new_state is None:\n return\n if space_status is None:\n space_status = new_state\n return\n\n if new_state['open'] != space_status['open']:\n\n new_power_status = ('🔌', 'hochgefahren') if new_state['open'] else ('⏸️', 'heruntergefahren')\n\n for c in bot.config.core.channels:\n bot.msg(c, f'{new_power_status[0]} Der Space wurde {new_power_status[1]}.')\n\n try:\n new_locked = get_sensor_location('door', 'locked', new_state)\n old_locked = get_sensor_location('door', 'locked')\n if not new_locked:\n return\n if old_locked['value'] != new_locked['value']:\n\n new_lock_state = ('🔐', 'abgeschlossen') if new_locked['value'] else ('🔓', 'aufgeschlossen')\n\n for c in bot.config.core.channels:\n bot.msg(c, f'{new_lock_state[0]} Der Space wurde {new_lock_state[1]}.')\n\n except KeyError:\n print('missing \\'door\\' sensor in fd api')\n finally:\n space_status = new_state\n\n@interval(CO2)\ndef co2(bot, force=False) -> None:\n co2_ppm = get_sensor_val('co2')\n if co2_ppm and co2_ppm > 2400:\n for c in bot.config.core.channels:\n bot.msg(c, f'Wir störben!!1! Mach sofort ein Fenster auf, der CO2-Wert ist zu hoch ({co2_ppm} ppm). 
🏭')\n\n\[email protected]('tuer', 'door')\ndef doorState(bot, trigger) -> None:\n global space_status\n y = space_status.get('state').get('open')\n if y is not None:\n status = 'auf' if y else 'zu'\n bot.say(f'Space ist {status}')\n else:\n bot.say('Space-Status is unbekannt :(')\n\n\[email protected]('temp', 'temperatur')\ndef temp(bot, trigger) -> None:\n temperature(bot, '', 'lounge')\n # temperature(bot, 'workshop_', 'kino');\n\n\ndef temperature(bot, room: str, room_name: str) -> None:\n global space_status\n\n if space_status is None:\n bot.say('Space status ist unbekannt')\n return\n\n for heiz in space_status.get('state')['sensors'].get('temperature', []):\n state = heiz['value']\n locate = heiz['location']\n\n if state > 28.0:\n zustand = 'heiß 🔥'\n elif state > 18.0:\n zustand = 'warm'\n elif state > 10.0:\n zustand = 'kalt'\n else:\n zustand = 'arschkalt ❄️'\n\n bot.say(f'In {locate} ist es aktuell {state:.2f}°C {zustand}.')\n\n\n\[email protected]('users')\ndef users(bot, trigger) -> None:\n global space_status\n if space_status is None:\n bot.say('Space status is unbekannt')\n return\n\n names = get_sensor_val('people_now_present', 'names')\n user_count = get_sensor_val('people_now_present')\n\n if not user_count:\n bot.say('Es ist niemand im Space')\n return\n\n # 'names' can be None when the sensor only reports a count\n names = names.split(',') if names else []\n user_count -= len(names)\n known = ', '.join(x for x in names)\n\n if user_count == 0:\n bot.say(f'Es sind im Space: {known}')\n elif len(names) == 0:\n bot.say(f'Es sind {user_count} unbekannte im Space')\n else:\n bot.say(f'Es sind {user_count} unbekannte und {known} im Space')\n\n\[email protected]('status')\ndef space_status_all(bot, trigger) -> None:\n doorState(bot, trigger)\n users(bot, trigger)\n temp(bot, trigger)\n\n\n@interval(60 * 60 * 24)\ndef clear_status_counter(bot, force=False) -> None:\n last = bot.db.get_channel_value('#flipdot', 'status_cnt') or datetime.datetime.now().month\n if datetime.datetime.now().month == last:\n return\n\n # TODO: Use with here? Does it work here?\n db = bot.db.connect()\n db.execute('DELETE FROM nick_values WHERE nick_values.key = \\'status_cnt\\'')\n db.commit()\n db.close()\n bot.db.set_channel_value('#flipdot', 'status_cnt', datetime.datetime.now().month)\n\n\n\n# TODO: Does this even still work? 
Remove it if it doesn't.\[email protected]('heizen', 'heatup', 'heizung')\[email protected]_chanmsg(message='Dieser Befehl muss im #flipdot channel eingegeben werden')\[email protected]_privilege(sopel.module.VOICE, 'Du darfst das nicht')\ndef heat(bot, trigger) -> None:\n global space_status\n\n bot.say('Kaputt')\n\n mqtt_names = {\n 'raum4': 'f376db',\n 'lounge': 'f391d8',\n 'm-shop': '4c857f'\n }\n cmd = trigger.group(2) or '20 all'\n cmds = cmd.split(' ')\n\n temp = cmds[0] if len(cmds) > 0 else 20\n room = cmds[1] if len(cmds) > 1 else 'all'\n\n if temp == 'ein':\n temp = '20'\n elif temp == 'aus':\n temp = '5'\n try:\n temp = int(temp)\n except ValueError as e:\n bot.say('Bitte eine natürliche Zahl in Grad Celsius angeben')\n return\n\n rooms = []\n if room == 'all':\n for k, v in mqtt_names.items():\n rooms.append(k)\n else:\n mqtt_name = mqtt_names[room]\n if not mqtt_name:\n bot.say(f'{room} existiert nicht')\n return\n rooms.append(room)\n\n for r in rooms:\n try:\n mqtt_client.publish(f'sensors/heater/{mqtt_names[r]}/fenster/setpoint', temp)\n bot.say(f'Stelle Heizung({r:s}) auf {temp:.2f}°C')\n except Exception as e:\n print(e)\n bot.say(f'Da ist ein Fehler aufgetreten ({r:s})')\n\n\[email protected]('essen')\ndef essen(bot, trigger) -> None:\n futter = bot.db.get_channel_value('#flipdot', 'hapahapa') or 'nix'\n bot.say(futter)\n\n\[email protected]('kochen')\ndef kochen(bot, trigger) -> None:\n if trigger.group(2) is None or len(trigger.group(2).split(' ')) < 2:\n bot.say('Bitte gib den Kochstatus nach folgendem Schmema ein, [Koch/Ansprechpartner] [Mahlzeit/Essen]')\n else:\n x = trigger.group(2).split(' ')\n msg = f'{x[0]} kocht {x[1]}'\n bot.db.set_channel_value('#flipdot', 'hapahapa', msg)\n bot.say('done')\n\n\[email protected]('futter')\ndef futter(bot, trigger) -> None:\n api_key = bot.config.spacestatus.forum_key\n res = requests.get(\n f'https://forum.flipdot.org/latest.json?api_key={api_key}&api_username=flipbot',\n headers={'Accept': 'application/json'},\n )\n topics = res.json()\n\n cooking_category_id = 19 # 'Kochen & Essen'\n\n cooking_topic = None\n for t in topics['topic_list']['topics']:\n if t['category_id'] == cooking_category_id:\n cooking_topic = t\n break\n\n cooking_topic_name = cooking_topic['title']\n bot.say('Futter: ' + cooking_topic_name)\n" }, { "alpha_fraction": 0.6063807606697083, "alphanum_fraction": 0.6125079393386841, "avg_line_length": 28.39751625061035, "blob_id": "7ae64fa996ba2589e2d465e065957316353ea71c", "content_id": "83705489cf9c8ba74226334e6d8ae7eeaf1087ec", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4735, "license_type": "no_license", "max_line_length": 87, "num_lines": 161, "path": "/covid/__init__.py", "repo_name": "flipdot/sopel-modules", "src_encoding": "UTF-8", "text": "\"\"\"Loads stats on Covid-19 for the city of Kassel using ArcGIS' API\"\"\"\nimport json\nimport locale\nimport re\nimport requests\n\nimport pandas as pd\n\nfrom bs4 import BeautifulSoup\nfrom datetime import datetime\nfrom sopel import module\nfrom sopel.config.types import StaticSection, ValidatedAttribute\n\n# Colored prefix\n# \\x03AA,BB\n# AA = foreground color\n# BB = background color\n# ,BB can be omitted\n#\n# For more information\n# https://github.com/myano/jenni/wiki/IRC-String-Formatting\n# http://www.mirc.co.uk/colors.html\nCOLOR_COVID = '\\x0304' # red\nCOLOR_BOLD = '\\x02'\nCOLOR_RESET = '\\x0F'\nCOLOR_PREFIX = '[%spsa%s]' % (COLOR_COVID, COLOR_RESET)\n\nPREFIX = '[psa]'\nKEY_TIME = 
'Aktualisierung'\n\nTS_FORMATS = [\n    'Stand: %d. %B %Y',\n    'Stand: %d. %B %Y; %H Uhr',\n    'Stand: %d. %B %Y; %H.%M Uhr',\n    'Stand: %d. %B %Y, %H Uhr',\n    'Stand: %d. %B %Y, %H.%M Uhr',\n    'Stand: %A, %d. %B %Y, %H.%M Uhr',\n]\n\nLOC_LUT = {\n    'Stadt Kassel': 'Kassel Stadt',\n    'Landkreis Kassel': 'Kassel Land',\n}\n\nREPR_LUT = {\n    'Fälle insgesamt': 'insgesamt',\n    'Genesene': 'genesen',\n    'Aktuell Infizierte': 'infiziert',\n    'Todesfälle': 'tot',\n}\n\n\nclass CovidSection(StaticSection):\n    announce_channel = ValidatedAttribute('announce_channel', default='#flipdot-covid')\n    url = ValidatedAttribute('url', default='https://kassel.de/coronavirus')\n\n\ndef update_check(ts_old, update_raw):\n    locale.setlocale(locale.LC_ALL, 'de_DE.UTF-8')\n    soup = BeautifulSoup(update_raw, 'html.parser')\n    ts_str = soup.find('h3', 'SP-Headline--paragraph').text\n    ts_new = None\n    for ts_format in TS_FORMATS:\n        try:\n            ts_new = datetime.strptime(ts_str, ts_format).strftime('%s')\n            break\n        except ValueError:\n            pass\n    if ts_new is None:\n        raise ValueError(f\"No time format could parse the raw string '{ts_str}'.\")\n    update_required = int(ts_old) < int(ts_new)\n    data_cases = pd.read_html(str(soup.table))[0].set_index('Ort')\n    return update_required, ts_new, data_cases\n\n\ndef update_repr(prefix, update_data_pre, add_prefix=False):\n    first_line = True\n    ret = ''\n    if add_prefix:\n        ret += f\"{prefix} \"\n    for loc in LOC_LUT.keys():\n        if not first_line:\n            first_ljust_len = len(LOC_LUT[list(LOC_LUT.keys())[0]]) - len(prefix)\n            ret += '\\n'.ljust(first_ljust_len)\n        first_line = False\n        ret += f\"{LOC_LUT[loc]}: \"\n        first_field = True\n        for k, v in REPR_LUT.items():\n            field = update_data_pre.loc[loc][k]\n            try:\n                if int(field) == 0:\n                    continue\n            except (TypeError, ValueError):\n                pass\n            if not first_field:\n                ret += ', '\n            first_field = False\n            ret += f\"{field} {v}\"\n    return ret\n\n\[email protected](5 * 60)\ndef covid_update(bot, dest=None, update_forced=False):\n    print(f\"{PREFIX} Checking COVID-19 data...\")\n    ts_old, cases = cache_load(bot)\n    req = requests.get(bot.config.covid.url)\n    if req.status_code != 200:\n        raise ConnectionError(\"Could not download covid data.\")\n    update_raw = req.text\n    update_required, ts_new, update_data = update_check(ts_old, update_raw)\n    if not update_required and not update_forced:\n        print(f\"{PREFIX} No update...\")\n        return\n    msg = update_repr(PREFIX, update_data)\n    if dest is None:\n        dest = bot.config.covid.announce_channel\n    for line in f\"{COLOR_PREFIX} {msg}\".split('\\n'):\n        bot.say(line, dest)\n    cache_save(bot, ts_new, update_data)\n    print(f\"{PREFIX} Updates cached...\")\n\n\[email protected]('covidclear')\ndef covid_clear(bot, trigger):\n    print(f\"{PREFIX} Clearing COVID-19 cache...\")\n    chan = bot.config.covid.announce_channel\n    bot.db.set_channel_value(chan, 'ts_old', 0)\n    bot.db.set_channel_value(chan, 'cases', None)\n\n\[email protected]('covid')\ndef covid_print(bot, trigger):\n    covid_update(bot)  # dest=trigger.nick, update_forced=True would answer the caller directly\n\n\ndef setup(bot):\n    bot.config.define_section('covid', CovidSection)\n\n\ndef cache_load(bot):\n    chan = bot.config.covid.announce_channel\n    ts_old = bot.db.get_channel_value(chan, 'ts_old') or 0\n    cases_raw = bot.db.get_channel_value(chan, 'cases')\n    cases = pd.read_json(cases_raw) if isinstance(cases_raw, str) else None\n    return ts_old, cases\n\n\ndef cache_save(bot, ts_old, update_data):\n    chan = 
bot.config.covid.announce_channel\n bot.db.set_channel_value(chan, 'ts_old', ts_old)\n bot.db.set_channel_value(chan, 'cases', update_data.to_json())\n" } ]
8
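A note on the timestamp handling in covid/__init__.py above: update_check() tries each entry of TS_FORMATS in turn, presumably because the wording of the page's "Stand: ..." line varied over time. Below is a minimal standalone sketch of that fallback parse, assuming a German locale is installed on the host; parse_stand is an illustrative name, not part of the module.

import locale
from datetime import datetime

TS_FORMATS = [
    'Stand: %d. %B %Y',
    'Stand: %d. %B %Y; %H.%M Uhr',
    'Stand: %d. %B %Y, %H.%M Uhr',
]

def parse_stand(ts_str):
    # %B must match German month names, so this needs the de_DE locale available
    locale.setlocale(locale.LC_TIME, 'de_DE.UTF-8')
    for fmt in TS_FORMATS:
        try:
            return datetime.strptime(ts_str, fmt)
        except ValueError:
            continue  # this format did not match; try the next one
    raise ValueError(f'no known format matches {ts_str!r}')

print(parse_stand('Stand: 3. März 2021, 14.30 Uhr'))  # -> 2021-03-03 14:30:00

The first format that parses wins, so more specific formats (with time components) should come before or after less specific ones consistently; the module lists the date-only format first, which is safe because strptime requires the whole string to match.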
crystalbowie/PerfTestTool
https://github.com/crystalbowie/PerfTestTool
3fdbd475ff912cb8ee0121daeac025ddcb49960f
9e319883454bfe369043394b9ad6140fcb4f2adb
24b3a6c67dd1dde5f79df04200b36e05e896e93c
refs/heads/master
2020-06-10T12:15:09.450834
2019-06-25T07:40:53
2019-06-25T07:40:53
193,644,315
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5528455376625061, "alphanum_fraction": 0.577235758304596, "avg_line_length": 14.375, "blob_id": "e524e02ed4b8575d5e03999a3c40675649aafdea", "content_id": "5125952de3ee0297a20ba775cc7648d530138822", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 123, "license_type": "no_license", "max_line_length": 31, "num_lines": 8, "path": "/PerfTestTool/shell/test.ksh", "repo_name": "crystalbowie/PerfTestTool", "src_encoding": "UTF-8", "text": "#!/usr/bin/ksh\nls -l\nif [ $# = 0 ] ; then\n echo \"Parameter is none.\";\nelse\n echo \"Parameter is multi.\";\nfi\nsleep 10;\n" }, { "alpha_fraction": 0.5963836908340454, "alphanum_fraction": 0.599754810333252, "avg_line_length": 27.935779571533203, "blob_id": "124cd9632cc43434e312f36deea1f141c1f9180d", "content_id": "a5bbebba3b0cbc1e476deb808ba571ba1da90ad4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3605, "license_type": "no_license", "max_line_length": 107, "num_lines": 109, "path": "/PerfTestTool/src/libs/aix/shell.py", "repo_name": "crystalbowie/PerfTestTool", "src_encoding": "UTF-8", "text": "#\r\n# -*- coding: utf-8 -*-\r\n#\r\n#\t【機能】\r\n#\t\tAIXでshellコマンドを実行する。\r\n\r\nimport os, re, tempfile, time\r\nfrom fabric.api import execute, hide, env, get, run\r\n\r\nfrom libs.config.manager import manager\r\nfrom libs.logger.logger import logger\r\n\r\nclass shell(object):\r\n\t'''\r\n\tclassdocs\r\n\t'''\r\n\r\n\r\n\t# コンストラクタ\r\n\t# path\t: 設定ファイルのパス\r\n\tdef __init__(self, path):\r\n\t\tself.sets = manager(path)\r\n\t\tself.log = logger(self.sets.get('logging', 'file')).get()\r\n\r\n\r\n\t# コマンド解析\r\n\t#\tcmd: コマンド\r\n\t# @return: 実行コマンド、パラメータ群\r\n\tdef parse_command(self, cmd):\r\n\t\tsplit_list = re.split(' +', cmd)\r\n\t\treturn split_list[0], split_list[1:]\r\n\r\n\r\n\t# テンポラリファイル作成\r\n\t#\tsrc_path\t\t:\tソースパス\r\n\t#\tparams\t\t\t:\tパラメータ群\r\n\t#\ttmp_dir_name\t:\tテンポラリディレクトリ名\r\n\t# @return: テンポラリファイルパス\r\n\tdef creat_tmp_file(self, src_path, params, tmp_dir_name):\r\n\t\twith open(src_path, 'r') as f_src:\r\n\t\t\tsrc_str = f_src.read()\r\n\t\t\tif len(params) > 0:\r\n\t\t\t\tdst_str = f'set -- {\" \".join(params)}\\n' + src_str\r\n\t\t\telse:\r\n\t\t\t\tdst_str = src_str\r\n\t\t\tdst_str = dst_str.replace('\\r', '')\r\n\t\t\tdst_path = os.path.join(tmp_dir_name, next(tempfile._get_candidate_names()))\r\n#\t\t\tdst_path = os.path.join(tmp_dir_name, os.path.basename(src_path))\r\n\t\t\twith open(dst_path, 'w') as f_dst:\r\n\t\t\t\tf_dst.truncate()\r\n\t\t\t\tf_dst.write(dst_str)\r\n\t\t\tself.log.info(dst_str)\r\n\t\treturn dst_path\r\n\r\n\r\n\t# shellのタスク\r\n\t#\tparams\t:\t[ 実行コマンド群 ]\r\n\tdef shell_task(self, params):\r\n\t\twith tempfile.TemporaryDirectory() as tmp_dir_name:\r\n\t\t\tfor execs in params:\r\n\t\t\t\tfor _ in range(0, execs['loop'], 1):\r\n\t\t\t\t\tfor cmd in execs['commands']:\r\n\t\t\t\t\t\tsrc_path, params = self.parse_command(cmd)\r\n\t\t\t\t\t\tdst_path = self.creat_tmp_file(src_path, params, tmp_dir_name)\r\n\t\t\t\t\t\tself.log.info(f'src_path={src_path}, params={params}, dst_path={dst_path}')\r\n\t\t\t\t\t\twith open(dst_path, 'r') as fd:\r\n\t\t\t\t\t\t\tsh_str = fd.read()\r\n\t\t\t\t\t\tres = run(sh_str)\r\n\t\t\t\t\t\tself.log.info(res.replace('\\r', ''))\r\n\t\treturn\r\n\r\n\r\n\t# ログ取得のタスク\r\n\t#\tparams\t:\t[ ログファイル群 ]\r\n\tdef logget_task(self, params):\r\n\t\tif params == None:\r\n\t\t\treturn\r\n\t\tfor log_file in params:\r\n\t\t\tfiles = re.split(\" +\", 
log_file)\r\n\t\t\tret = get(files[0], files[1])\r\n\t\t\tself.log.info(f'RemoteFile={files[0]}, LocalFile={files[1]}, Result={ret.succeeded}, Fail={ret.failed}')\r\n\t\t\tif ret.succeeded == True:\r\n\t\t\t\tr = run(f'rm -f {files[0]}')\r\n\t\t\t\tself.log.info(f'Remove Result={r}')\r\n\t\treturn\r\n\r\n\r\n\t# 実行\r\n\t#\tkey\t: キー\r\n\tdef execute(self, key):\r\n\t\tif self.sets.get(key, \"type\") == 'sleep':\r\n\t\t\tself.log.info('【SLEEP開始!】')\r\n\t\t\ttime.sleep(int(self.sets.get(key, \"stim\")))\r\n\t\t\tself.log.info('【SLEEP終了!】')\r\n\t\telse:\r\n\t\t\tenv.user = self.sets.get(key, 'user')\r\n\t\t\tenv.password = self.sets.get(key, 'pass')\r\n\t\t\tenv.shell = 'ksh -c'\r\n\t\t\thost_string = f'{env.user}@{self.sets.get(key, \"host\")}:{self.sets.get(key, \"port\")}'\r\n#\t\t\tenv.host_string = host_string\r\n\t\t\twith hide('everything', 'status'):\r\n\t\t\t\tself.log.info(f'【シェル実行開始!】 {self.sets.get(key, \"host\")}')\r\n\t\t\t\texecute(self.shell_task, self.sets.get(key, 'executes'), hosts = [ host_string ])\r\n\t\t\t\tself.log.info(f'【シェル実行終了!】 {self.sets.get(key, \"host\")}')\r\n\t\t\twith hide('everything', 'status'):\r\n\t\t\t\tself.log.info(f'【ログ取得開始!】 {self.sets.get(key, \"host\")}')\r\n\t\t\t\texecute(self.logget_task, self.sets.get(key, 'logs'), hosts = [ host_string ])\r\n\t\t\t\tself.log.info(f'【ログ取得終了!】 {self.sets.get(key, \"host\")}')\r\n\t\treturn\r\n" }, { "alpha_fraction": 0.582730770111084, "alphanum_fraction": 0.588260293006897, "avg_line_length": 21.510000228881836, "blob_id": "450eacadc7e4614cfb0a12b5bfa2e238df42b690", "content_id": "d286e596964e10e9fa3ea93bbf65b353018db806", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2781, "license_type": "no_license", "max_line_length": 87, "num_lines": 100, "path": "/PerfTestTool/src/libs/aix/vmstat.py", "repo_name": "crystalbowie/PerfTestTool", "src_encoding": "UTF-8", "text": "#\r\n# -*- coding: utf-8 -*-\r\n#\r\n#\t【機能】\r\n#\t\tAIXでvmstatコマンドを実行してxlsxファイルに保存する。\r\n\r\nimport codecs, csv, datetime, queue, re, sys, traceback\r\n\r\nfrom fabric.api import execute, hide, env, run\r\nfrom libs.config.manager import manager\r\nfrom libs.logger.logger import logger\r\n\r\n\r\nclumnName = [\r\n\t'タイムスタンプ',\r\n\t'実行可スレッド',\r\n\t'待機中スレッド',\r\n\t'アクティブ仮想ページ',\r\n\t'フリーリスト',\r\n\t'ページャ入力リスト',\r\n\t'ページイン',\r\n\t'ページアウト',\r\n\t'空きページ',\r\n\t'スキャンページ',\r\n\t'クロックサイクル',\r\n\t'割り込み回数',\r\n\t'システムコール',\r\n\t'コンテキストスイッチ',\r\n\t'ユーザ時間',\r\n\t'システム時間',\r\n\t'アイドル時間',\r\n\t'待ち時間',\r\n\t'プロセッサ数',\r\n\t'消費キャパシティ'\r\n]\r\n\r\nclass vmstat(object):\r\n\t'''\r\n\tclassdocs\r\n\t'''\r\n\r\n\r\n\t# コンストラクタ\r\n\t# path\t: 設定ファイルのパス\r\n\t# myq\t: キュー\r\n\tdef __init__(self, path, myq):\r\n\t\tself.sets = manager(path)\r\n\t\tself.log = logger(self.sets.get('logging', 'file')).get()\r\n\t\tself.myq = myq\r\n\r\n\r\n\t# vmstatのタスク\r\n\t#\tparams\t:\t[ ホスト名, コマンド, インターバル, 出力先 ]\r\n\tdef vmstat_task(self, params):\r\n\t\tfname = params[3].format(datetime.datetime.now().strftime('%Y%m%d%H%M%S'))\r\n\t\twith codecs.open(fname, 'w', 'ms932') as fd:\r\n\t\t\twriter = csv.writer(fd, lineterminator='\\n')\r\n\t\t\twriter.writerow(clumnName)\r\n\t\t\tfor _ in range(0, sys.maxsize, 1):\r\n\t\t\t\tres = run(params[1], quiet=True)\r\n\t\t\t\tres_lines = re.split('\\r\\n', res)\r\n\t\t\t\ttargets = re.split(' +', res_lines[-1].strip())\r\n\t\t\t\tnow_time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]\r\n\t\t\t\ttargets.insert(0, 
now_time)\r\n\t\t\t\twriter.writerow(targets)\r\n\t\t\t\ttry:\r\n\t\t\t\t\trsv = self.myq.get(timeout=int(params[2]) - 1)\r\n\t\t\t\t\tself.log.info(f'受信 {rsv}')\r\n\t\t\t\t\tbreak\r\n\t\t\t\texcept queue.Empty:\r\n\t\t\t\t\tpass\r\n\t\t\t\texcept:\r\n\t\t\t\t\tself.log.error(traceback.format_exc())\r\n\t\t\t\t\tbreak\r\n\t\treturn\r\n\r\n\r\n\t# 実行\r\n\t#\tkey\t: キー\r\n\tdef execute(self, key):\r\n\t\tself.log.info(f'【vmstat開始!】 {self.sets.get(key, \"host\")}')\r\n\t\tenv.user = self.sets.get(key, 'user')\r\n\t\tenv.password = self.sets.get(key, 'pass')\r\n\t\tenv.shell = 'ksh -c'\r\n\t\thost_string = f'{env.user}@{self.sets.get(key, \"host\")}:{self.sets.get(key, \"port\")}'\r\n\t\tenv.host_string = host_string\r\n#\t\tprint(env.user, env.password, env.shell, host_string)\r\n\t\twith hide('everything', 'status'):\r\n\t\t\texecute(\r\n\t\t\t\tself.vmstat_task,\r\n\t\t\t\t[\r\n\t\t\t\t\tself.sets.get(key, 'host'),\r\n\t\t\t\t\tself.sets.get(key, 'scmd'),\r\n\t\t\t\t\tself.sets.get(key, 'itvl'),\r\n\t\t\t\t\tself.sets.get(key, 'outf')\r\n\t\t\t\t],\r\n\t\t\t\thosts = [ host_string ]\r\n\t\t)\r\n\t\tself.log.info(f'【vmstat取得終了!】 {self.sets.get(key, \"host\")}')\r\n\t\treturn\r\n" }, { "alpha_fraction": 0.6340930461883545, "alphanum_fraction": 0.6412737965583801, "avg_line_length": 23.0234375, "blob_id": "1a7136d9fdb9f4cccc9a95e8b9c5ead7e4188e93", "content_id": "f688513b2abed8f2ff5d932087877bf07d65184d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3473, "license_type": "no_license", "max_line_length": 102, "num_lines": 128, "path": "/PerfTestTool/src/WxPerfTestTool.py", "repo_name": "crystalbowie/PerfTestTool", "src_encoding": "UTF-8", "text": "#\r\n# -*- coding: utf-8 -*-\r\n#\r\n\r\nimport argparse, queue, threading, traceback, wx\r\nfrom libs.config.manager import manager\r\nfrom libs.aix.vmstat import vmstat\r\nfrom libs.aix.shell import shell\r\nfrom libs.logger.logger import logger\r\n\r\nfrom fabric.network import disconnect_all\r\n\r\nparser = argparse.ArgumentParser(prog='WxPerfTestTool.py', add_help=True)\r\nparser.add_argument('--config', type=str, default='./settings.yml', help='設定ファイルパス')\r\nargs = parser.parse_args()\r\n\r\nsets = manager(args.config)\r\nlog = logger(sets.get('logging', 'file')).get()\r\n\r\n#\r\n# アプリケーションクラス\r\n#\r\nclass PerfTestFrame(wx.Frame):\r\n\t# 初期化\r\n\t#\tparent\t:\t親インスタンス\r\n\t#\ttitle\t:\t画面タイトル\r\n\tdef __init__(self, parent, title):\r\n\t\tsuper(PerfTestFrame, self).__init__(\r\n\t\t\tparent,\r\n\t\t\ttitle = title,\r\n\t\t\tsize=(300, 150),\r\n\t\t\tstyle=wx.DEFAULT_FRAME_STYLE ^ wx.RESIZE_BORDER ^ wx.MINIMIZE_BOX ^ wx.MAXIMIZE_BOX ^ wx.CLOSE_BOX)\r\n\t\tself.InitUI()\r\n\t\tself.Centre()\r\n\t\tself.Show()\r\n\t\treturn\r\n\r\n\r\n\t# UI初期化\r\n\tdef InitUI(self):\r\n\t\tpanel = wx.Panel(self)\r\n\t\tsizer = wx.GridBagSizer(0, 0)\r\n\r\n\t\tself.btnConfirm = wx.Button(panel, label = \"実行\")\r\n\t\tself.btnExit = wx.Button(panel, wx.ID_EXIT, label = \"終了\" )\r\n\t\tself.btnConfirm.SetBackgroundColour('#e0ffff')\r\n\t\tself.btnExit.SetBackgroundColour ('#e0ffff')\r\n\r\n\t\tsizer.Add(self.btnConfirm, pos = (1, 0), flag = wx.ALL, border = 5)\r\n\t\tsizer.Add(self.btnExit, pos = (1, 2), flag = wx.ALL, border = 5)\r\n\r\n\t\tself.btnConfirm.Bind (wx.EVT_BUTTON, self.clickBtnConfirm)\r\n\t\tself.btnExit.Bind (wx.EVT_BUTTON, self.clickBtnExit)\r\n\r\n\t\tself.CreateStatusBar(2)\r\n\t\tself.SetStatusWidths([-1, -3])\r\n\r\n\t\tself.SetStatusText('レディ', 
0)\r\n\r\n\t\tpanel.SetSizerAndFit(sizer)\r\n\r\n\r\n\t# 無効・有効設定\r\n\t#\tflag\t:\tTrue=無効、False=有効\r\n\tdef setDisable(self, flag):\r\n\t\tif flag == True:\r\n\t\t\tself.btnConfirm.Disable()\r\n\t\t\tself.btnExit.Disable()\r\n\t\t\tself.SetStatusText('実行中…', 0)\r\n\t\telse:\r\n\t\t\tself.btnConfirm.Enable(enable=True)\r\n\t\t\tself.btnExit.Enable(enable=True)\r\n\t\t\tself.SetStatusText('レディ', 0)\r\n\r\n\r\n\t# 実行ボタン押下ハンドラ\r\n\t#\tevent\t:\tイベントインスタンス\r\n\tdef clickBtnConfirm(self, event):\r\n\t\ttry:\r\n\t\t\tself.setDisable(True)\r\n\t\t\tth_vmstat = []\r\n\t\t\tfor key in sets.get('vmstat_idx', 'keys'):\r\n\t\t\t\tchild_q = queue.PriorityQueue()\r\n\t\t\t\tth = threading.Thread(target = self.vmstat_thread, name = 'vmstat', args = ([key, child_q]))\r\n\t\t\t\tth.start()\r\n\t\t\t\tth_vmstat.append({'thread': th, 'queue': child_q})\r\n\t\t\tth_shell = threading.Thread(target = self.shell_thread, name = 'shell', args = (['shell']))\r\n\t\t\tth_shell.start()\r\n\t\t\tth_shell.join(timeout=None)\r\n\t\t\tfor th in th_vmstat:\r\n\t\t\t\tth['queue'].put('terminate order')\r\n\t\t\t\tth['thread'].join(timeout=None)\r\n\t\t\tth_vmstat = []\r\n\t\t\tself.setDisable(False)\r\n\t\texcept:\r\n\t\t\tlog.error(traceback.format_exc())\r\n\t\tfinally:\r\n\t\t\tdisconnect_all()\r\n\t\treturn\r\n\r\n\r\n\t# 終了ボタン押下ハンドラ\r\n\t#\tevent\t:\tイベントインスタンス\r\n\tdef clickBtnExit(self, event):\r\n\t\twx.Exit()\r\n\t\treturn\r\n\r\n\r\n\t# shellスレッド\r\n\t#\tkey\t:\tキー\r\n\tdef shell_thread(self, key):\r\n\t\tshell(args.config).execute(key)\r\n\t\treturn\r\n\r\n\r\n\t# vmstatスレッド\r\n\t#\tkey\t:\tキー\r\n\tdef vmstat_thread(self, key, child_q):\r\n\t\tvmstat(args.config, child_q).execute(key)\r\n\t\treturn\r\n\r\n#\r\n# アプリケーション入口\r\n#\r\nif __name__ == '__main__':\r\n\tapp = wx.App()\r\n\tPerfTestFrame(None, title = '性能試験ツール')\r\n\tapp.MainLoop()\r\n" }, { "alpha_fraction": 0.6056337952613831, "alphanum_fraction": 0.608450710773468, "avg_line_length": 12.875, "blob_id": "84cde8113c59a27c8999be1fcf511989071f4c5e", "content_id": "f31ffef93ee82d32a347b48d6621f1598889e3a4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 447, "license_type": "no_license", "max_line_length": 55, "num_lines": 24, "path": "/PerfTestTool/src/libs/logger/logger.py", "repo_name": "crystalbowie/PerfTestTool", "src_encoding": "UTF-8", "text": "#\r\n# -*- coding: utf-8 -*-\r\n#\r\n#\t【機能】\r\n#\t\tロギングインスタンスを作成して呼び出し元に返す。\r\n\r\nimport logging.config, yaml\r\n\r\nclass logger(object):\r\n\t'''\r\n\tclassdocs\r\n\t'''\r\n\r\n\r\n\t# コンストラクタ\r\n\t# path\t: 設定ファイルのパス\r\n\tdef __init__(self, path):\r\n\t\tlogging.config.dictConfig(yaml.safe_load(open(path)))\r\n\t\tself.logger = logging.getLogger()\r\n\r\n\r\n\t# 取得\r\n\tdef get(self):\r\n\t\treturn self.logger" }, { "alpha_fraction": 0.652886688709259, "alphanum_fraction": 0.6535994410514832, "avg_line_length": 24.980770111083984, "blob_id": "a98a070b2b75b4a0c22abbe6fa3587cfa5ce356a", "content_id": "d83a0bbb671b52bdf706171e2a2bcf90414489cb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1443, "license_type": "no_license", "max_line_length": 90, "num_lines": 52, "path": "/PerfTestTool/src/ClPerfTestTool.py", "repo_name": "crystalbowie/PerfTestTool", "src_encoding": "UTF-8", "text": "#\r\n# -*- coding: utf-8 -*-\r\n#\r\n\r\nimport argparse, queue, threading, traceback\r\nfrom libs.config.manager import manager\r\nfrom libs.aix.vmstat import vmstat\r\nfrom libs.aix.shell import 
shell\r\nfrom libs.logger.logger import logger\r\n\r\nfrom fabric.network import disconnect_all\r\n\r\nparser = argparse.ArgumentParser(prog='WxPerfTestTool.py', add_help=True)\r\nparser.add_argument('--config', type=str, default='./settings.yml', help='設定ファイルパス')\r\nargs = parser.parse_args()\r\n\r\nsets = manager(args.config)\r\nlog = logger(sets.get('logging', 'file')).get()\r\n\r\n# shellスレッド\r\n#\tkey\t:\tキー\r\ndef shell_thread(key):\r\n\tshell(args.config).execute(key)\r\n\treturn\r\n\r\n\r\n# vmstatスレッド\r\n#\tkey\t:\tキー\r\ndef vmstat_thread(key, child_q):\r\n\tvmstat(args.config, child_q).execute(key)\r\n\treturn\r\n\r\n\r\nif __name__ == '__main__':\r\n\ttry:\r\n\t\tth_vmstat = []\r\n\t\tfor key in sets.get('vmstat_idx', 'keys'):\r\n\t\t\tchild_q = queue.PriorityQueue()\r\n\t\t\tth = threading.Thread(target = vmstat_thread, name = 'vmstat', args = ([key, child_q]))\r\n\t\t\tth.start()\r\n\t\t\tth_vmstat.append({'thread': th, 'queue': child_q})\r\n\t\tth_shell = threading.Thread(target = shell_thread, name = 'shell', args = (['shell']))\r\n\t\tth_shell.start()\r\n\t\tth_shell.join(timeout=None)\r\n\t\tfor th in th_vmstat:\r\n\t\t\tth['queue'].put('terminate order')\r\n\t\t\tth['thread'].join(timeout=None)\r\n\t\tth_vmstat = []\r\n\texcept:\r\n\t\tlog.error(traceback.format_exc())\r\n\tfinally:\r\n\t\tdisconnect_all()\r\n" }, { "alpha_fraction": 0.5707195997238159, "alphanum_fraction": 0.5756824016571045, "avg_line_length": 13.5, "blob_id": "88f6c039025607e65aef0c43ddce0f18cee7d981", "content_id": "4e808bbbc85f37e06d30cae1a6d3d659813c9485", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 493, "license_type": "no_license", "max_line_length": 65, "num_lines": 26, "path": "/PerfTestTool/src/libs/config/manager.py", "repo_name": "crystalbowie/PerfTestTool", "src_encoding": "UTF-8", "text": "#\r\n# -*- coding: utf-8 -*-\r\n#\r\n#\t【機能】\r\n#\t\t指定されたYAMLファイルを内部展開し、SCTION、KEYを指定して値を受け渡す。\r\n\r\nimport yaml, codecs\r\n\r\nclass manager(object):\r\n\t'''\r\n\tclassdocs\r\n\t'''\r\n\r\n\r\n\tdef __init__(self, path):\r\n\t\t'''\r\n\t\tConstructor\r\n\t\t'''\r\n\t\tself.settings = yaml.safe_load(codecs.open(path, \"r\", \"utf-8\"))\r\n\r\n\r\n\t# 値取得\r\n\t#\tsction\t: セクション値\r\n\t#\tkey\t\t: キー値\r\n\tdef get(self, section, key):\r\n\t\treturn self.settings[section][key]\r\n" }, { "alpha_fraction": 0.5977011322975159, "alphanum_fraction": 0.6781609058380127, "avg_line_length": 42.5, "blob_id": "3864bfed1024bfd719633117e0aa93f6b7c94ae1", "content_id": "780906aa744f2866c7bba85315b666dc0e74e090", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 87, "license_type": "no_license", "max_line_length": 78, "num_lines": 2, "path": "/PerfTestTool/shell/test1.ksh", "repo_name": "crystalbowie/PerfTestTool", "src_encoding": "UTF-8", "text": "/usr/bin/ksh -c \"/apps/claimone/batch/sh5/job_zd1702.sh -param 2 > /dev/null\";\nexit 0;\n" } ]
8
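The vmstat.py worker in PerfTestTool above uses a pattern worth spelling out: instead of time.sleep() between samples, it blocks on myq.get(timeout=...), so the queue doubles as both the sampling interval and a stop signal (the GUI and CLI drivers put 'terminate order' on a PriorityQueue to end the loop). A self-contained sketch of just that mechanism, with illustrative names (worker, interval) rather than the tool's own:

import queue
import threading
import time

def worker(q, interval):
    while True:
        print('sample at', time.strftime('%H:%M:%S'))  # stands in for one vmstat run
        try:
            q.get(timeout=interval)  # acts as an interruptible sleep
            break                    # any item received means "terminate order"
        except queue.Empty:
            pass                     # interval elapsed: take the next sample

q = queue.Queue()  # the tool uses queue.PriorityQueue, but any queue works here
t = threading.Thread(target=worker, args=(q, 1.0))
t.start()
time.sleep(3.5)            # let a few samples happen
q.put('terminate order')   # wakes the worker immediately instead of after the interval
t.join()

The benefit over a plain sleep is responsiveness: the thread reacts to the stop request at once, rather than up to one full interval later.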
MashaMihalkova/Gender_identifikation
https://github.com/MashaMihalkova/Gender_identifikation
b8264e63e601da85e69f161d8c908487dd858774
b1f4fb61568d71a1cdd5cb7c5c4e695e7a49568c
36377330721a19042502ceac79015897bb00f98f
refs/heads/main
2023-08-30T21:29:53.881106
2021-10-11T15:06:12
2021-10-11T15:06:12
415,968,433
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5798553228378296, "alphanum_fraction": 0.5976627469062805, "avg_line_length": 27.507936477661133, "blob_id": "9e1d62256e33cd0a3b72951a47b64d7f70697df4", "content_id": "13b11a76dddec057e74ad821783512e7d879a0ee", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1797, "license_type": "no_license", "max_line_length": 120, "num_lines": 63, "path": "/test_gender.py", "repo_name": "MashaMihalkova/Gender_identifikation", "src_encoding": "UTF-8", "text": "import wave\nimport numpy as np\nimport os\nfrom keras.models import load_model\nimport optparse\nimport logging\nfrom logging import error, info\nimport librosa\n\ndef wav2mfcc(path, max_pad_size=11):\n y, sr = librosa.load(path=path, sr=None, mono=1)\n y = y[::3]\n audio_mac = librosa.feature.mfcc(y=y, sr=8000)\n y_shape = audio_mac.shape[1]\n if y_shape < max_pad_size:\n pad_size = max_pad_size - y_shape\n audio_mac = np.pad(audio_mac, ((0, 0), (0, pad_size)), mode='constant')\n else:\n audio_mac = audio_mac[:, :max_pad_size]\n return audio_mac\n\n\n\n\nif __name__ == '__main__':\n parser = optparse.OptionParser()\n\n parser.add_option('-m', '--model', type=str,\n help=\"path to the model\", default=\"/content/drive/MyDrive/iden_gender/my_model1.h5\")\n parser.add_option('-f', '--file', type=str,\n help=\" path to the file for testing\", default=\"/content/drive/MyDrive/iden_gender/inner_test/Sound_22123.wav\")\n\n options, args = parser.parse_args() \n MODEL = getattr(options,'model')\n FILE = getattr(options,'file') \n\n # if not os.path.isfile(MODEL) or not os.stat(MODEL).st_size:\n # error(\"Weights not found. Unable to load the model.\")\n \n # if not os.path.isfile(FILE):\n # error(\"File not found. Unable to load file.\")\n \n try:\n model = load_model(MODEL) \n \n info(f'Loaded model: {MODEL}.')\n\n wavs=[]\n wavs.append(wav2mfcc(FILE,11))\n X=np.array(wavs)\n X= X.reshape(-1, 220)\n \n result=model.predict(X[0:1])[0] # \n print(\"Prediction result\",result)\n \n name = [\"male\",\"female\"]\n ind=0 \n for i in range(len(result)):\n if result[i] > result[ind]:\n ind=1\n print(\"Gender:\",name[ind])\n except Exception as e:\n error(str(e))\n\n" }, { "alpha_fraction": 0.6137930750846863, "alphanum_fraction": 0.7655172348022461, "avg_line_length": 40.35714340209961, "blob_id": "3ea5869c5a472723f31bdec3f5d207bd4b8ff7b0", "content_id": "5b66ff24c58a88edc6db1460dbcd5376927f7b16", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 639, "license_type": "no_license", "max_line_length": 111, "num_lines": 14, "path": "/README.md", "repo_name": "MashaMihalkova/Gender_identifikation", "src_encoding": "UTF-8", "text": "# Gender_identifikation\n#### 1. Gender_ident.ipynb - code of data processing and model training.\n#### 2. test_gender.py - script for wav file recognition.\n#### 3. Test - folder with examples for testing. Sound_22123.wav - external example.\n#### 4. my_model1.h5 - example of weights.\n\nПример графика точности распознавания голоса\n\n![image](https://user-images.githubusercontent.com/32015131/136811530-2c899c64-7338-42d3-93ff-327b9d1b6e5d.png)\n\n\nПример вывода скрипта\n\n![image](https://user-images.githubusercontent.com/32015131/136812032-e453f645-c2f2-4d49-825f-2ce9777ea08e.png)\n\n" } ]
2
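Two details of test_gender.py above are easy to miss. First, wav2mfcc() pads or truncates the MFCC matrix to 11 frames so that every clip flattens to the 20 x 11 = 220 features that X.reshape(-1, 220) expects. Second, the manual class-selection loop hard-codes ind=1, which happens to give the right answer for two classes but is not a general argmax; np.argmax is the idiomatic form. A sketch of both, using a dummy matrix so it runs without librosa or Keras (fix_width is an illustrative name for the pad-or-truncate step):

import numpy as np

def fix_width(mfcc, max_pad_size=11):
    # mirrors the shape normalisation inside wav2mfcc()
    n_frames = mfcc.shape[1]
    if n_frames < max_pad_size:
        return np.pad(mfcc, ((0, 0), (0, max_pad_size - n_frames)), mode='constant')
    return mfcc[:, :max_pad_size]

short_clip = np.ones((20, 7))   # fewer than 11 frames -> zero-padded on the right
long_clip = np.ones((20, 30))   # more than 11 frames  -> truncated
assert fix_width(short_clip).shape == fix_width(long_clip).shape == (20, 11)
print(fix_width(short_clip).reshape(-1).shape)  # (220,), one flattened feature vector

result = np.array([0.2, 0.8])   # a stand-in for model.predict(X[0:1])[0]
print(['male', 'female'][int(np.argmax(result))])  # 'female'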
ivanbgd/Quick3-Sort-Py
https://github.com/ivanbgd/Quick3-Sort-Py
39f57f1298a069fcadfc8e01914c7e4f87af3f4b
68f17e9631066e8bdd90fea0b096fd0d4c2da0a7
8f846c4c98a535363f79a27c14201b0b20dfb769
refs/heads/master
2021-01-19T19:02:12.540472
2017-08-23T13:17:57
2017-08-23T13:17:57
101,182,072
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.38908764719963074, "alphanum_fraction": 0.43291592597961426, "avg_line_length": 21.3799991607666, "blob_id": "bed106137c9ca2e329027551794e202638a8fd1a", "content_id": "b6dc3ac9df283624a1405485fe8e1841b1415c9e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1118, "license_type": "permissive", "max_line_length": 67, "num_lines": 50, "path": "/sorting.py", "repo_name": "ivanbgd/Quick3-Sort-Py", "src_encoding": "UTF-8", "text": "import sys\nimport random\n\ndef partition3(a, l, r):\n x = a[l]\n j, o = l, l\n for i in range(l+1, r+1):\n if a[i] < x:\n o += 1\n a[i], a[o] = a[o], a[i]\n a[j], a[o] = a[o], a[j]\n j += 1\n elif a[i] == x:\n o += 1\n a[i], a[o] = a[o], a[i]\n else:\n continue\n if j > l:\n a[l], a[j-1] = a[j-1], a[l]\n else:\n a[l], a[j] = a[j], a[l]\n return j, o\n\ndef randomized_quick_sort3(a, l, r):\n if l >= r:\n return\n k = random.randint(l, r)\n a[l], a[k] = a[k], a[l]\n m1, m2 = partition3(a, l, r)\n randomized_quick_sort3(a, l, m1 - 1)\n randomized_quick_sort3(a, m2 + 1, r)\n\n\nif __name__ == '__main__':\n #input = sys.stdin.read()\n input = \"5\\n2 3 9 2 2\" # Correct output is: 2 2 2 3 9.\n #input = \"13\\n6 2 3 4 2 6 8 9 2 6 5 6 8\"\n\n input = list(map(int, input.split()))\n n = input[0]\n a = input[1:]\n\n randomized_quick_sort3(a, 0, n - 1)\n #a.sort(); # This is TimSort from Python Standard Library.\n\n for x in a:\n print(x,)\n \n print(a)\n print()" }, { "alpha_fraction": 0.7179487347602844, "alphanum_fraction": 0.7692307829856873, "avg_line_length": 18.5, "blob_id": "1c7c59d53684fdb53ede2c7b16b6d4420b753806", "content_id": "8deeecab92827e0c27fe75c2a9062d2e5f6aeb5e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 39, "license_type": "permissive", "max_line_length": 21, "num_lines": 2, "path": "/README.md", "repo_name": "ivanbgd/Quick3-Sort-Py", "src_encoding": "UTF-8", "text": "# Quick3-Sort-Py\nQuick3 Sort in Python\n" } ]
2
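For readers comparing implementations: partition3() in sorting.py above tracks two moving boundaries (j for the strictly-smaller region, o for the end of the equal region) and places the pivot with a final swap. The more common way to write the same 3-way partition is the Dutch-national-flag form, which keeps the pivot by value and needs no post-loop fix-up. This is an alternative sketch for comparison, not the repository's code:

import random

def quick3(a, lo, hi):
    # in-place 3-way quicksort: after partitioning, a[lo..lt-1] < pivot,
    # a[lt..gt] == pivot, and a[gt+1..hi] > pivot
    if lo >= hi:
        return
    pivot = a[random.randint(lo, hi)]
    lt, i, gt = lo, lo, hi
    while i <= gt:
        if a[i] < pivot:
            a[lt], a[i] = a[i], a[lt]
            lt += 1
            i += 1
        elif a[i] > pivot:
            a[i], a[gt] = a[gt], a[i]
            gt -= 1          # a[i] is now unexamined, so i stays put
        else:
            i += 1
    quick3(a, lo, lt - 1)    # the equal block a[lt..gt] is already in place
    quick3(a, gt + 1, hi)

data = [6, 2, 3, 4, 2, 6, 8, 9, 2, 6, 5, 6, 8]
quick3(data, 0, len(data) - 1)
assert data == sorted([6, 2, 3, 4, 2, 6, 8, 9, 2, 6, 5, 6, 8])
print(data)

Skipping recursion into the equal block is what makes 3-way quicksort fast on inputs with many duplicate keys, such as the sample inputs in sorting.py.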
holtwg/hogwartsholt
https://github.com/holtwg/hogwartsholt
7725bd5a79d420bded8e41cfb520150f7fbd57dc
bf24586d270839b580462430d9a771b51d1ced76
1381dc5efae3d6aa7bb78ea8b3b491f3cfaeae29
refs/heads/master
2022-11-20T09:13:19.733055
2020-07-20T09:05:45
2020-07-20T09:05:45
280,993,073
0
0
null
2020-07-20T02:26:04
2020-07-20T02:37:15
2020-07-20T09:05:46
null
[ { "alpha_fraction": 0.4514169991016388, "alphanum_fraction": 0.47975707054138184, "avg_line_length": 23.700000762939453, "blob_id": "5e1002f09c1b4c17bcade8bcb34dd39df96cb373", "content_id": "2535b8db8d94b751b86bb04189f081e0d87ff87a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 502, "license_type": "no_license", "max_line_length": 56, "num_lines": 20, "path": "/testing/test_calc.py", "repo_name": "holtwg/hogwartsholt", "src_encoding": "UTF-8", "text": "# 测试文件\nimport pytest\n\nfrom calculator.calc import Calculator\n\n\nclass Test_calc:\n cal = Calculator()\n\n @pytest.mark.add\n @pytest.mark.parametrize('a, b, result', [(1, 1, 2),\n (2, 3, 5)\n ])\n def test_add(self, a, b, result):\n # cal = Calculator()\n assert result == self.cal.add(a, b)\n\n @pytest.mark.divi\n def test_divi(self):\n assert 1.1 == self.cal.divi(0.11, 0.10)\n" }, { "alpha_fraction": 0.44516128301620483, "alphanum_fraction": 0.44516128301620483, "avg_line_length": 17.600000381469727, "blob_id": "92ae65c65964bc649cedf1f1158293f501de75b8", "content_id": "38d5e1d59ad57eb18bc67dec414489f8c51f309c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 481, "license_type": "no_license", "max_line_length": 48, "num_lines": 25, "path": "/calculator/calc.py", "repo_name": "holtwg/hogwartsholt", "src_encoding": "UTF-8", "text": "from decimal import Decimal\n\n\nclass Calculator:\n # 相加\n def add(self, a, b):\n return a + b\n\n # 相减\n def sub(self, a, b):\n return a - b\n\n # 相乘\n def multi(self, a, b):\n return a * b\n\n # 相除\n def divi(self, a, b):\n if type(a) == float or type(b) == float:\n a = Decimal(str(a))\n b = Decimal(str(b))\n result = float(a / b)\n return result\n else:\n return a / b\n" } ]
2
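The Decimal detour in Calculator.divi() above is what lets the test's assert 1.1 == cal.divi(0.11, 0.10) pass: in binary floating point, 0.11 / 0.10 does not round to exactly 1.1, while Decimal(str(x)) divides the decimal literals exactly. A quick demonstration of the difference:

from decimal import Decimal

print(0.11 / 0.10)                                     # ~1.0999999999999999, not 1.1
print(float(Decimal(str(0.11)) / Decimal(str(0.10))))  # 1.1

assert 0.11 / 0.10 != 1.1
assert float(Decimal(str(0.11)) / Decimal(str(0.10))) == 1.1

Converting through str() matters: Decimal(0.11) would capture the float's full binary expansion, whereas Decimal('0.11') is the exact decimal value the caller wrote.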
goodmoo02/js_deeplearing
https://github.com/goodmoo02/js_deeplearing
0453f83a23ff03b3d47f7e629e38ece1ac7f4948
221938565e2de93e23432487ed29a8c0b3985384
f4c4b8ddf37a5554a5dd315ce43ee0330dd7ce96
refs/heads/master
2020-07-12T15:56:55.854251
2019-09-02T05:27:16
2019-09-02T05:27:16
204,856,787
0
1
null
2019-08-28T05:42:52
2019-08-28T07:14:02
2019-08-28T10:10:39
Python
[ { "alpha_fraction": 0.6531996130943298, "alphanum_fraction": 0.6767908334732056, "avg_line_length": 40.88399887084961, "blob_id": "6b3191e627ffc1cd9641f776b7a493195486bbcd", "content_id": "dd075afa22826d588ce9241d72091e0ad7aecca0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10506, "license_type": "no_license", "max_line_length": 114, "num_lines": 250, "path": "/seulgi_4.py", "repo_name": "goodmoo02/js_deeplearing", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n# Form implementation generated from reading ui file 'seulgi_4.ui',\n# licensing of 'seulgi_4.ui' applies.\n#\n# Created: Wed Aug 28 11:29:48 2019\n# by: pyside2-uic running on PySide2 5.13.0\n#\n# WARNING! All changes made in this file will be lost!\n\nfrom PySide2 import QtCore, QtGui, QtWidgets, QtMultimedia, QtMultimediaWidgets\nfrom PySide2.QtCore import *\nfrom PySide2.QtGui import *\nfrom PySide2.QtWidgets import *\n\nclass Ui_Form(object):\n def setupUi(self, Form):\n Form.setObjectName(\"Form\")\n Form.showMaximized()\n\n self.menu_widget = QtWidgets.QWidget(Form)\n self.menu_widget.setGeometry(QtCore.QRect(10, 10, 131, 641))\n self.menu_widget.setObjectName(\"menu_widget\")\n\n self.menu1_widget = QtWidgets.QWidget(self.menu_widget)\n self.menu1_widget.setGeometry(QtCore.QRect(4, 10, 121, 41))\n self.menu1_widget.setObjectName(\"menu1_widget\")\n\n self.menu_btn = QtWidgets.QPushButton(self.menu1_widget)\n self.menu_btn.setGeometry(QtCore.QRect(20, 10, 75, 23))\n self.menu_btn.setObjectName(\"menu_btn\")\n self.menu_btn.setCheckable(True)\n self.menu_btn.toggle()\n self.menu_btn.clicked.connect(self.menu_btn_clicked)\n\n self.menu2_widget = QtWidgets.QWidget(self.menu_widget)\n self.menu2_widget.setGeometry(QtCore.QRect(3, 70, 121, 111))\n self.menu2_widget.setObjectName(\"menu2_widget\")\n\n self.vid_btn = QtWidgets.QPushButton(self.menu2_widget)\n self.vid_btn.setGeometry(QtCore.QRect(20, 20, 75, 23))\n self.vid_btn.setObjectName(\"vid_btn\")\n\n self.vid_btn.setCheckable(True)\n self.vid_btn.toggle()\n self.vid_btn.clicked.connect(self.vid_btn_clicked)\n\n self.heat_btn = QtWidgets.QPushButton(self.menu2_widget)\n self.heat_btn.setGeometry(QtCore.QRect(20, 50, 75, 23))\n self.heat_btn.setObjectName(\"heat_btn\")\n\n self.heat_btn.setCheckable(True)\n self.heat_btn.toggle()\n self.heat_btn.clicked.connect(self.heat_btn_clicked)\n\n self.gridLayoutWidget = QtWidgets.QWidget(Form)\n self.gridLayoutWidget.setGeometry(QtCore.QRect(150, 10, 761, 641))\n self.gridLayoutWidget.setObjectName(\"gridLayoutWidget\")\n\n self.main_lay = QtWidgets.QGridLayout(self.gridLayoutWidget)\n self.main_lay.setContentsMargins(0, 0, 0, 0)\n self.main_lay.setObjectName(\"main_lay\")\n\n self.vid_lay = QtWidgets.QGridLayout()\n self.vid_lay.setObjectName(\"vid_lay\")\n\n self.vid_widget = QtWidgets.QWidget(self.gridLayoutWidget)\n self.vid_widget.setObjectName(\"vid_widget\")\n\n self.video_widget = QtWidgets.QWidget(self.vid_widget)\n self.video_widget.setGeometry(QtCore.QRect(20, 110, 331, 331))\n self.video_widget.setObjectName(\"video_widget\")\n\n self.verticalLayoutWidget = QtWidgets.QWidget(self.vid_widget)\n self.verticalLayoutWidget.setGeometry(QtCore.QRect(0, 10, 371, 80))\n self.verticalLayoutWidget.setObjectName(\"verticalLayoutWidget\")\n\n self.videoMenu_VLayout = QtWidgets.QVBoxLayout(self.verticalLayoutWidget)\n self.videoMenu_VLayout.setContentsMargins(0, 0, 0, 0)\n self.videoMenu_VLayout.setObjectName(\"videoMenu_VLayout\")\n\n self.dateMenu_HLayout = 
QtWidgets.QHBoxLayout()\n self.dateMenu_HLayout.setObjectName(\"horizontalLayout\")\n\n self.dateLabel = QtWidgets.QLabel(self.verticalLayoutWidget)\n self.dateLabel.setObjectName(\"dateLabel\")\n self.dateMenu_HLayout.addWidget(self.dateLabel)\n\n self.dateEdit = QtWidgets.QDateEdit(self.verticalLayoutWidget)\n self.dateEdit.setObjectName(\"dateEdit\")\n self.dateMenu_HLayout.addWidget(self.dateEdit)\n\n self.videoMenu_VLayout.addLayout(self.dateMenu_HLayout)\n\n self.timeLabel_HLayout = QtWidgets.QHBoxLayout()\n self.timeLabel_HLayout.setObjectName(\"horizontalLayout_2\")\n\n self.timeLabel = QtWidgets.QLabel(self.verticalLayoutWidget)\n self.timeLabel.setObjectName(\"label_2\")\n self.timeLabel_HLayout.addWidget(self.timeLabel)\n\n self.timeEdit_1 = QtWidgets.QTimeEdit(self.verticalLayoutWidget)\n self.timeEdit_1.setObjectName(\"timeEdit\")\n self.timeLabel_HLayout.addWidget(self.timeEdit_1)\n\n self.timeEdit_2 = QtWidgets.QTimeEdit(self.verticalLayoutWidget)\n self.timeEdit_2.setObjectName(\"timeEdit_2\")\n self.timeLabel_HLayout.addWidget(self.timeEdit_2)\n\n self.videoMenu_VLayout.addLayout(self.timeLabel_HLayout)\n\n self.total_videoBtn_HLayout = QtWidgets.QWidget(self.vid_widget)\n self.total_videoBtn_HLayout.setGeometry(QtCore.QRect(70, 460, 231, 80))\n self.total_videoBtn_HLayout.setObjectName(\"videoBtn_HLayout\")\n\n self.videoBtn_HLayout = QtWidgets.QHBoxLayout(self.total_videoBtn_HLayout)\n self.videoBtn_HLayout.setContentsMargins(0, 0, 0, 0)\n self.videoBtn_HLayout.setObjectName(\"horizontalLayout_3\")\n\n self.playBtn = QtWidgets.QPushButton(self.total_videoBtn_HLayout)\n self.playBtn.setObjectName(\"playBtn\")\n self.playBtn.clicked.connect(self.play_btn_clicked)\n self.videoBtn_HLayout.addWidget(self.playBtn)\n\n spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)\n self.videoBtn_HLayout.addItem(spacerItem)\n\n self.stopBtn = QtWidgets.QPushButton(self.total_videoBtn_HLayout)\n self.stopBtn.setObjectName(\"pushButton_2\")\n self.stopBtn.clicked.connect(self.stop_btn_clicked)\n self.videoBtn_HLayout.addWidget(self.stopBtn)\n\n self.vid_lay.addWidget(self.vid_widget, 0, 1, 1, 1)\n self.main_lay.addLayout(self.vid_lay, 0, 0, 1, 1)\n\n self.heat_lay = QtWidgets.QGridLayout()\n self.heat_lay.setObjectName(\"heat_lay\")\n\n self.heat_widget = QtWidgets.QWidget(self.gridLayoutWidget)\n self.heat_widget.setObjectName(\"heat_widget\")\n\n self.heat_label = QtWidgets.QLabel(self.heat_widget)\n self.heat_label.setGeometry(QtCore.QRect(10, 10, 56, 12))\n self.heat_label.setObjectName(\"heat_label\")\n\n self.time_check = QtWidgets.QCheckBox(self.heat_widget)\n self.time_check.setGeometry(QtCore.QRect(10, 33, 81, 16))\n self.time_check.setObjectName(\"time_check\")\n\n self.start_time = QtWidgets.QTimeEdit(self.heat_widget)\n self.start_time.setGeometry(QtCore.QRect(100, 30, 118, 22))\n self.start_time.setObjectName(\"start_time\")\n\n self.end_time = QtWidgets.QTimeEdit(self.heat_widget)\n self.end_time.setGeometry(QtCore.QRect(230, 30, 118, 22))\n self.end_time.setObjectName(\"end_time\")\n\n self.img_label = QtWidgets.QLabel(self.heat_widget)\n self.img_label.setGeometry(QtCore.QRect(10, 70, 56, 12))\n self.img_label.setText(\"\")\n self.img_label.setObjectName(\"img_label\")\n\n self.heat_lay.addWidget(self.heat_widget, 0, 0, 1, 1)\n self.main_lay.addLayout(self.heat_lay, 0, 1, 1, 1)\n\n self.retranslateUi(Form)\n QtCore.QMetaObject.connectSlotsByName(Form)\n\n self.menu_mainPage()\n\n # video player widget\n self.vWidget = 
QtMultimediaWidgets.QVideoWidget(self.video_widget)\n self.vWidget.resize(self.video_widget.size())\n\n # media area\n self.player = QtMultimedia.QMediaPlayer(self.video_widget)\n self.vid_name = \"C:/Users/bit/Downloads/out (2).mp4\"\n self.player.setMedia(QUrl.fromLocalFile(self.vid_name))\n self.player.setVideoOutput(self.vWidget)\n\n self.img_label = QtWidgets.QLabel(self.heat_widget)\n self.img_label.setGeometry(QtCore.QRect(10, 80, 640, 360))\n self.img_label.setObjectName(\"img_label\")\n # self.img_label.setBackgroundRole(QtGui.QPalette.Base)\n # self.img_label.setSizePolicy(QtWidgets.QSizePolicy.Ignored,\n # QtWidgets.QSizePolicy.Ignored)\n self.img_label.setScaledContents(True)\n\n self.file_name = \"C:/Users/bit/Anaconda3/envs/qt/qt/cat.jpg\"\n\n image = QtGui.QImage(self.file_name)\n if image.isNull():\n QtGui.QMessageBox.information(self, \"Image Viewer\",\n \"Cannot load %s.\" % self.file_name)\n self.img_label.setPixmap(QtGui.QPixmap.fromImage(image))\n\n def retranslateUi(self, Form):\n Form.setWindowTitle(QtWidgets.QApplication.translate(\"Form\", \"Form\", None, -1))\n self.menu_btn.setText(QtWidgets.QApplication.translate(\"Form\", \"메뉴\", None, -1))\n self.vid_btn.setText(QtWidgets.QApplication.translate(\"Form\", \"영상\", None, -1))\n self.heat_btn.setText(QtWidgets.QApplication.translate(\"Form\", \"히트맵\", None, -1))\n self.dateLabel.setText(QtWidgets.QApplication.translate(\"Form\", \"Date\", None, -1))\n self.timeLabel.setText(QtWidgets.QApplication.translate(\"Form\", \"Time\", None, -1))\n self.playBtn.setText(QtWidgets.QApplication.translate(\"Form\", \"재생\", None, -1))\n self.stopBtn.setText(QtWidgets.QApplication.translate(\"Form\", \"정지\", None, -1))\n self.heat_label.setText(QtWidgets.QApplication.translate(\"Form\", \"히트맵\", None, -1))\n self.time_check.setText(QtWidgets.QApplication.translate(\"Form\", \"적용시간\", None, -1))\n\n def menu_btn_clicked(self):\n if self.menu_btn.isChecked():\n self.menu2_widget.setVisible(True)\n else:\n self.menu2_widget.setVisible(False)\n\n def vid_btn_clicked(self):\n self.vid_widget.setVisible(True)\n self.heat_widget.setVisible(False)\n\n def heat_btn_clicked(self):\n self.heat_widget.setVisible(True)\n self.vid_widget.setVisible(False)\n\n def menu_mainPage(self):\n self.mainPage = QtWidgets.QWidget(self.gridLayoutWidget)\n self.mainPage.setObjectName(\"mainPage\")\n\n self.mainPage_lay = QtWidgets.QGridLayout()\n self.mainPage_lay.setObjectName(\"mainPage_lay\")\n\n self.mainPage_lay.addWidget(self.mainPage, 0, 0, 1, 1)\n self.main_lay.addLayout(self.mainPage_lay, 0, 1, 1, 1)\n\n self.mainPage.setVisible(True)\n self.vid_widget.setVisible(False)\n self.heat_widget.setVisible(False)\n\n def play_btn_clicked(self):\n self.player.play()\n\n def stop_btn_clicked(self):\n self.player.stop()\n\nif __name__ == \"__main__\":\n import sys\n app = QtWidgets.QApplication(sys.argv)\n Form = QtWidgets.QWidget()\n ui = Ui_Form()\n ui.setupUi(Form)\n Form.show()\n sys.exit(app.exec_())" }, { "alpha_fraction": 0.3020833432674408, "alphanum_fraction": 0.6666666865348816, "avg_line_length": 4.388888835906982, "blob_id": "3846abfddaa08b9c41058da67b2bafce4564deaa", "content_id": "5aac7a9db17bcaf37ffc11bdb3459c37785d78b0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 96, "license_type": "no_license", "max_line_length": 18, "num_lines": 18, "path": "/README.md", "repo_name": "goodmoo02/js_deeplearing", "src_encoding": "UTF-8", "text": "hello\n\n\n\ntest1\n\ntest2\n\ntest3\n\n<<<<<<< 
HEAD\ntest4\n=======\n111111111111111111\n\n222222222222\n\ntest4" } ]
2
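seulgi_4.py above wires its playback path by hand: a QMediaPlayer renders into a QVideoWidget via setVideoOutput(), and the play/stop buttons simply call player.play() and player.stop(). Reduced to its essentials it looks like the sketch below; this assumes PySide2 with a working QtMultimedia backend on the machine, and video.mp4 is a placeholder path, not a file from the repository.

import sys
from PySide2 import QtWidgets
from PySide2.QtCore import QUrl
from PySide2.QtMultimedia import QMediaPlayer
from PySide2.QtMultimediaWidgets import QVideoWidget

app = QtWidgets.QApplication(sys.argv)
root = QtWidgets.QWidget()
layout = QtWidgets.QVBoxLayout(root)

video = QVideoWidget()
play = QtWidgets.QPushButton('Play')
stop = QtWidgets.QPushButton('Stop')
layout.addWidget(video)
layout.addWidget(play)
layout.addWidget(stop)

player = QMediaPlayer(root)
player.setMedia(QUrl.fromLocalFile('video.mp4'))  # same implicit QUrl-to-media call the original uses
player.setVideoOutput(video)                      # route decoded frames into the widget
play.clicked.connect(player.play)
stop.clicked.connect(player.stop)

root.show()
sys.exit(app.exec_())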
paul90hn/applied-text-mining-in-python
https://github.com/paul90hn/applied-text-mining-in-python
f2c059152786fe23fa7be33a9111014ab3dcb458
815d63dd642b556c03679ee7ae19c5da9caddf13
db71d3a66df612c3309ee8db0276d805bc873780
refs/heads/master
2020-12-29T12:38:18.171478
2020-02-06T04:50:43
2020-02-06T04:50:43
238,610,235
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5452598929405212, "alphanum_fraction": 0.583204448223114, "avg_line_length": 37.37899398803711, "blob_id": "16b460a6abff3094c8300445977d7b88583635ba", "content_id": "7eaad95e5872c03bfae5a830c6518e03600648fc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8407, "license_type": "no_license", "max_line_length": 287, "num_lines": 219, "path": "/Assignment+1.py", "repo_name": "paul90hn/applied-text-mining-in-python", "src_encoding": "UTF-8", "text": "\n# coding: utf-8\n\n# ---\n# \n# _You are currently looking at **version 1.1** of this notebook. To download notebooks and datafiles, as well as get help on Jupyter notebooks in the Coursera platform, visit the [Jupyter Notebook FAQ](https://www.coursera.org/learn/python-text-mining/resources/d9pwm) course resource._\n# \n# ---\n\n# # Assignment 1\n# \n# In this assignment, you'll be working with messy medical data and using regex to extract relevant infromation from the data. \n# \n# Each line of the `dates.txt` file corresponds to a medical note. Each note has a date that needs to be extracted, but each date is encoded in one of many formats.\n# \n# The goal of this assignment is to correctly identify all of the different date variants encoded in this dataset and to properly normalize and sort the dates. \n# \n# Here is a list of some of the variants you might encounter in this dataset:\n# * 04/20/2009; 04/20/09; 4/20/09; 4/3/09\n# * Mar-20-2009; Mar 20, 2009; March 20, 2009; Mar. 20, 2009; Mar 20 2009;\n# * 20 Mar 2009; 20 March 2009; 20 Mar. 2009; 20 March, 2009\n# * Mar 20th, 2009; Mar 21st, 2009; Mar 22nd, 2009\n# * Feb 2009; Sep 2009; Oct 2010\n# * 6/2008; 12/2009\n# * 2009; 2010\n# \n# Once you have extracted these date patterns from the text, the next step is to sort them in ascending chronological order accoring to the following rules:\n# * Assume all dates in xx/xx/xx format are mm/dd/yy\n# * Assume all dates where year is encoded in only two digits are years from the 1900's (e.g. 1/5/89 is January 5th, 1989)\n# * If the day is missing (e.g. 9/2009), assume it is the first day of the month (e.g. September 1, 2009).\n# * If the month is missing (e.g. 2010), assume it is the first of January of that year (e.g. 
January 1, 2010).\n# * Watch out for potential typos as this is a raw, real-life derived dataset.\n# \n# With these rules in mind, find the correct date in each note and return a pandas Series in chronological order of the original Series' indices.\n# \n# For example if the original series was this:\n# \n# 0 1999\n# 1 2010\n# 2 1978\n# 3 2015\n# 4 1985\n# \n# Your function should return this:\n# \n# 0 2\n# 1 4\n# 2 0\n# 3 1\n# 4 3\n# \n# Your score will be calculated using [Kendall's tau](https://en.wikipedia.org/wiki/Kendall_rank_correlation_coefficient), a correlation measure for ordinal data.\n# \n# *This function should return a Series of length 500 and dtype int.*\n\n# In[1]:\n\n\nimport pandas as pd\nimport numpy as np\n\ndoc = []\nwith open('dates.txt') as file:\n for line in file:\n doc.append(line)\n\ndf0 = pd.Series(doc)\n\n\n# In[78]:\n\n\ndef date_sorter():\n \n# Your code here\n df = df0\n #extract year\n df = df.str.lower()\n df = df.str.replace(',', '')\n df = df.str.replace('.', '')\n df = df.str.replace('st', '')\n df = df.str.replace('nd', '')\n df = df.str.replace('rd', '')\n df = df.str.replace('th', '')\n\n month_dict = {'jan': '01',\n 'feb': '02',\n 'mar': '03',\n 'apr': '04',\n 'may': '05',\n 'jun': '06',\n 'jul': '07',\n 'aug': '08',\n 'sep': '09',\n 'oct': '10',\n 'nov': '11',\n 'dec': '12'}\n\n month2_dict = {'january': '01',\n 'february': '02',\n 'march': '03',\n 'april': '04',\n 'may': '05',\n 'june': '06',\n 'july': '07',\n 'august': '08',\n 'september': '09',\n 'october': '10',\n 'november': '11',\n 'december': '12'}\n\n def clean_series(series):\n return series[series.notnull()]\n\n def get_next_dates(previous_dfs):\n clean_indeces = []\n for dfs in previous_dfs:\n #print(dfs.index)\n new_indeces = list(dfs.index)\n clean_indeces.extend(new_indeces)\n #print(clean_indeces)\n all_indices = df.index\n next_indeces = np.delete(all_indices, clean_indeces)\n next_dates = df[next_indeces]\n return next_dates\n\n\n def copy_index(df):\n indeces = df.index\n values = df.values\n df = pd.DataFrame()\n df['indeces'] = indeces\n df['date'] = values\n return df\n\n def replace_month(df):\n for month, code in month_dict.items():\n x = df['month'].str.find(month) #indexes where the substring is present\n df['month'][x>-1] = df['month'].replace(to_replace=r'\\w+\\W?', value=code, regex=True) #replace current month in cells with match\n\n df['month'] = df['month'].replace(to_replace= r'[A-Z]*[a-z]+', value=np.nan, regex=True)\n if 'day' in df.columns:\n df.loc[df['month'].isnull(), 'day'] = '01'\n df.loc[df['month'].isnull(), 'month'] = '01'\n return df\n\n def split_dates(series):\n dates = series.str.split('/', expand=True)\n dates.columns = ['month', 'day', 'year']\n dates['month'] = dates['month'].astype(int)\n dates['day'] = dates['day'].astype(int)\n dates['year'] = ['19'+str(i) if len(i)==2 else i for i in dates['year'] ]\n return dates\n\n date_dd_mm_yy = df.str.extract(r'(\\d{1,2}[/-]\\d{1,2}[/-]\\d+)')\n date_dd_mm_yy = date_dd_mm_yy.str.replace('-', '/')\n date_dd_mm_yy = clean_series(date_dd_mm_yy)\n date_dd_mm_yy = split_dates(date_dd_mm_yy)\n next_dates = get_next_dates([date_dd_mm_yy])\n\n # #extract dd month year\n dates_dd_mmm_yyyy = next_dates.str.extract(r'(\\d{1,2}\\s[a-z]{3,10}\\s[1,2]\\d{3})')\n dates_dd_mmm_yyyy = clean_series(dates_dd_mmm_yyyy)\n #split \n dates_dd_mmm_yyyy = dates_dd_mmm_yyyy.str.split(' ', expand=True)\n dates_dd_mmm_yyyy.columns = ['day', 'month', 'year']\n dates_dd_mmm_yyyy = replace_month(dates_dd_mmm_yyyy)\n dates_dd_mmm_yyyy = 
dates_dd_mmm_yyyy['month'].astype(str) + '/' +dates_dd_mmm_yyyy['day'].astype(str) + '/' + dates_dd_mmm_yyyy['year'].astype(str)\n dates_dd_mmm_yyyy = split_dates(dates_dd_mmm_yyyy)\n next_dates = get_next_dates([date_dd_mm_yy, dates_dd_mmm_yyyy])\n\n #######\n\n # #extract Month date yeat\n month_day_year = next_dates.str.extract(r'([a-z]{3,10}\\W?\\s?\\d{2}\\W?\\s?[1,2]\\d{3})') \n month_day_year = clean_series(month_day_year)\n month_day_year = month_day_year.str.extract(r'([a-z]{3,10})\\W?\\s?(\\d{2})\\W?\\s?([1,2]\\d{3})') \n month_day_year.columns = ['month', 'day', 'year']\n month_day_year = replace_month(month_day_year)\n month_day_year = month_day_year['month'].astype(str) + '/' + month_day_year['day'].astype(str) + '/' + month_day_year['year'].astype(str)\n month_day_year = split_dates(month_day_year)\n next_dates = get_next_dates([date_dd_mm_yy, dates_dd_mmm_yyyy,month_day_year])\n\n\n\n # #extract Month yeat\n month_year = next_dates.str.extract(r'([a-z]{3,10}\\W?\\s?[1,2]\\d{3})')\n month_year = clean_series(month_year)\n month_year = month_year.str.split(' ', expand=True)\n month_year.columns = ['month', 'year']\n month_year = replace_month(month_year)\n month_year = month_year['month'].astype(str) + '/01/' + month_year['year'].astype(str)\n month_year = split_dates(month_year)\n next_dates = get_next_dates([date_dd_mm_yy, dates_dd_mmm_yyyy,month_day_year,month_year])\n\n\n mm_year = next_dates.str.extract(r'([0-9]{1,2}/[1,2]\\d{3})')\n mm_year = clean_series(mm_year)\n mm_year = mm_year.str.extract(r'([0-9]{1,2})/([1,2]\\d{3})', expand=True)\n mm_year.columns = ['month', 'year']\n mm_year = mm_year['month'].astype(str) + '/' + '01' + '/' + mm_year['year'].astype(str)\n mm_year = split_dates(mm_year)\n next_dates = get_next_dates([date_dd_mm_yy, dates_dd_mmm_yyyy,month_day_year,month_year,mm_year])\n\n yyyy = next_dates.str.extract(r'\\s?\\w*\\W*(\\d{4})\\w*\\W*\\s?', expand=False)\n yyyy = '01/01/' + yyyy.astype(str)\n yyyy = split_dates(yyyy)\n\n final_df = pd.concat([date_dd_mm_yy, dates_dd_mmm_yyyy,month_day_year,month_year,mm_year, yyyy])\n\n length = 0\n for i in [date_dd_mm_yy, dates_dd_mmm_yyyy,month_day_year,month_year,mm_year, yyyy]:\n length += len(i)\n length\n\n final_df.reset_index(inplace=True)\n final_df.sort_values(by=['year', 'month', 'day'], inplace=True)\n result = final_df['index']\n result.reset_index(inplace=True, drop=True)\n return result\n\n" }, { "alpha_fraction": 0.6752634644508362, "alphanum_fraction": 0.6930116415023804, "avg_line_length": 29.548728942871094, "blob_id": "1486b7873ee719cbd39ccbcff77b4c9d86cb9fa1", "content_id": "4a6bfb4b430635e94f65ee352b7c25b53e2e6fab", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 14424, "license_type": "no_license", "max_line_length": 287, "num_lines": 472, "path": "/Assignment+3.py", "repo_name": "paul90hn/applied-text-mining-in-python", "src_encoding": "UTF-8", "text": "\n# coding: utf-8\n\n# ---\n# \n# _You are currently looking at **version 1.1** of this notebook. To download notebooks and datafiles, as well as get help on Jupyter notebooks in the Coursera platform, visit the [Jupyter Notebook FAQ](https://www.coursera.org/learn/python-text-mining/resources/d9pwm) course resource._\n# \n# ---\n\n# # Assignment 3\n# \n# In this assignment you will explore text message data and create models to predict if a message is spam or not. 
\n\n# In[187]:\n\n\nimport pandas as pd\nimport numpy as np\n\nspam_data = pd.read_csv('spam.csv')\n\nspam_data['target'] = np.where(spam_data['target']=='spam',1,0)\nspam_data.head(10)\n\n\n# In[188]:\n\n\nfrom sklearn.model_selection import train_test_split\n\n\nX_train, X_test, y_train, y_test = train_test_split(spam_data['text'], \n spam_data['target'], \n random_state=0)\n\n\n# ### Question 1\n# What percentage of the documents in `spam_data` are spam?\n# \n# *This function should return a float, the percent value (i.e. $ratio * 100$).*\n\n# In[189]:\n\n\ndef answer_one():\n is_spam = np.sum(spam_data['target']==1)\n total = spam_data.shape[0]\n result = is_spam/total\n result = result*100\n \n return result #Your answer here\n\n\n\n# In[190]:\n\n\nanswer_one()\n\n\n# ### Question 2\n# \n# Fit the training data `X_train` using a Count Vectorizer with default parameters.\n# \n# What is the longest token in the vocabulary?\n# \n# *This function should return a string.*\n\n# In[191]:\n\n\nfrom sklearn.feature_extraction.text import CountVectorizer\n\ndef answer_two():\n model = CountVectorizer().fit(X_train)\n tokens = model.get_feature_names()\n max_lenght = max([len(token) for token in tokens])\n max_token = [token for token in tokens if len(token)== max_lenght][0]\n return max_token#Your answer here\n\n\n# In[192]:\n\n\nanswer_two()\n\n\n# ### Question 3\n# \n# Fit and transform the training data `X_train` using a Count Vectorizer with default parameters.\n# \n# Next, fit a fit a multinomial Naive Bayes classifier model with smoothing `alpha=0.1`. Find the area under the curve (AUC) score using the transformed test data.\n# \n# *This function should return the AUC score as a float.*\n\n# In[193]:\n\n\nfrom sklearn.naive_bayes import MultinomialNB\nfrom sklearn.metrics import roc_auc_score\n\ndef answer_three():\n count_vectorizer = CountVectorizer().fit(X_train)\n transformed_train = count_vectorizer.transform(X_train)\n transformed_test = count_vectorizer.transform(X_test)\n\n model = MultinomialNB(alpha=0.1).fit(transformed_train, y_train)\n prediction = model.predict(transformed_test)\n result = roc_auc_score(y_test, prediction)\n return result #Your answer here\n\n\n# In[194]:\n\n\nanswer_three()\n\n\n# ### Question 4\n# \n# Fit and transform the training data `X_train` using a Tfidf Vectorizer with default parameters.\n# \n# What 20 features have the smallest tf-idf and what 20 have the largest tf-idf?\n# \n# Put these features in a two series where each series is sorted by tf-idf value and then alphabetically by feature name. The index of the series should be the feature name, and the data should be the tf-idf.\n# \n# The series of 20 features with smallest tf-idfs should be sorted smallest tfidf first, the list of 20 features with largest tf-idfs should be sorted largest first. 
\n# \n# *This function should return a tuple of two series\n# `(smallest tf-idfs series, largest tf-idfs series)`.*\n\n# In[195]:\n\n\nfrom sklearn.feature_extraction.text import TfidfVectorizer\n\ndef answer_four():\n vectorizer = TfidfVectorizer().fit(X_train)\n transformed_data = vectorizer.transform(X_train)\n feature_names = np.array(vectorizer.get_feature_names())\n\n max_tf_idfs = transformed_data.max(0).toarray()[0]\n sorted_indices = max_tf_idfs.argsort() #sort indices from smallest to largest\n \n bot_20_features = feature_names[sorted_indices[:20]]\n bot_20_scores = max_tf_idfs[sorted_indices[:20]]\n bot_20 = pd.Series(bot_20_scores, index=bot_20_features)\n\n top_20_features = feature_names[sorted_indices[:-21: -1]]\n top_20_values = max_tf_idfs[sorted_indices[:-21: -1]]\n top_20 = pd.Series(top_20_values, index=top_20_features)\n\n\n return (bot_20, top_20) #Your answer here\n\n\n# In[196]:\n\n\nanswer_four()\n\n\n# ### Question 5\n# \n# Fit and transform the training data `X_train` using a Tfidf Vectorizer ignoring terms that have a document frequency strictly lower than **3**.\n# \n# Then fit a multinomial Naive Bayes classifier model with smoothing `alpha=0.1` and compute the area under the curve (AUC) score using the transformed test data.\n# \n# *This function should return the AUC score as a float.*\n\n# In[197]:\n\n\ndef answer_five():\n vectorizer = TfidfVectorizer(min_df=3).fit(X_train)\n transformed_train = vectorizer.transform(X_train)\n transformed_test = vectorizer.transform(X_test)\n\n model = MultinomialNB(alpha=0.1).fit(transformed_train, y_train)\n prediction = model.predict(transformed_test)\n result = roc_auc_score(y_test, prediction)\n \n return result #Your answer here\n\n\n# In[198]:\n\n\nanswer_five()\n\n\n# ### Question 6\n# \n# What is the average length of documents (number of characters) for not spam and spam documents?\n# \n# *This function should return a tuple (average length not spam, average length spam).*\n\n# In[199]:\n\n\ndef answer_six():\n spam = spam_data[spam_data['target']==1]\n not_spam = spam_data[spam_data['target']==0]\n\n spam_lenght = [len(text) for text in spam['text']]\n total_spam_lenght = np.mean(spam_lenght)\n not_spam_lenght = [len(text) for text in not_spam['text']]\n total_not_spam_lenght = np.mean(not_spam_lenght)\n return (total_not_spam_lenght, total_spam_lenght) #Your answer here\n\n\n# In[200]:\n\n\nanswer_six()\n\n\n# <br>\n# <br>\n# The following function has been provided to help you combine new features into the training data:\n\n# In[201]:\n\n\ndef add_feature(X, feature_to_add):\n \"\"\"\n Returns sparse feature matrix with added feature.\n feature_to_add can also be a list of features.\n \"\"\"\n from scipy.sparse import csr_matrix, hstack\n return hstack([X, csr_matrix(feature_to_add).T], 'csr')\n\n\n# ### Question 7\n# \n# Fit and transform the training data X_train using a Tfidf Vectorizer ignoring terms that have a document frequency strictly lower than **5**.\n# \n# Using this document-term matrix and an additional feature, **the length of document (number of characters)**, fit a Support Vector Classification model with regularization `C=10000`. 
Then compute the area under the curve (AUC) score using the transformed test data.\n# \n# *This function should return the AUC score as a float.*\n\n# In[202]:\n\n\nfrom sklearn.svm import SVC\n\ndef answer_seven():\n vectorizer = TfidfVectorizer(min_df=5).fit(X_train)\n transformed_train = vectorizer.transform(X_train)\n transformed_test = vectorizer.transform(X_test)\n\n length_train = X_train.str.len() #[len(text) for text in X_train]\n length_test = X_test.str.len()\n\n transformed_train = add_feature(transformed_train, length_train)\n transformed_test= add_feature(transformed_test, length_test)\n\n model = SVC(C=10000).fit(transformed_train, y_train)\n prediction = model.predict(transformed_test)\n result = roc_auc_score(y_test, prediction)\n\n return result#Your answer here\n\n\n# In[203]:\n\n\nanswer_seven()\n\n\n# ### Question 8\n# \n# What is the average number of digits per document for not spam and spam documents?\n# \n# *This function should return a tuple (average # digits not spam, average # digits spam).*\n\n# In[204]:\n\n\n\ndef answer_eight():\n import re\n spam = spam_data[spam_data['target']==1]\n not_spam = spam_data[spam_data['target']==0]\n\n spam['count'] = [len(re.findall(r'\\d', string)) for string in spam['text']]\n not_spam['count'] = [len(re.findall(r'\\d', string)) for string in not_spam['text']]\n\n spam_average = np.mean(spam['count'])\n not_spam_average = np.mean(not_spam['count'])\n \n return (not_spam_average, spam_average)#Your answer here\n\n\n# In[205]:\n\n\nanswer_eight()\n\n\n# ### Question 9\n# \n# Fit and transform the training data `X_train` using a Tfidf Vectorizer ignoring terms that have a document frequency strictly lower than **5** and using **word n-grams from n=1 to n=3** (unigrams, bigrams, and trigrams).\n# \n# Using this document-term matrix and the following additional features:\n# * the length of document (number of characters)\n# * **number of digits per document**\n# \n# fit a Logistic Regression model with regularization `C=100`. 
Then compute the area under the curve (AUC) score using the transformed test data.\n# \n# *This function should return the AUC score as a float.*\n\n# In[206]:\n\n\nfrom sklearn.linear_model import LogisticRegression\n\ndef answer_nine():\n import re\n vectorizer = TfidfVectorizer(min_df=5, ngram_range=(1,3)).fit(X_train)\n transformed_train = vectorizer.transform(X_train)\n transformed_test = vectorizer.transform(X_test)\n\n\n X_train_n_digist = [len(re.findall(r'\\d', string)) for string in X_train] #digists per document\n X_test_n_digist = [len(re.findall(r'\\d', string)) for string in X_test]\n\n train_len = [len(text) for text in X_train]\n test_len = [len(text) for text in X_test]\n\n transformed_train = add_feature(transformed_train, X_train_n_digist)\n transformed_train = add_feature(transformed_train, train_len)\n\n transformed_test = add_feature(transformed_test, X_test_n_digist)\n transformed_test = add_feature(transformed_test, test_len)\n\n model = LogisticRegression(C=100).fit(transformed_train, y_train)\n prediction = model.predict(transformed_test)\n result = roc_auc_score(y_test, prediction)\n return float(result) #Your answer here\n\n\n# In[207]:\n\n\nanswer_nine()\n\n\n# ### Question 10\n# \n# What is the average number of non-word characters (anything other than a letter, digit or underscore) per document for not spam and spam documents?\n# \n# *Hint: Use `\\w` and `\\W` character classes*\n# \n# *This function should return a tuple (average # non-word characters not spam, average # non-word characters spam).*\n\n# In[208]:\n\n\ndef answer_ten():\n import re\n spam = spam_data[spam_data['target']==1]\n not_spam = spam_data[spam_data['target']==0]\n\n spam['count'] = [len(re.findall('\\W', string)) for string in spam['text']]\n not_spam['count'] = [len(re.findall('\\W',string)) for string in not_spam['text']]\n\n spam_average = np.mean(spam['count'])\n not_spam_average = np.mean(not_spam['count'])\n \n return (not_spam_average, spam_average)#Your answer here\n\n\n# In[209]:\n\n\nanswer_ten()\n\n\n# ### Question 11\n# \n# Fit and transform the training data X_train using a Count Vectorizer ignoring terms that have a document frequency strictly lower than **5** and using **character n-grams from n=2 to n=5.**\n# \n# To tell Count Vectorizer to use character n-grams pass in `analyzer='char_wb'` which creates character n-grams only from text inside word boundaries. This should make the model more robust to spelling mistakes.\n# \n# Using this document-term matrix and the following additional features:\n# * the length of document (number of characters)\n# * number of digits per document\n# * **number of non-word characters (anything other than a letter, digit or underscore.)**\n# \n# fit a Logistic Regression model with regularization C=100. 
Then compute the area under the curve (AUC) score using the transformed test data.\n# \n# Also **find the 10 smallest and 10 largest coefficients from the model** and return them along with the AUC score in a tuple.\n# \n# The list of 10 smallest coefficients should be sorted smallest first, the list of 10 largest coefficients should be sorted largest first.\n# \n# The three features that were added to the document term matrix should have the following names should they appear in the list of coefficients:\n# ['length_of_doc', 'digit_count', 'non_word_char_count']\n# \n# *This function should return a tuple `(AUC score as a float, smallest coefs list, largest coefs list)`.*\n\n# In[263]:\n\n\ndef answer_eleven():\n import re\n count_vectorizer = CountVectorizer(min_df=5, analyzer='char_wb', ngram_range=(2,5)).fit(X_train)\n transformed_train = count_vectorizer.transform(X_train)\n transformed_test = count_vectorizer.transform(X_test)\n\n train_len = [len(text) for text in X_train]\n test_len = [len(text) for text in X_test]\n\n train_digits = [len(re.findall(r'\\d', string)) for string in X_train]\n test_digits = [len(re.findall(r'\\d', string)) for string in X_test]\n\n train_no_words = [len(re.findall('\\W', string)) for string in X_train]\n test_no_words = [len(re.findall('\\W', string)) for string in X_test]\n\n transformed_train = add_feature(transformed_train, train_len)\n transformed_train = add_feature(transformed_train, train_digits)\n transformed_train = add_feature(transformed_train, train_no_words)\n\n transformed_test = add_feature(transformed_test, test_len)\n transformed_test = add_feature(transformed_test, test_digits)\n transformed_test = add_feature(transformed_test, test_no_words)\n\n model = LogisticRegression(C=100).fit(transformed_train, y_train)\n prediction = model.predict(transformed_test)\n auc_score = roc_auc_score(y_test, prediction)\n\n\n features = np.array(count_vectorizer.get_feature_names()) \n features = np.append(features, ['length_of_doc', 'digit_count', 'non_word_char_count'])\n coef = model.coef_[0]\n sorted_indices = coef.argsort() #sort indices from smallest to largest\n\n# bot_10_features = features[sorted_indices[:10]]\n# bot_10_coef = coef[sorted_indices[:10]]\n# bot_10 = pd.Series(bot_10_coef, index=bot_10_features)\n\n# top_10_features = features[indices_order[:-11: -1]]\n# top_10_coef = coef[sorted_indices[:-11: -1]]\n# top_10 = pd.Series(top_10_coef, index=top_10_features)\n\n df = pd.DataFrame()\n df['features'] = features\n df['coef'] = coef\n df.sort_values(by=['coef'], axis=0, ascending=True, inplace=True)\n\n bot_df =df.head(10)\n index = bot_df['features'].values\n values = bot_df['coef'].values\n bot_10= pd.Series(values, index=index)\n\n top_df = df.tail(10)\n top_df.sort_values(by=['coef'], axis=0, inplace=True, ascending=False)\n index = top_df['features'].values\n values = top_df['coef'].values\n top_10 = pd.Series(values, index=index)\n\n\n return (auc_score, bot_10, top_10) #Your answer here\n\n\n\n# In[264]:\n\n\nanswer_eleven()\n\n\n# In[ ]:\n\n\n\n\n" }, { "alpha_fraction": 0.6568742394447327, "alphanum_fraction": 0.6713669896125793, "avg_line_length": 26.799564361572266, "blob_id": "dd8e3629e775afbfff283efc4651ce6d96c5eeed", "content_id": "0902b01a016a9cbcc81c19888785ea1858f39463", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12765, "license_type": "no_license", "max_line_length": 287, "num_lines": 459, "path": "/Assignment+2.py", "repo_name": 
"paul90hn/applied-text-mining-in-python", "src_encoding": "UTF-8", "text": "\n# coding: utf-8\n\n# ---\n# \n# _You are currently looking at **version 1.0** of this notebook. To download notebooks and datafiles, as well as get help on Jupyter notebooks in the Coursera platform, visit the [Jupyter Notebook FAQ](https://www.coursera.org/learn/python-text-mining/resources/d9pwm) course resource._\n# \n# ---\n\n# # Assignment 2 - Introduction to NLTK\n# \n# In part 1 of this assignment you will use nltk to explore the Herman Melville novel Moby Dick. Then in part 2 you will create a spelling recommender function that uses nltk to find words similar to the misspelling. \n\n# ## Part 1 - Analyzing Moby Dick\n\n# In[1]:\n\n\nimport nltk\nnltk.download('punkt')\nnltk.download('wordnet')\nnltk.download('book')\nfrom nltk.book import *\n\nimport pandas as pd\nimport numpy as np\n\n# If you would like to work with the raw text you can use 'moby_raw'\nwith open('moby.txt', 'r') as f:\n moby_raw = f.read()\n \n# If you would like to work with the novel in nltk.Text format you can use 'text1'\nmoby_tokens = nltk.word_tokenize(moby_raw)\ntext1 = nltk.Text(moby_tokens)\n\n\n# In[9]:\n\n\n\n\n\n# ### Example 1\n# \n# How many tokens (words and punctuation symbols) are in text1?\n# \n# *This function should return an integer.*\n\n# In[12]:\n\n\ndef example_one():\n \n return len(nltk.word_tokenize(moby_raw)) # or alternatively len(text1)\n\nexample_one()\n\n\n# ### Example 2\n# \n# How many unique tokens (unique words and punctuation) does text1 have?\n# \n# *This function should return an integer.*\n\n# In[14]:\n\n\ndef example_two():\n \n return len(set(nltk.word_tokenize(moby_raw))) # or alternatively len(set(text1))\n\nexample_two()\n\nlen(set(moby_tokens))\n\n\n# ### Example 3\n# \n# After lemmatizing the verbs, how many unique tokens does text1 have?\n# \n# *This function should return an integer.*\n\n# In[21]:\n\n\nfrom nltk.stem import WordNetLemmatizer\n\ndef example_three():\n\n lemmatizer = WordNetLemmatizer()\n lemmatized = [lemmatizer.lemmatize(w,'v') for w in text1]\n\n return len(set(lemmatized))\n\nexample_three()\n\n\n# ### Question 1\n# \n# What is the lexical diversity of the given text input? (i.e. ratio of unique tokens to the total number of tokens)\n# \n# *This function should return a float.*\n\n# In[27]:\n\n\ndef answer_one():\n total_words = len(nltk.word_tokenize(moby_raw))\n unique_words = len(set(nltk.word_tokenize(moby_raw)))\n ratio = unique_words / total_words\n \n return ratio # Your answer here\n\nanswer_one()\n#correcta\n\n\n# ### Question 2\n# \n# What percentage of tokens is 'whale'or 'Whale'?\n# \n# *This function should return a float.*\n\n# In[10]:\n\n\ndef answer_two():\n tokens = nltk.word_tokenize(moby_raw)\n whales = [w for w in tokens if (w== 'whale' or w=='Whale')]\n percentage = (len(whales)/len(tokens))*100\n return percentage # Your answer here 0.42593747426137496\n\nanswer_two()\n\n#correcta\n\n\n# ### Question 3\n# \n# What are the 20 most frequently occurring (unique) tokens in the text? What is their frequency?\n# \n# *This function should return a list of 20 tuples where each tuple is of the form `(token, frequency)`. 
The list should be sorted in descending order of frequency.*\n\n# In[14]:\n\n\ndef answer_three():\n tokens = nltk.word_tokenize(moby_raw)\n frequency = FreqDist(tokens)\n\n words = frequency.keys()\n values = list(frequency.values())\n\n frequency = pd.Series(values, index=words)\n frequency.sort_values(ascending=False, inplace=True)\n top_tokens = frequency.head(20)\n top_values = top_tokens.values\n top_tokens = top_tokens.index\n \n return list(zip(top_tokens, top_values)) # Your answer here \n\nanswer_three()\n#correcta\n\n\n# ### Question 4\n# \n# What tokens have a length of greater than 5 and frequency of more than 150?\n# \n# *This function should return an alphabetically sorted list of the tokens that match the above constraints. To sort your list, use `sorted()`*\n\n# In[89]:\n\n\ndef answer_four():\n tokens = nltk.word_tokenize(moby_raw)\n frequency = FreqDist(tokens)\n\n # 'and' with parentheses is needed here: '&' binds tighter than '>',\n # so 'frequency[word]>150 & len(word)>5' would compare against (150 & len(word))\n long_words = [word for word in frequency if (frequency[word] > 150) and (len(word) > 5)]\n return sorted(long_words) # Your answer here\n\nanswer_four()\n\n# alternative DataFrame-based check of the same result\ntokens = nltk.word_tokenize(moby_raw)\nfrequency = FreqDist(tokens)\n\nfrequency = pd.DataFrame.from_dict(frequency, orient='index')\nfrequency.reset_index(inplace=True)\n\nfrequency.columns = ['token', 'frequency']\nfrequency = frequency[frequency['frequency']> 150]\nlength = []\nfor token in frequency['token']:\n length.append(len(token))\n \nfrequency['length'] = length\nfrequency = frequency[frequency['length']>5]\nfrequency.sort_values(by=['token'], inplace=True)\nsorted(frequency['token'])\n\n\n# ### Question 5\n# \n# Find the longest word in text1 and that word's length.\n# \n# *This function should return a tuple `(longest_word, length)`.*\n\n# In[54]:\n\n\ndef answer_five():\n tokens = nltk.word_tokenize(moby_raw.lower())\n lengths = [len(word) for word in tokens]\n df = pd.DataFrame()\n df['token'] = tokens\n df['length'] = lengths\n lengths = df\n max_index = lengths['length'].idxmax()\n max_length = lengths['length'][max_index]\n max_word = lengths['token'][max_index]\n \n \n return (max_word, max_length) # Your answer here\n\nanswer_five()\n\n#correcta\n\n\n# ### Question 6\n# \n# What unique words have a frequency of more than 2000? 
What is their frequency?\n# \n# \"Hint: you may want to use `isalpha()` to check if the token is a word and not punctuation.\"\n# \n# *This function should return a list of tuples of the form `(frequency, word)` sorted in descending order of frequency.*\n\n# In[20]:\n\n\ndef answer_six():\n tokens = nltk.word_tokenize(moby_raw)\n frequency = FreqDist(tokens)\n alphanumeric = [word for word in frequency.keys() if (word.isalpha()==True)]\n plus_2000 = [word for word in alphanumeric if frequency[word]>2000]\n frequencies_plus_2000 = [frequency[word] for word in plus_2000]\n frequencies = pd.DataFrame()\n frequencies['word'] = plus_2000\n frequencies['frequency'] = frequencies_plus_2000\n frequencies.sort_values(by=['frequency'], ascending=False, inplace=True)\n top_words = frequencies['word']\n top_frequencies = frequencies['frequency']\n\n return list(zip(top_frequencies, top_words))# Your answer here\n\nanswer_six()\n\n\n# ### Question 7\n# \n# What is the average number of tokens per sentence?\n# \n# *This function should return a float.*\n\n# In[44]:\n\n\ndef answer_seven():\n tokens = nltk.word_tokenize(moby_raw.lower())\n sentences = nltk.sent_tokenize(moby_raw.lower())\n tokens_per_sentence = len(tokens)/ len(sentences)\n \n return tokens_per_sentence # Your answer here\n\nanswer_seven()\n\n#correcta\n\n\n# ### Question 8\n# \n# What are the 5 most frequent parts of speech in this text? What is their frequency?\n# \n# *This function should return a list of tuples of the form `(part_of_speech, frequency)` sorted in descending order of frequency.*\n\n# In[22]:\n\n\ndef answer_eight():\n tokens = nltk.word_tokenize(moby_raw)\n tags = nltk.pos_tag(tokens)\n\n parts = [word[1] for word in tags]\n frequency = FreqDist(parts)\n frequency = pd.DataFrame.from_dict(frequency, orient='index')\n frequency.columns = ['frequency']\n frequency.sort_values(by=['frequency'], ascending=False, inplace=True)\n frequency = frequency.head(5)\n part_of_speech = frequency.index\n frequency = frequency['frequency'].values\n \n result = []\n for i in range(len(part_of_speech)):\n result.append((part_of_speech[i], frequency[i])) \n \n return list(zip(part_of_speech, frequency)) # Your answer here [('NN', 39860), ('IN', 28831), ('DT', 26033), ('JJ', 19562), (',', 19204)]\n\nanswer_eight()\n\n\n# ## Part 2 - Spelling Recommender\n# \n# For this part of the assignment you will create three different spelling recommenders, that each take a list of misspelled words and recommends a correctly spelled word for every word in the list.\n# \n# For every misspelled word, the recommender should find find the word in `correct_spellings` that has the shortest distance*, and starts with the same letter as the misspelled word, and return that word as a recommendation.\n# \n# *Each of the three different recommenders will use a different distance measure (outlined below).\n# \n# Each of the recommenders should provide recommendations for the three default words provided: `['cormulent', 'incendenece', 'validrate']`.\n\n# In[2]:\n\n\nfrom nltk.corpus import words\n\ncorrect_spellings = words.words()\nlen(correct_spellings)\n\n\n# ### Question 9\n# \n# For this recommender, your function should provide recommendations for the three default words provided above using the following distance metric:\n# \n# **[Jaccard distance](https://en.wikipedia.org/wiki/Jaccard_index) on the trigrams of the two words.**\n# \n# *This function should return a list of length three:\n# `['cormulent_reccomendation', 'incendenece_reccomendation', 
'validrate_reccomendation']`.*\n\n# In[73]:\n\n\ndef answer_nine(entries=['cormulent', 'incendenece', 'validrate']):\n def get_recomendation(entry, n):\n \n distances = {}\n entry_gram = set(nltk.ngrams(entry, n=n))\n first_e = entry[0]\n for word in correct_spellings:\n first_w = word[0]\n\n if first_e == first_w :\n word_gram = set(nltk.ngrams(word, n=n)) \n jaccard_distance = nltk.jaccard_distance(entry_gram, word_gram)\n distances[word] = jaccard_distance\n\n distances = pd.DataFrame.from_dict(distances, orient='index')\n distances.columns = ['distance']\n distances.sort_values(by= ['distance'], ascending=True, inplace=True) #, axis=1, inplace=True)\n result = distances.index[0]\n return result\n\n results = []\n for entry in entries:\n results.append(get_recomendation(entry, 3))\n\n return results# Your answer here\n \nanswer_nine()\n\n\n# ### Question 10\n# \n# For this recommender, your function should provide recommendations for the three default words provided above using the following distance metric:\n# \n# **[Jaccard distance](https://en.wikipedia.org/wiki/Jaccard_index) on the 4-grams of the two words.**\n# \n# *This function should return a list of length three:\n# `['cormulent_reccomendation', 'incendenece_reccomendation', 'validrate_reccomendation']`.*\n\n# In[75]:\n\n\ndef answer_ten(entries=['cormulent', 'incendenece', 'validrate']):\n \n def get_recomendation(entry, n):\n \n distances = {}\n entry_gram = set(nltk.ngrams(entry, n=n))\n first_e = entry[0]\n for word in correct_spellings:\n first_w = word[0]\n\n if first_e == first_w :\n word_gram = set(nltk.ngrams(word, n=n)) \n jaccard_distance = nltk.jaccard_distance(entry_gram, word_gram)\n distances[word] = jaccard_distance\n\n distances = pd.DataFrame.from_dict(distances, orient='index')\n distances.columns = ['distance']\n distances.sort_values(by= ['distance'], ascending=True, inplace=True) #, axis=1, inplace=True)\n result = distances.index[0]\n return result\n\n results = []\n for entry in entries:\n results.append(get_recomendation(entry, 4))\n \n return results# Your answer here\n \nanswer_ten()\n\n\n# ### Question 11\n# \n# For this recommender, your function should provide recommendations for the three default words provided above using the following distance metric:\n# \n# **[Edit distance on the two words with transpositions.](https://en.wikipedia.org/wiki/Damerau%E2%80%93Levenshtein_distance)**\n# \n# *This function should return a list of length three:\n# `['cormulent_reccomendation', 'incendenece_reccomendation', 'validrate_reccomendation']`.*\n\n# In[76]:\n\n\ndef answer_eleven(entries=['cormulent', 'incendenece', 'validrate']):\n def get_recomendation(entry, n):\n \n distances = {}\n #entry_gram = set(nltk.ngrams(entry, n=n))\n first_e = entry[0]\n for word in correct_spellings:\n first_w = word[0]\n\n if first_e == first_w :\n #word_gram = set(nltk.ngrams(word, n=n)) \n edit_distance = nltk.edit_distance(entry, word)\n distances[word] = edit_distance\n\n distances = pd.DataFrame.from_dict(distances, orient='index')\n distances.columns = ['distance']\n distances.sort_values(by= ['distance'], ascending=True, inplace=True) #, axis=1, inplace=True)\n result = distances.index[0]\n return result\n\n results = []\n for entry in entries:\n results.append(get_recomendation(entry, 4))\n \n \n \n return results # Your answer here \n \nanswer_eleven()\n\n\n# In[ ]:\n\n\n\n\n" } ]
3
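A more compact sketch of the trigram-Jaccard lookup that answer_nine in the record above implements; it assumes nltk with its words corpus available and the same correct_spellings list, and the helper name recommend is illustrative, not part of the original assignment.

import nltk
from nltk.corpus import words

correct_spellings = words.words()

def recommend(entry, spellings=correct_spellings, n=3):
    # compare only candidates sharing the first letter, as the assignment requires
    entry_grams = set(nltk.ngrams(entry, n))
    candidates = (w for w in spellings if w.startswith(entry[0]))
    # jaccard_distance is 0.0 for identical n-gram sets and 1.0 for disjoint ones;
    # min() over the distance replaces the DataFrame sort used in the record above
    return min(candidates, key=lambda w: nltk.jaccard_distance(entry_grams, set(nltk.ngrams(w, n))))

print([recommend(e) for e in ['cormulent', 'incendenece', 'validrate']])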
APAC-DevOps/iac
https://github.com/APAC-DevOps/iac
152d50a88cfd07b9fc204265c365127c9f593c0d
1140b65e3f101e990949740f2989002101031969
4d9dfa4e9d16b2f88913d989b3a1d9be1602f854
refs/heads/master
2020-04-06T17:02:49.368147
2018-11-15T03:05:53
2018-11-15T03:05:53
157,642,120
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.46610307693481445, "alphanum_fraction": 0.4753328561782837, "avg_line_length": 37.19968795776367, "blob_id": "0ab885585d89ee70f1a1026143d96807a7f49c25", "content_id": "7d629b34bc9809dfa5822ded9b960f65c920373e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 24486, "license_type": "no_license", "max_line_length": 142, "num_lines": 641, "path": "/Azure/130-appgateway/130-appgateway.py", "repo_name": "APAC-DevOps/iac", "src_encoding": "UTF-8", "text": "import os\nimport sys\nimport argparse\nimport subprocess\nimport requests\nfrom azure.mgmt.network import NetworkManagementClient\nfrom azure.mgmt.resource import ResourceManagementClient\n# Create Application Gateway\ndef agri_Api_AppGateway(envName, agri_site_fqdn, AzureServicePrincipalcredentials, subscription_id, resource_group, location='AustraliaEast'):\n network_client = NetworkManagementClient(AzureServicePrincipalcredentials, subscription_id)\n\n async_network_client = network_client.virtual_networks.create_or_update(\n resource_group,\n resource_group + 'Vnet',\n {\n 'location': location,\n 'address_space': {\n 'address_prefixes': ['10.64.0.0/16']\n }\n }\n )\n\n async_network_client.wait()\n\n async_common_subnet = network_client.subnets.create_or_update(\n resource_group,\n resource_group + 'Vnet',\n \"AgriCommon\",\n {\n 'address_prefix': '10.64.0.0/24'\n }\n )\n\n async_function_subnet = network_client.subnets.create_or_update(\n resource_group,\n resource_group + 'Vnet',\n \"AgriFunctionAPI\",\n {\n 'address_prefix': '10.64.1.0/24'\n }\n )\n\n async_appGateway_subnet = network_client.subnets.create_or_update(\n resource_group,\n resource_group + 'Vnet',\n \"AgriAPIAppGateway\",\n {\n 'address_prefix': '10.64.2.0/24'\n }\n )\n\n async_gateway_subnet = network_client.subnets.create_or_update(\n resource_group,\n resource_group + 'Vnet',\n \"GatewaySubnet\",\n {\n 'address_prefix': '10.64.3.0/24'\n }\n )\n\n AppGW_Subnet_Id = async_appGateway_subnet.result().id\n\n\n\n # Create Public IPv4 Address\n async_appGW_frontend_public_IP_Addr = network_client.public_ip_addresses.create_or_update(\n resource_group,\n resource_group + \"AppGatewayFrontendPublicIPAddr\",\n {\n 'location': location,\n \"sku\": {\n \"name\": \"Basic\"\n },\n 'public_ip_allocation_method': \"Dynamic\",\n 'public_ip_address_version': \"IPv4\",\n }\n )\n Frontend_Public_IP_Configure_Id = async_appGW_frontend_public_IP_Addr.result().id\n\n resource_client = ResourceManagementClient(AzureServicePrincipalcredentials, subscription_id)\n\n\n print(\"\\nInitializing the deployment with subscription id: {}, resource group: {}\\n\".format(subscription_id, resource_group))\n\n async_resource_group = resource_client.resource_groups.get(\n resource_group\n )\n\n resource_group_id = async_resource_group.id\n\n # Create Application Gateway\n print('\\nCreating Application Gateway...')\n AgriAPIAppGatewayName = resource_group + 'AppGateway'\n appgateway_id = resource_group_id + '/providers/Microsoft.Network/applicationGateways/' + AgriAPIAppGatewayName\n appgateway_public_frontip_name = resource_group + \"PublicInterfaceIPv4Addr\"\n # appgateway_private_frontip_name = \"PrivateInterfaceIPv4Addr\"\n FrontPort_Http80 = \"HTTP80\"\n FrontPort_Https443 = \"HTTPS443\"\n FrontPort_Http8080 = \"HTTP8080\"\n\n\n# Define Listeners\n api_http_listener = \"ApiHTTPListener\"\n api_https_listener = \"ApiHTTPSListener\"\n api_http8080_listener = \"ApiHTTP8080Listener\"\n ous_http_listener = 
\"OusHTTPListener\"\n ous_https_listener = \"OusHTTPSListener\"\n\n# Define backend pool name\n api_backend_pool = \"api_web_app\"\n ous_backend_pool = \"ous_web_app\"\n\n\n# define backend http settings\n appgateway_backend_http_settings_name = \"appGatewayBackendHttpSettings\"\n appgateway_backend_https_settings_name = \"appGatewayBackendHttpSSLSettings\"\n appgateway_backend_http_settings_ous = \"appGatewayBackendHttpSettingsDevOus\"\n appgateway_backend_https_settings_ous = \"appGatewayBackendHttpSSLSettingsDevOus\"\n\n\n# define path maps\n api_http_url_path_maps = \"ApiHttpUrlPathMaps\"\n api_https_url_path_maps = \"ApiHttpsUrlPathMaps\"\n ous_http_url_path_maps = \"OusHttpUrlPathMaps\"\n ous_https_url_path_maps = \"OusHttpsUrlPathMaps\"\n\n\n# define probes\n agri_api_web_app_http_probe_name = \"AgriApiHttpWebAppProbe\"\n agri_api_web_app_https_probe_name = \"AgriApiHttpsWebAppProbe\"\n agri_ous_web_app_http_probe_name = \"AgriOusHttpWebAppProbe\"\n agri_ous_web_app_https_probe_name = \"AgriOusHttpsWebAppProbe\"\n\n\n# define rule name\n api_http_path_based_rule = \"ApiHttp\"\n api_https_path_based_rule = \"ApiHttps\"\n api_http8080_path_based_rule = \"ApiHttp8080\"\n ous_http_path_based_rule = \"OusHttp\"\n ous_https_path_based_rule = \"OusHttps\"\n\n\n# define listner DNS name\n api_agri_fqdn = envName + \"-api.agridigital.io\"\n ous_agri_fqdn = envName + \"-ous.agridigital.io\"\n\n\n# define backend pool hosts name\n api_azure_fqdn = agri_site_fqdn[0]\n ous_azure_fqdn = agri_site_fqdn[1]\n\n wildcast_ssl_cert_name = \"agridigital-io.cer\"\n\n\n# create or update application gateway\n async_ag_creation = network_client.application_gateways.create_or_update(\n resource_group,\n AgriAPIAppGatewayName,\n {\n 'location': location,\n \"sku\": {\n \"name\": \"WAF_Medium\",\n \"tier\": \"WAF\",\n \"capacity\": 1\n },\n \"web_application_firewall_configuration\": {\n \"enabled\": True,\n \"firewall_mode\": \"Detection\",\n \"rule_set_type\": \"OWASP\",\n \"rule_set_version\": \"3.0\",\n \"disabled_rule_groups\": [{\n \"rule_group_name\": \"REQUEST-942-APPLICATION-ATTACK-SQLI\",\n# \"rules\": [942200, 942260, 942340, 942370]\n }\n ]\n },\n \"ssl_certificates\": [{\n \"name\": wildcast_ssl_cert_name,\n \"password\": \"tWLBYgvhxnq9AweMQ5IyoScTPXa8jFCdmO1sDJ4RpGZurfi073\",\n \"data\": 
\"MIIW2wIBAzCCFpcGCSqGSIb3DQEHAaCCFogEghaEMIIWgDCCBikGCSqGSIb3DQEHAaCCBhoEggYWMIIGEjCCBg4GCyqGSIb3DQEMCgECoIIE/jCCBPowHAYKKoZIhvcNAQwBAzAOBAh6FAU1IyeqKQICB9AEggTYLLX6vYuolgXWjjWLv7ShQfDbE7W04mRYT72vaslR2NUjYQPKoFQlSgaxIG8LCjGLXhpGWbsbn3oEMEcDsKligMdkR7SIAp0b427yZZ3S5A4kRGivMrN86RM1fmssd5ymq/99h39eK9KNA4+liWI7ECJhZN09sX+2+VlzhHA4buvEXu27Siwuz6agp+WIFM5Lu93k6ZCugMVD0EIe+0pk7aWqW6/cYRc872DItFVMF3esxLg2BtMaE6hpgHj7U6+NbnZ9dLojz+wTJBsABttKVRh8hcYwln8jtW0CLgAwlzDY9uaJg8cmMsXfvmYXwNTWGQSwOWiJ6CJ4cNDT/aCQX21rGvm6nGgot3f1prnL/TqQExhB31z1r/1n92uC6xrwwlUS7hLSRZBo3p1Ojp8tCPCwb8nxEOruCu8rdR6otcqZKmOwXAqy2F2a6+mXLtHV7ZbHbax6IzZyfde8Zo0vNuG9z0ULavOFlfticdcxXqAURf2yKeARamkYMK7e3/GPPkAtaNbLtHo9bTw5vWHAL3Eueu7t2RypGjN85vNcV99NF/yx1OPtfzkkjpyFojDNQwrfHv+D0JffpGfoXpSJdXwZ6izurXlElPL+0CjVBpgXQC/QcDptA1lh0aHzE7rSVHKA+CQH1MGK6ajXSTVg4zy/4jhRhkw/ZCdV/9dSWoRze+eHusPXJi4RLOjBF1M9wlnwWiN0kJ1SCR+ckqCaXJIHJXljadme2otwmjJ6sjNWoYFLixHTENQsuKjR5O6NZcK1iVIK8EFaIa3OIwJ6TS6gdrQugkOQq28HrIbXPV1R1XD2SAw5TjqKb20IQGz90O1eOLjhyX4/omndW+85hjVctoYcQbztMKZMihDA44Hvaf0zMI1oS7HRyDk/PzNb4A8SFhrRtofFIIn1+Ve+SbobWLW3nNHavjqYPjC0XkF1Qps2QgHKs8BlN2MW9a6YJiS+ogDDDo+BpmT2SsHFDroZ2VON+l7E7z8kVl1ZjKM/nlnYgAQb30taQntQcMmPBSUNKUGhFD0aquOR1Alc7/LbiC1JxGfGDFODvZmj2usx+Fm+BUW4UOuvGs0dzo0/8vjairxVBCOTEldvaYYcG2WJEp0J1BSYNlsHQ+ijhhCVXjWiw+w7bg1iyQ/CM8um/PoGLjo/nRW4FrNc+6jrhF7ofDExZ8/hB92sxZtr+HLWs8IN4XYzJrcfgx21YgTCmaxuPIaGIVwTsOez7KjLYb5bdJg0Hg0oHUi10p0yf9Toxj/fHNBhdu5tqM/Tv4Hfjjql1VAp6euET2Yl+DhCtvq9Nujn7NFV8P4/w7+7yAENoHoRF959zPNF2dyVAMkgeYN6eCVJvR0XKzw7Z6cSPA5X+7lKWGIZkUYqdk0khYzziTGlncDP6H1IN7+VR7vHGMPiWgeJxIRx40HmnLAN6AQrmv24d1edlxbF9BkS18wvjFBE2fDyTeYu8XD6UTMzH0bAT8OvJHQqBhvR0tUlW4UYfEIyAzkaUGi1MiAdQj27YRrE/ztCyNuvIajeQ64c/aswKvzxjGQfZSBMYj0K/51Sq5ALMREwvyd8LPnsHzZM6iImTYZRnu3U7E4zPtuvVGuOXG+Lf0x4a6/nRCHPm7AtJ71puZaMEDijWGspA79qf69H4MhE9jGB/DANBgkrBgEEAYI3EQIxADATBgkqhkiG9w0BCRUxBgQEAQAAADBbBgkqhkiG9w0BCRQxTh5MAHsAOQBBAEYANQBCAEYAOAA0AC0ARAA5ADgANgAtADQANAAwADQALQA4ADgAOAA3AC0AOAA1AEYANAAwADEARQBGADAAMQA0AEQAfTB5BgkrBgEEAYI3EQExbB5qAE0AaQBjAHIAbwBzAG8AZgB0ACAARQBuAGgAYQBuAGMAZQBkACAAUgBTAEEAIABhAG4AZAAgAEEARQBTACAAQwByAHkAcAB0AG8AZwByAGEAcABoAGkAYwAgAFAAcgBvAHYAaQBkAGUAcjCCEE8GCSqGSIb3DQEHBqCCEEAwghA8AgEAMIIQNQYJKoZIhvcNAQcBMBwGCiqGSIb3DQEMAQMwDgQISDZv2hnfZx8CAgfQgIIQCNX4SvSfTHj4nBvrXW8gVLN2TqFP3hHbuPZMl7peb6oGctZAczEqY44N+9zJLHy1GuF2kxJdZ6JEvoEjtDupv8aOagHJ0SRxJOJJTCWWgmp+QJJHRUoZoDL33a6Rffe962UqaMbKdAa20v3G+a/zCb1N7QnfOgORkuOeOOnkVh9+8xCBAtuRBw/Qztr9mpXS+oclEbnD++HW4Bcne1g0cH+enaP/c1aJzcgyoFxIHpss6Sb/KxCJgNgjUxbyPBzvmF5UtbSmdOWo2VryZGBlfKe+H4yEf8HbMEL0zOiuabSS5h/etjQpD9LtF+HLprE5bjpKa0Vg6xVgOvu1d/y4JJRollaUhTDzBs1SWYLiIxoC+5vDxwArWaIIONXCzG0dVgs14qUAkeJFahsUHO9d4Ad4j9qGd701Wz5Eo17yf9ILWR9miu/qsuYjzjoSUfwZm54W+tKE63jsQ/wZ9fI/gmxXE7GtcmVFDW+KCwUmryK4BKth0oXZGWluFd1Krdy/wwaUzPI9Zgx7GnlloP/XYx92TY4Jswxn6125DTjskdWr4Rs0r0R3FiTMcLyttuWminlG5jyenBqlCXnClML0kRAQ4m1bHlhybG37J8xGdPk3AUV92MT7wGBEg2qd92l2+Wh5Ey5dQHG/m7ntjbp3Ux7povN9+BaNijWNSqdkSwtKWlpxLoO26b29ot4H5TBlQJlCBqrOdT+4PypAmoTwxOZNiv01TK2WdP8hXBMB7CNI7sGCSe7dLpSSZgPsQxxVu1vJJbUa7OZ9kPKH6jcmeDEDK/Og3b/RrBQOsUu0obhiIFlrmgdzehz8AY09W8Sj+ppGx2nAcc0H3Yandql/fiKAEj0Ys8tHr3egYGNYvAd0PW3v+WIW8BDpeJhSCC9f6fEo2VZ/PgiU3eCOgNmwIFyAH1Nx8qcVuYyX5PJ91eGxFxWP3S2CCh2ogcrzlPYEpmGF/RtEugFApWleKKgTplXBdElPxGhhkd2Qwq1OiARNrDLRpzRgmsqGG1KKSBHNkj+nY2UDdYcX9k7S/bov0XnPPOlm6xNlnzspvzo3JNOeShroS2lPuG+ERxtcLwheRqemELtugLddwsMgcXeIt8O1ydQ4eDwWNtXSkv9jfx1Ik0SOmqL1/O4NaAK198MHBSP3ZK9uTauf05l6jLkpLwUUy389ZGGrgWwQ3qDX50oVDOHd2oxalPT1CKDSGxGYJyAcVNFPPT+q9XOwl3UeECZ6c4H1yxOZQbFbYynG9asDsQ2sGv/djupkOWkx+ghLvdI2zHNttF5Y0VYzF8aFw
C5GEh9G+BkmqGNTBa2zT+63OWLXNplCrmc3zMf0Rkx1zJIkwtDYuijuPcDIXVSelzGZPgDLx/kAL8l/fh22H/7EYlkmEs9TnWyh1GnMYMwA4SnQXotB0fn9ncX68KD+OU+C6xc6ENW+yfhx4Le7AN612VPJr2f0AbBKA+Cuji65GDO8gCHbw/TDYMLqh+RKx6jXCfFhwxo6JXjJ+opLfio0omj9a650IJ01qlbm1oD5m0Vat0KcL/gTyOthqNjUv1SR0kY+xIXK7wD4SiubQ9ZrY8SN1ultp3gUIuYN78oPtk7Zjsj+MRLhrwRLeahmUxChZj3yuftGvhmQYB98Ea/xDFMEnc/72gHM3m+XBhgyT19cErfJ1EsK+l3PEMhLSsBFssXSHBCCTBqqtLPq43CdTns86kI+Psc1ksBAgFynDw5XUXvKopbD3GgQ3M9SV6FnH63s1e/ukC6s7iRcE5hA+Sc5j9NtsMqG8OWNX4NhxzHA42kB3SKzieJUw37KmE2fyrcolBueZ47eU8+rxrIiyZ4taObhK7uxkjKPE9bdbVvzb3kbIEqatq/sAVs+ZvqXvqqunlQHjGxz47Hp3r21qi+ow11AI9FsLqkYnN1h9ANASG+Rz91E6o7/C+QCl/V4gU4AQHdqQCGkKMm5tUk32Y15dbgEqS172Z0Wk7NY1BLWxxUc8EBCf8z92d+bl0glE8I18iKQcWzA6xUQxjths3lPzgBPdXEoh11Z8wK3HQOZFvUrD+tn1sXVcY/mfFrrkl4+fAwpqAAf06tnhirK0I6HQPdOwr3+l8+HjEQWg2764WKKzIS4BcTZ0iENy+wS/DD360NeXuxwKWST+Nb6BiWRRLE20RLfxXGwDuOsWTPrE6ZHiG8SG8ghUhRJJhYdBA21SL+7ciYGbjT37pqG3/yULatOaNZK1NFiDNqFCJtej0g3LRCxnUoNVucE33JJwDQMY00cUZWdFhKp2t8wGAXw2lK1LWvfDCBEGL2Yuam9ocAW4COb2ktI8LQ4uvnfANPxOYhckog5UUp5CKnLDEIUFtK2zp5Bd8puoSm+OyYZZigfqn6vKxg39evAMplPToue90edew4xWByGVgTT8A7RgaB35YobVRBDPC22lepAI0C8uLjR/BjonaJRV/sav+poOdsuud+4bIkv9TN9Z9HOdQr4Z3+VC8SBQhNL8RCfht2lkQ0JfbqK2HsY9M/pyUlNpDzUkjJf514tNDmEWFN3F4oys0Z5A1FaBEQ6tWxw8+RqF9DY2ZfaipVeDXwDwn3ipom0hYRaQ9cnC+fqRKYDoV2yFTz2IJIAz1NnETfXWDReyT+KbqciuSUHjFK3k+oCVT81Ct3QOBR8URgXvucfVLNDRhnLs5TOgkpptKzzOp6k3ePCoK8Pbea7Rr1uP1z8DxlG/Bipnx++hLEYnQeIaKSrM0H8/GemAXbT6nYCLXqSX1l2CnqPffFMbFWZpmm7Yd/SGnXPj9FqqwKCl6qjp6PdePeMTYR0PTOuCMEeOiQ9zJIR0zAJF8n/83uF9huxGxatBoAY2XMiPac7hupD+GcoqB22HI21ZWAkCNz52sJw2fWq4zeEimM1oL7tpCjN3XbEX8zFZmqRe1ThzyV5MVXS44mVDnYqGMP0Zhf0CWaBjsSoT4TrS60Ca4/Vbeh8a5vlBB5gFV+6w5NdLcnXDPa4guLNVtfH7mVvf2Sf678km6asiJVQh/mJu8F8wXAnr/7lokj3e39s8ZyoWxIi/sjd8ylM9w4miQ2MyvkMmJqf/bv/TAqy1HoyvHEenvcXGibgIl9M724nejboWozOirCBbW6Ghu2ipNUwroqjmi6DUM3iCloiJ/KtiLhlTNpjZChrUAUCadCRZTJR1BwFganaLlj6c458RHOL6vknAW9Ld35eDVOcHzIcC7IGj0s6Y5f09SwMVu3ZRcrCi1YWqCGKLHqxIx4a8TPrH71OvrdNin7gXbenyWfch4itxjMND0TffPNTuLotQJFsIVwy9FMjN/k10+5kK8Y/Co5fRoOdICHfiORvytGm3HsLoATlrX2mJocSmN6MCH16XffzwMw+yibuX7VhLuyNmli6FGv+cg1ya5BRyVUxJwGP9Nhkwxsa0LvMT9sgtm4lLkAbM6W6p/FzzaSrY1X0qodFtIHG45fv8q10EMstLMsyK4AIunKxd6gcOZb6mRWfEC8Kjv6999x8HwrJabMrtx/2v3CRGqonWKZZfE51upeWmge3EpCoSrJu55uZQ1bDOw0Kqlyxhvp1MhMNTTTeavESd3qfiG8KCQNB6hjQLIS+HbKhaJ+VPjJ9jhO9Gj9XIgSKAPKAeFe/5q7XGd0vXMsH9VSXx9UXkjdmXUWWxZtqESS2XZXV3e1rx+vldv+1J7iwoxkPP8E3qqZw4b1AejDVQRwdo29Vy5cu5tRSQHT6jXDq2hDj2zT5XaHcdFFRN5X3SolykUf19Vzo71IkybHAo28rbynT7UcZJIqAp4Qn7Xv5tzQr41M/Xp0lxjLtKV7etvU+QobO09Np3Vwl5imw0Pc3esokiV72fGwcQI3VnWqEmjQR447HBkqpJL71c7Tdif1662vWB9RAV0gKcZj0U+Xd0Y0vQg2czFR4Y/WozgikKRCP9f/bqlRNfVp+Dp04b8jwf2MtO3X67CcgQ84SUfn458oH40lA6NylUgp2B3GQ2RBAWEeVVaWU6PxAagpdX34AWUzlPNWAJ3s/AuHFz0dgH2de3bY5cZwzbrZ4vL9roGA38QzsncEy2nKgrOxHc2YcvSVeasxyEI5vPVLkxGuhKkevD8tyO8OpdUBLXdCxZKGrJ1NT1MfBURGBC8Bg4dZCyio+9f1kaLazf+M8aLzbXIqppZsKigbjgZbVEhmpWMG/+CITPxOBdZsgCyIY/0oWVUf6qUz9mM2zBVNx7xECdtJp4ZyqdhrPatf+x07YI6Idnt7YPM35vCFewm27JyTd7Z1/Jm20czvxP5lg2HxvyXwkySpFgEycvFE1/IKn5JbQWt6Lxj/2FYatyNHVfACeWPr01+lWQubL23C78Za09au/2n1GGT7efjMEuf2SNsc/8JzjnlsTWjHb270FtOvFYW4r3bfC25mNVIkQQuK0d6+tQv5afpQ/YC59DSYfM2SLeqQOgBmjIwyg0Sv3gs+Q+PL36oOcHAtRAf7bqASdfEDqBspZtnITz/fRhT7poU0FijQN9XejdjBpfKJZVzXdyf2pcugH/IIfAHydTs/Yxq/PDPKd607UcsdKDVFMHQ6gL/D0ScAIl3WkI+A8sGyfbOn84Gi32y7zIHvkSYiWy0O0+IOvsmFNcBiJUA39VyMqff2t5FjWDuRWexO1LNrGwGWKSnlb9O8mc4McC7zSi5AZbmilMmP3EndmFV/Nm1o379WIi6j9jvDTA5j168UTPanlPN7mclJSCiWbGTeFj8CcJ0et4qHAl66UssASx3OWP11DpqGxC1SiJ0J0QkwcMN6Z/g98qPh8wKZl4zLl5wx+V55hKTs7VTxFKN7oZ3aHtIG6kZIikSgLJifdIyXTDW/a
DRzIimbkSQZRN7lLJNLkMn1WViaOCzANYpt1dg/2OZwuGFepiDa0KvvotsNWIJSDGz3mv0JQ3y8ACU99Pbd9+eEyO6TKcW2HKTgGCwx4L0LxhFnMG41EXFmRSAdnRC/pNPWuhVYJa/eNjJa/jXyRmaPalwR9K2OqGcvy4xiHRE+L0RhQ0c2mjEdmEyXztKdWq/18P/mJo8m2cY+SO5hzs/+mr6z3O3geUxVA5v0ACmE2c9NbDwXUuW7t0xvjF9DRfTpUgzFC8pO7xzcn4is8Xw8Z0OlZEIi120oMKnYaMuF+fzbJzYnOPOAgT5GgXltqo/m3OqZTILF0i7NlC8Xq01KiLX7AHWyigIIyyHZzSKsSzBfD5VKGaYs+4NGaTQn/AnEyMESXlkdq4qmbPe7+hb3xr99dwzXvVDzVRn5ZJTePsbrCuIWg0PhzBqY7CJqz/CoihyvnP2HUR2Rwhx0cz+WEwE1bTUtlD/uUP26AeZba3bp7SWBQx5/MFTMvqDvsH40vEdpd4rc2e3zxNBEvBqT2wqYxuKITbyqZldUbuIbbZ4EG/U4TysC/k1eJ+7sH9L2vOf8kbWeTAm3JFSGgIeRxOzjkCIe2e6GUhQ6XYjA7MB8wBwYFKw4DAhoEFCZag7bmiskeG/rVyM8Y7gYJmIzIBBS3B0CEmuP+dGKQ/N2gwPFvmn0ZjQICB9A=\"\n }],\n \"gateway_ip_configurations\": [{\n \"name\": \"appGatewayIpConfig\",\n \"subnet\": {\n \"id\": AppGW_Subnet_Id\n }\n }],\n \"frontend_ip_configurations\": [{\n \"name\": appgateway_public_frontip_name,\n \"public_ip_address\": {\n \"id\": Frontend_Public_IP_Configure_Id\n }\n }],\n \"frontend_ports\": [\n {\n \"name\": FrontPort_Http80,\n \"port\": 80\n },\n {\n \"name\": FrontPort_Https443,\n \"port\": 443\n },{\n \"name\": FrontPort_Http8080,\n \"port\": 8080\n }],\n\n#Application Gateway Backend Address Pools\n \"backend_address_pools\": [\n # Define Backend Pool for Agri Environment\n {\n \"name\": api_backend_pool,\n \"backend_addresses\": [{\n \"fqdn\": api_azure_fqdn\n }]\n }, {\n \"name\": ous_backend_pool,\n \"backend_addresses\": [{\n \"fqdn\": ous_azure_fqdn\n }]\n }\n ],\n\n# # Application Gateway Probes for Agri API and OUS Components\n \"probes\": [\n {\n \"name\": agri_ous_web_app_http_probe_name,\n \"protocol\": \"http\",\n \"path\": \"/api/_organisations/heartbeat\",\n \"interval\": 80,\n \"timeout\": 120,\n \"unhealthy_threshold\": 3,\n \"pick_host_name_from_backend_http_settings\": True,\n \"match\": {\n \"status_codes\": [ \"200-399\" ]\n }\n },\n {\n \"name\": agri_ous_web_app_https_probe_name,\n \"protocol\": \"https\",\n \"path\": \"/api/_organisations/heartbeat\",\n \"interval\": 80,\n \"timeout\": 120,\n \"unhealthy_threshold\": 3,\n \"pick_host_name_from_backend_http_settings\": True,\n \"match\": {\n \"status_codes\": [ \"200-399\" ]\n }\n },\n {\n \"name\": agri_api_web_app_http_probe_name,\n \"protocol\": \"http\",\n \"path\": \"/api\",\n \"interval\": 80,\n \"timeout\": 120,\n \"unhealthy_threshold\": 3,\n \"pick_host_name_from_backend_http_settings\": True,\n \"match\": {\n \"status_codes\": [ \"200-399\" ]\n }\n },\n {\n \"name\": agri_api_web_app_https_probe_name,\n \"protocol\": \"https\",\n \"path\": \"/api\",\n \"interval\": 80,\n \"timeout\": 120,\n \"unhealthy_threshold\": 3,\n \"pick_host_name_from_backend_http_settings\": True,\n \"match\": {\n \"status_codes\": [ \"200-399\" ]\n }\n }\n ],\n\n#Backend HTTP Collections for API and OUS components\n \"backend_http_settings_collection\": [\n {\n \"name\": appgateway_backend_http_settings_ous,\n \"port\": 80,\n \"protocol\": \"Http\",\n \"cookie_based_affinity\": \"Enabled\",\n \"request_timeout\": 360,\n \"pick_host_name_from_backend_address\": True,\n \"probe\": {\n \"id\": appgateway_id + \"/probes/\" + agri_ous_web_app_http_probe_name\n }\n },\n {\n \"name\": appgateway_backend_https_settings_ous,\n \"port\": 443,\n \"protocol\": \"Https\",\n \"cookie_based_affinity\": \"Enabled\",\n \"request_timeout\": 360,\n \"pick_host_name_from_backend_address\": True,\n \"probe\": {\n \"id\": appgateway_id + \"/probes/\" + agri_ous_web_app_https_probe_name\n }\n },\n {\n \"name\": 
appgateway_backend_http_settings_name,\n \"port\": 80,\n \"protocol\": \"Http\",\n \"cookie_based_affinity\": \"Enabled\",\n \"request_timeout\": 360,\n \"pick_host_name_from_backend_address\": True,\n \"probe\": {\n \"id\": appgateway_id + \"/probes/\" + agri_api_web_app_http_probe_name\n }\n },\n {\n \"name\": appgateway_backend_https_settings_name,\n \"port\": 443,\n \"protocol\": \"Https\",\n \"cookie_based_affinity\": \"Enabled\",\n \"request_timeout\": 360,\n \"pick_host_name_from_backend_address\": True,\n \"probe\": {\n \"id\": appgateway_id + \"/probes/\" + agri_api_web_app_https_probe_name\n }\n }],\n\n# HTTP Listeners Definition\n \"http_listeners\": [\n # Begin HTTP Listeners Definition For Agri Environment\n {\n \"name\": api_http_listener,\n \"frontend_ip_configuration\": {\n \"id\": appgateway_id + \"/frontendIPConfigurations/\" + appgateway_public_frontip_name\n },\n \"frontend_port\": {\n \"id\": appgateway_id + '/frontendPorts/' + FrontPort_Http80\n },\n \"protocol\": \"Http\",\n \"host_name\": api_agri_fqdn,\n \"ssl_certificate\": None\n },\n {\n \"name\": api_https_listener,\n \"frontend_ip_configuration\": {\n \"id\": appgateway_id + \"/frontendIPConfigurations/\" + appgateway_public_frontip_name\n },\n \"frontend_port\": {\n \"id\": appgateway_id + '/frontendPorts/' + FrontPort_Https443\n },\n \"protocol\": \"Https\",\n \"host_name\": api_agri_fqdn,\n \"ssl_certificate\": {\n \"id\": appgateway_id + \"/sslCertificates/\" + wildcast_ssl_cert_name\n }\n }, {\n \"name\": api_http8080_listener,\n \"frontend_ip_configuration\": {\n \"id\": appgateway_id + \"/frontendIPConfigurations/\" + appgateway_public_frontip_name\n },\n \"frontend_port\": {\n \"id\": appgateway_id + '/frontendPorts/' + FrontPort_Http8080\n },\n \"protocol\": \"Http\",\n \"host_name\": api_agri_fqdn,\n \"ssl_certificate\": None\n },\n # {\n # \"name\": ous_http_listener,\n # \"frontend_ip_configuration\": {\n # \"id\": appgateway_id + \"/frontendIPConfigurations/\" + appgateway_public_frontip_name\n # },\n # \"frontend_port\": {\n # \"id\": appgateway_id + '/frontendPorts/' + FrontPort_Http80\n # },\n # \"protocol\": \"Http\",\n # \"host_name\": ous_agri_fqdn,\n # \"ssl_certificate\": None\n # },\n {\n \"name\": ous_https_listener,\n \"frontend_ip_configuration\": {\n \"id\": appgateway_id + \"/frontendIPConfigurations/\" + appgateway_public_frontip_name\n },\n \"frontend_port\": {\n \"id\": appgateway_id + '/frontendPorts/' + FrontPort_Https443\n },\n \"protocol\": \"Https\",\n \"host_name\": ous_agri_fqdn,\n \"ssl_certificate\": {\n \"id\": appgateway_id + \"/sslCertificates/\" + wildcast_ssl_cert_name\n }\n }\n ],\n\n# URL Path Maps Definition\n \"url_path_maps\": [\n # Ous URL Path Maps\n # {\n # \"name\": ous_http_url_path_maps,\n # \"default_backend_address_pool\": {\n # \"id\": appgateway_id + '/backendAddressPools/' + ous_backend_pool\n # },\n # \"default_backend_http_settings\": {\n # \"id\": appgateway_id + '/backendHttpSettingsCollection/' + appgateway_backend_http_settings_ous\n # },\n # \"path_rules\": [{\n # \"name\": \"OusBankAccounts\",\n # \"paths\": [ '/api/_bank-accounts*' ],\n # \"backend_address_pool\": {\n # \"id\": appgateway_id + '/backendAddressPools/' + ous_backend_pool\n # },\n # \"backend_http_settings\": {\n # \"id\": appgateway_id + '/backendHttpSettingsCollection/' + appgateway_backend_http_settings_ous\n # }\n # }]\n # },\n {\n \"name\": ous_https_url_path_maps,\n \"default_backend_address_pool\": {\n \"id\": appgateway_id + '/backendAddressPools/' + ous_backend_pool\n 
},\n \"default_backend_http_settings\": {\n \"id\": appgateway_id + '/backendHttpSettingsCollection/' + appgateway_backend_https_settings_ous\n },\n \"path_rules\": [{\n \"name\": \"OusBankAccounts\",\n \"paths\": [ '/api/_bank-accounts*' ],\n \"backend_address_pool\": {\n \"id\": appgateway_id + '/backendAddressPools/' + ous_backend_pool\n },\n \"backend_http_settings\": {\n \"id\": appgateway_id + '/backendHttpSettingsCollection/' + appgateway_backend_https_settings_ous\n }\n }]\n },\n\n # Api URL Path Maps\n {\n \"name\": api_http_url_path_maps,\n \"default_backend_address_pool\": {\n \"id\": appgateway_id + '/backendAddressPools/' + api_backend_pool\n },\n \"default_backend_http_settings\": {\n \"id\": appgateway_id + '/backendHttpSettingsCollection/' + appgateway_backend_http_settings_name\n },\n \"path_rules\": [\n\n {\n \"name\": \"OusBankAccounts\",\n \"paths\": [ '/api/_bank-accounts*' ],\n \"backend_address_pool\": {\n \"id\": appgateway_id + '/backendAddressPools/' + ous_backend_pool\n },\n \"backend_http_settings\": {\n \"id\": appgateway_id + '/backendHttpSettingsCollection/' + appgateway_backend_http_settings_ous\n }\n }, {\n \"name\": \"OusUsers\",\n \"paths\": [ '/api/_users*' ],\n \"backend_address_pool\": {\n \"id\": appgateway_id + '/backendAddressPools/' + ous_backend_pool\n },\n \"backend_http_settings\": {\n \"id\": appgateway_id + '/backendHttpSettingsCollection/' + appgateway_backend_http_settings_ous\n }\n }, {\n \"name\": \"OusOrganisations\",\n \"paths\": [ '/api/_organisations*' ],\n \"backend_address_pool\": {\n \"id\": appgateway_id + '/backendAddressPools/' + ous_backend_pool\n },\n \"backend_http_settings\": {\n \"id\": appgateway_id + '/backendHttpSettingsCollection/' + appgateway_backend_http_settings_ous\n }\n },\n\n\n {\n \"name\": \"apiV1\",\n \"paths\": [ '/api/*' ],\n \"backend_address_pool\": {\n \"id\": appgateway_id + '/backendAddressPools/' + api_backend_pool\n },\n \"backend_http_settings\": {\n \"id\": appgateway_id + '/backendHttpSettingsCollection/' + appgateway_backend_http_settings_name\n }\n },\n {\n \"name\": \"api\",\n \"paths\": [ '/*' ],\n \"backend_address_pool\": {\n \"id\": appgateway_id + '/backendAddressPools/' + api_backend_pool\n },\n \"backend_http_settings\": {\n \"id\": appgateway_id + '/backendHttpSettingsCollection/' + appgateway_backend_http_settings_name\n }\n }]\n },\n {\n \"name\": api_https_url_path_maps,\n \"default_backend_address_pool\": {\n \"id\": appgateway_id + '/backendAddressPools/' + api_backend_pool\n },\n \"default_backend_http_settings\": {\n \"id\": appgateway_id + '/backendHttpSettingsCollection/' + appgateway_backend_https_settings_name\n },\n \"path_rules\": [\n\n {\n \"name\": \"OusBankAccounts\",\n \"paths\": [ '/api/_bank-accounts*' ],\n \"backend_address_pool\": {\n \"id\": appgateway_id + '/backendAddressPools/' + ous_backend_pool\n },\n \"backend_http_settings\": {\n \"id\": appgateway_id + '/backendHttpSettingsCollection/' + appgateway_backend_https_settings_ous\n }\n }, {\n \"name\": \"OusUsers\",\n \"paths\": [ '/api/_users*' ],\n \"backend_address_pool\": {\n \"id\": appgateway_id + '/backendAddressPools/' + ous_backend_pool\n },\n \"backend_http_settings\": {\n \"id\": appgateway_id + '/backendHttpSettingsCollection/' + appgateway_backend_https_settings_ous\n }\n }, {\n \"name\": \"OusOrganisations\",\n \"paths\": [ '/api/_organisations*' ],\n \"backend_address_pool\": {\n \"id\": appgateway_id + '/backendAddressPools/' + ous_backend_pool\n },\n \"backend_http_settings\": {\n \"id\": 
appgateway_id + '/backendHttpSettingsCollection/' + appgateway_backend_https_settings_ous\n }\n },\n\n {\n \"name\": \"apiV1\",\n \"paths\": [ '/api/' ],\n \"backend_address_pool\": {\n \"id\": appgateway_id + '/backendAddressPools/' + api_backend_pool\n },\n \"backend_http_settings\": {\n \"id\": appgateway_id + '/backendHttpSettingsCollection/' + appgateway_backend_https_settings_name\n }\n },{\n \"name\": \"api\",\n \"paths\": [ '/*' ],\n \"backend_address_pool\": {\n \"id\": appgateway_id + '/backendAddressPools/' + api_backend_pool\n },\n \"backend_http_settings\": {\n \"id\": appgateway_id + '/backendHttpSettingsCollection/' + appgateway_backend_https_settings_name\n }\n }]\n }\n ],\n\n# # Ruting Rules Definitions\n \"request_routing_rules\": [\n # ous path based rule\n # {\n # \"name\": ous_http_path_based_rule,\n # \"rule_type\": \"PathBasedRouting\",\n # \"http_listener\": {\n # \"id\": appgateway_id + '/httpListeners/' + ous_http_listener\n # },\n # \"url_path_map\": {\n # \"id\": appgateway_id + '/urlPathMaps/' + ous_http_url_path_maps\n # }\n # },\n {\n \"name\": ous_https_path_based_rule,\n \"rule_type\": \"PathBasedRouting\",\n \"http_listener\": {\n \"id\": appgateway_id + '/httpListeners/' + ous_https_listener\n },\n \"url_path_map\": {\n \"id\": appgateway_id + '/urlPathMaps/' + ous_https_url_path_maps\n }\n },\n\n # api path based rule\n # {\n # \"name\": api_http_path_based_rule,\n # \"rule_type\": \"PathBasedRouting\",\n # \"http_listener\": {\n # \"id\": appgateway_id + '/httpListeners/' + api_http_listener\n # },\n # \"url_path_map\": {\n # \"id\": appgateway_id + '/urlPathMaps/' + api_http_url_path_maps\n # }\n # },\n {\n \"name\": api_https_path_based_rule,\n \"rule_type\": \"PathBasedRouting\",\n \"http_listener\": {\n \"id\": appgateway_id + '/httpListeners/' + api_https_listener\n },\n \"url_path_map\": {\n \"id\": appgateway_id + '/urlPathMaps/' + api_https_url_path_maps\n }\n },\n {\n \"name\": api_http8080_path_based_rule,\n \"rule_type\": \"PathBasedRouting\",\n \"http_listener\": {\n \"id\": appgateway_id + '/httpListeners/' + api_http8080_listener\n },\n \"url_path_map\": {\n \"id\": appgateway_id + '/urlPathMaps/' + api_http_url_path_maps\n }\n }\n\n ]\n\n }\n )\n print_item(async_ag_creation.result())\n\n async_get_frontend_public_IP_Addr = network_client.public_ip_addresses.get(\n resource_group,\n resource_group + \"AppGatewayFrontendPublicIPAddr\",\n )\n\n print(\"the dns name from get public ip address is \" + str(async_get_frontend_public_IP_Addr.dns_settings))\n print(\"the dns name from get public ip address is \" + str(async_get_frontend_public_IP_Addr.dns_settings.fqdn))\n return async_get_frontend_public_IP_Addr.dns_settings.fqdn\n\n\ndef print_item(group):\n \"\"\"Print an Azure object instance.\"\"\"\n print(\"\\tName: {}\".format(group.name))\n print(\"\\tId: {}\".format(group.id))\n print(\"\\tLocation: {}\".format(group.location))\n print(\"\\tTags: {}\".format(group.tags))\n if hasattr(group, 'properties'):\n print_properties(group.properties)\n\ndef print_properties(props):\n \"\"\"Print a ResourceGroup properties instance.\"\"\"\n if props and props.provisioning_state:\n print(\"\\tProperties:\")\n print(\"\\t\\tProvisioning State: {}\".format(props.provisioning_state))\n print(\"\\n\\n\")\n" }, { "alpha_fraction": 0.7274073958396912, "alphanum_fraction": 0.7307407259941101, "avg_line_length": 57.69565200805664, "blob_id": "1b0e4b40359ccb7c5d97ce5cf51051f5ee01f542", "content_id": "80dbe11a1fc8d1b374fa2920c50edc5483862f0e", 
"detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2700, "license_type": "no_license", "max_line_length": 219, "num_lines": 46, "path": "/Azure/upload_code_to_azure_storage_account.py", "repo_name": "APAC-DevOps/iac", "src_encoding": "UTF-8", "text": "#this Python script uploads the contents in the directory where you execute this script onto your Azure storage account file share. It is location where you run this script that matters, not where you saved this script.\n\n#Prerequisite\n#---Python 3.x\n#---Azure SDK for Python 3.x\n#---A Storage Account in Your Azure Account\n\nfrom azure.storage.file import FileService\nfrom azure.storage.file import ContentSettings\nimport os\n\n\nomnipresence_storage_account_name = 'cloudinfraprovision'\nomnipresence_storage_account_key = 'WVIc4TiKPDLxjtIWLpnk5fITbI6AFoZahvfTz4SgSjyP+fE3/qwgSgIo/UNavXPPjQDWrCfT4da6vnL209pThQ=='\nomnipresence_storage_file_share = 'azure-provision' #Azure Storage Account File Share Name allows only lowercase letters, numbers and hypen.\nremote_dir_path = ''\n\n\n#Initialize an Azure Storage Account File Service Instance\nomnipresence_storage_account = FileService(account_name=omnipresence_storage_account_name, account_key=omnipresence_storage_account_key)\n\n#test if your storage file share exists on Azure or not, if not, create it\nif (not omnipresence_storage_account.exists(omnipresence_storage_file_share)):\n omnipresence_storage_account.create_share(omnipresence_storage_file_share, quota='10')\n\n#walk through current directory, make directorys under Azure File Share and upload local files onto your Azure storage account File Share except for hiden files and directory\nfor base_dir, dirs, file_names in os.walk(\".\", topdown=True):\n file_names = [ f for f in file_names if not f[0] == '.'] #parse out files whose name begins with a dot\n dirs[:] = [d for d in dirs if not d[0] == '.'] #parse out directorys whose name begins with a dot\n for local_file_name in file_names:\n remote_file_name = os.path.join(base_dir, local_file_name)[2:]\n local_file_name = remote_file_name\n if (omnipresence_storage_account.exists(omnipresence_storage_file_share)):\n omnipresence_storage_account.create_file_from_path(\n omnipresence_storage_file_share,\n None, # We want to create files under current remote directory, so we specify None for the directory_name\n remote_file_name,\n local_file_name,\n content_settings=ContentSettings(content_type='file'))\n print('Uploaded the file -', local_file_name, '\\n')\n\n for directory in dirs:\n remote_dir_path = os.path.join(base_dir, directory)[2:]\n if (not omnipresence_storage_account.exists(omnipresence_storage_file_share, directory_name=remote_dir_path)):\n omnipresence_storage_account.create_directory(omnipresence_storage_file_share, remote_dir_path, metadata=None, fail_on_exist=False, timeout=None)\n print('Created the remote folder -', os.path.join(base_dir,directory)[2:])\n" }, { "alpha_fraction": 0.6671226024627686, "alphanum_fraction": 0.6713113188743591, "avg_line_length": 40.04561233520508, "blob_id": "4f9b49d9c08740f484ee1bffba11d4a1b3bb15b0", "content_id": "6e44f324dfe86095ce708afb0107d152445d1f0c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 11698, "license_type": "no_license", "max_line_length": 232, "num_lines": 285, "path": "/Azure/100-vm/python/100-vm.py", "repo_name": "APAC-DevOps/iac", "src_encoding": "UTF-8", "text": "\"\"\"Create and manage virtual 
machines.\nThis script expects that the following environment vars are set:\nAZURE_TENANT_ID: your Azure Active Directory tenant id or domain\nAZURE_CLIENT_ID: your Azure Active Directory Application Client ID\nAZURE_CLIENT_SECRET: your Azure Active Directory Application Secret\nAZURE_SUBSCRIPTION_ID: your Azure Subscription Id\n\"\"\"\n\nimport os.path\nimport json\nimport argparse\nimport sys\nfrom azure.common.credentials import ServicePrincipalCredentials\nfrom azure.mgmt.resource import ResourceManagementClient\nfrom azure.mgmt.resource.resources.models import DeploymentMode\nfrom azure.mgmt.dns import DnsManagementClient\nfrom azure.mgmt.network import NetworkManagementClient\nfrom azure.mgmt.compute import ComputeManagementClient\nfrom azure.mgmt.authorization import AuthorizationManagementClient\nimport uuid\n\n# This python script was developed by Jianhua WU ([email protected]) for the\n# provisioing of Azure VM on a fresh Azure Subscription.\nparser = argparse.ArgumentParser(\n description='parse option arguments passed to this script',\n epilog='Author Jianhua WU',\n prefix_chars='-'\n )\n \nparser.add_argument('--DnsZone', nargs='?', default = 'openclouddevops.org', const = 'openclouddevops.org', choices=['openclouddevops.org'], help=\"your Azure DNS Zone which you intend to register your VM's DNS record\")\nparser.add_argument('--RgName', required = True, help='your desired Azure Resource Group Name') # the azure resource group for deployment e.g. cystine-ckm-dev-uscentral\nparser.add_argument('--VmSize', nargs='?', default = 'Standard_DS2_v2', const = 'Standard_DS2_v2', help='specify the Size of your Azure VM')\nparser.add_argument('--DnsPrefixName', required = True, help='specify a DNS prefix name for your VM') #e.g. ckm.ocs.dev.usc\nparser.add_argument('--NetworkPrefix', required = True, help='specify the first two octets of your vNet pool address') #e.g. ckm.ocs.dev.usc\nparser.add_argument('--Location', nargs='?', default = 'southeastasia', const = 'southeastasia', help='specify Azure location for resource deployment')\n\nparsed_argu = parser.parse_args(sys.argv[1:])\n\nwujianhua_subscription_id = os.environ['AZURE_SUBSCRIPTION_ID']\nwujianhua_dns_zone = parsed_argu.DnsZone\nwujianhua_resource_group = parsed_argu.RgName\n#wujianhua_dns_zone_resource_group = parsed_argu.RgDnsZone\nwujianhua_vmSize = parsed_argu.VmSize\nwujianhua_dns_prefix = parsed_argu.DnsPrefixName\nwujianhua_network_prefix = parsed_argu.NetworkPrefix\nwujianhua_location = parsed_argu.Location\n\n# Create Azure Service Principal Crendential for the Authentication of Automation Code Below\nAzureServicePrincipalcredentials = ServicePrincipalCredentials(\n client_id=os.environ['AZURE_CLIENT_ID'],\n secret=os.environ['AZURE_CLIENT_SECRET'],\n tenant=os.environ['AZURE_TENANT_ID']\n)\n\n# Create Azure Resource Management Client handler for Resource Provisioin\nclient = ResourceManagementClient(AzureServicePrincipalcredentials, wujianhua_subscription_id)\n\n\nmsg = \"\\nInitializing the provisioning of Azure VM with subscription id: {}, resource group: {}\" \\\n \"\\nand public key located at: {}...\\n\\n\"\nprint(msg.format(wujianhua_subscription_id, wujianhua_resource_group, wujianhua_location))\n\n\n# Azure uses Resoruce Group for cloud resource organization. 
Everything which is\n# going to be provisioined by this script would be located in the resource group\n# wujianhua_resource_group at the location wujianhua_location.\n# The value of the variables wujianhua_resource_group and wujianhua_location comes from the argParse above\n\ndef wujianhua_rg(resource_group='openCloudDevOps', location='southeastasia'):\n print(\"Beginning the provisioing of the resource group - {}\\n\\n\".format(resource_group))\n client.resource_groups.create_or_update(\n resource_group,\n {\n 'location': location\n }\n )\n print(\"the resource group - {} has been provisioned successfully\\n\\n\".format(resource_group))\n\n\ndef wujianhua_vNet(AzureServicePrincipalcredentials=AzureServicePrincipalcredentials, subscription_id=wujianhua_subscription_id, resource_group=wujianhua_resource_group, location=wujianhua_location):\n print(\"Beginning the provisioing of the virtual network - {}\\n\\n\".format(wujianhua_resource_group))\n network_client = NetworkManagementClient(\n AzureServicePrincipalcredentials,\n subscription_id\n )\n\n vnet_name = resource_group + 'vNet' # the name of virtual network\n\n async_virtual_network = network_client.virtual_networks.create_or_update(\n resource_group, # the name of resource group within which this resource would be provisioned\n vnet_name,\n {\n 'location': location,\n 'address_space': {\n 'address_prefixes': [wujianhua_network_prefix + '.0.0/16'] # Of course, you can parameterize the CIDR value of your Virtual Networks\n }\n }\n )\n\n async_virtual_network.wait()\n\n# create a common subnet for general purpose. This is not REQUIRED by Azure, however,\n# normally, you would create a subnet for general purpose in your vNet/vNets\n async_common_subnet = network_client.subnets.create_or_update(\n resource_group,\n vnet_name,\n \"wujianhuaCommon\", # the name of this subnet. You don't have to hard code this value\n {\n 'address_prefix': wujianhua_network_prefix + '.0.0/24'\n }\n )\n\n async_common_subnet.wait()\n\n# you VM will be in this subnet\n async_vm_subnet = network_client.subnets.create_or_update(\n resource_group, # the name of resource group within which this resource would be provisioned\n vnet_name, # the name of virtual network\n \"wujianhuaVM\", # the name of this subnet. we will put our VM in this subnet\n {\n 'address_prefix': wujianhua_network_prefix + '.1.0/24'\n }\n )\n async_vm_subnet.wait()\n\n# in a setup where there is Azure Application Gateway in front of your VM, then, in general, we would\n# create a seperated vNet subnet for accomendating Application Gateway resource\n async_appGateway_subnet = network_client.subnets.create_or_update(\n resource_group,\n vnet_name,\n \"wujianhuaAppGateway\",\n {\n 'address_prefix': wujianhua_network_prefix + '.2.0/24'\n }\n )\n async_appGateway_subnet.wait()\n\n# this is a special subnet called GatewaySubnet. 
It has to be named this way.\n# For detail, pls refer to Microsoft's online Azure documentation at:\n# https://docs.microsoft.com/en-us/office365/enterprise/designing-networking-for-microsoft-azure-iaas#step-5-determine-the-subnets-within-the-vnet-and-the-address-spaces-assigned-to-each\n async_gateway_subnet = network_client.subnets.create_or_update(\n resource_group,\n vnet_name,\n \"GatewaySubnet\",\n {\n 'address_prefix': wujianhua_network_prefix + '.3.0/24'\n }\n )\n async_gateway_subnet.wait()\n\n# get the id of the subnet for virtual machines\n vm_subnet_id = async_vm_subnet.result().id\n return vm_subnet_id\n\n\ndef wujianhua_NetworkInt(AzureServicePrincipalcredentials=AzureServicePrincipalcredentials, subscription_id=wujianhua_subscription_id, resource_group=wujianhua_resource_group, location=wujianhua_location, vm_subnet_id=None):\n # provision a network interface for VM.\n print(\"Beginning the provisioing of the network interface - {}\\n\\n\".format(wujianhua_resource_group))\n network_client = NetworkManagementClient(\n AzureServicePrincipalcredentials,\n subscription_id\n )\n\n public_ip_address_name = \"wujianhuaPubIPAddress\" # the name of public IP address\n\n async_public_ip_address = network_client.public_ip_addresses.create_or_update(\n resource_group,\n public_ip_address_name,\n {\n \"location\": location,\n \"sku\": {\n \"name\": \"Basic\"\n },\n \"public_ip_allocation_method\": \"Dynamic\",\n \"public_ip_address_version\": \"IPv4\"\n\n }\n )\n\n async_public_ip_address.wait()\n\n publicIPAddress = network_client.public_ip_addresses.get(\n resource_group,\n public_ip_address_name,\n )\n\n async_network_interface = network_client.network_interfaces.create_or_update(\n resource_group,\n 'wujianhuaint',\n {\n \"location\": location,\n \"ip_configurations\": [{\n \"name\": \"wujianhua\",\n \"subnet\": {\n \"id\": vm_subnet_id\n },\n \"public_ip_address\": publicIPAddress\n }]\n }\n )\n\n async_network_interface.wait()\n\n network_int_id = async_network_interface.result().id\n return network_int_id\n\n\ndef wujianhua_VM(AzureServicePrincipalcredentials=AzureServicePrincipalcredentials, subscription_id=wujianhua_subscription_id, resource_group=wujianhua_resource_group, location=wujianhua_location, network_int_id=None, vm_size=None):\n print(\"Beginning the provisioing of the virtual machine in the resource group - {}\\n\\n\".format(wujianhua_resource_group))\n print(\"the id of network interface is:\" + network_int_id)\n compute_client = ComputeManagementClient(\n AzureServicePrincipalcredentials,\n wujianhua_subscription_id\n )\n\n async_vm = compute_client.virtual_machines.create_or_update(\n resource_group, # the name of resource group within which this resource would be provisioned\n \"wujianhua\" + \"-openCloudDevops\", # the name of your virtual machines\n {\n 'location': location,\n 'tags': { 'Name':'wujianhuaVM' },\n 'hardware_profile': {\n 'vm_size': vm_size\n },\n 'storage_profile': {\n \"image_reference\": {\n \"publisher\": \"Canonical\",\n \"offer\": \"UbuntuServer\",\n \"sku\": \"18.04-LTS\",\n \"version\": \"latest\"\n },\n 'os_disk': {\n 'name': \"wujianhuaVM-managed-disk\",\n \"caching\": \"ReadWrite\",\n \"create_option\": \"FromImage\",\n 'disk_size_gb': \"100\",\n \"managed_disk\": {\n \"storage_account_type\": \"Premium_LRS\"\n }\n }\n },\n \"os_profile\": {\n \"computer_name\": \"wujianhuavm\",\n \"admin_username\": \"Af123987123987\",\n \"admin_password\": \"Af123987123987\"\n },\n \"network_profile\": {\n \"network_interfaces\": [ {\n \"id\": 
network_int_id\n } ]\n }\n }\n )\n\n async_vm.wait()\n\nif __name__ == \"__main__\":\n wujianhua_rg(\n resource_group = wujianhua_resource_group,\n location = wujianhua_location\n )\n\n vm_subnet_id = wujianhua_vNet(\n AzureServicePrincipalcredentials = AzureServicePrincipalcredentials,\n subscription_id = wujianhua_subscription_id,\n resource_group = wujianhua_resource_group,\n location = wujianhua_location\n )\n\n network_int_id = wujianhua_NetworkInt(\n AzureServicePrincipalcredentials = AzureServicePrincipalcredentials,\n subscription_id = wujianhua_subscription_id,\n resource_group = wujianhua_resource_group,\n location = wujianhua_location,\n vm_subnet_id = vm_subnet_id\n )\n\n wujianhua_VM(\n AzureServicePrincipalcredentials = AzureServicePrincipalcredentials,\n subscription_id = wujianhua_subscription_id,\n resource_group = wujianhua_resource_group,\n location = wujianhua_location,\n network_int_id = network_int_id,\n vm_size = wujianhua_vmSize\n )\n" }, { "alpha_fraction": 0.8013493418693542, "alphanum_fraction": 0.808845579624176, "avg_line_length": 110.16666412353516, "blob_id": "f052e7ffc500e263b9ca0a3b396814b48b836b05", "content_id": "4878f52a2db25c9877b4e9a684683ad2ac5fb0b9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1334, "license_type": "no_license", "max_line_length": 623, "num_lines": 12, "path": "/README.md", "repo_name": "APAC-DevOps/iac", "src_encoding": "UTF-8", "text": "# AzureIaaC\nSample Code for Managing Azure Cloud via Infrastructure As Code\n\nAuthor: Jianhua Wu\nInitial Publish Date: April 6th, 2018\n\nThe purpose of this repo is to share commercial-grade code that can be adopted into your business environment instantly, without much effort. While third-party tools like Ansible, Terraform, and the like taste as sweet as sugar, their limitations in supporting public cloud infrastructure, and the lack of documentation spelling out those limitations, have destroyed many lovely weekends of DevOps Engineers and Software Developers. Hence, here comes this project.\n\nCode in this repo is purely plain-text-file based. You will find programs written in Python and JavaScript, scripts written in bash, and ARM templates in JSON format. There is no SECRET, nothing is hidden away from you. Code here is not just code; these are self-sufficient tools for addressing computer engineers' daily work and for helping business development. They can be run from your local computer's terminal, and they can be easily integrated into your CI/CD tools like Jenkins or Bamboo, or even into your own programs. Code here is free for public access, and it can be used in your commercial environment freely.\n\n\n1. 100-vm - this subfolder contains the python code and ARM templates for provisioning an Azure VM from scratch\n" } ]
4
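The 100-vm script in the record above repeatedly refers to argument parsing outside this excerpt ("come from the argParse above"), and it uses a `client`, `NetworkManagementClient`, and `ComputeManagementClient` that must be constructed somewhere. Below is a minimal sketch of what that missing preamble could look like with the same 2018-era Azure SDK the script imports from; every flag name and default value here is an assumption for illustration, not the repo's actual code.

```python
# Hypothetical preamble for the 100-vm script above; all flag names and defaults
# are assumptions. Only the SDK classes themselves appear in the original code.
import argparse

from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.resource import ResourceManagementClient

parser = argparse.ArgumentParser(description='Provision an Azure VM from scratch')
parser.add_argument('--subscription-id', required=True)
parser.add_argument('--resource-group', default='openCloudDevOps')
parser.add_argument('--location', default='southeastasia')
parser.add_argument('--network-prefix', default='10.0')   # expanded to 10.0.0.0/16 etc.
parser.add_argument('--vm-size', default='Standard_B2s')
parser.add_argument('--client-id', required=True)          # service principal app id
parser.add_argument('--secret', required=True)             # service principal password
parser.add_argument('--tenant', required=True)             # Azure AD tenant id
args = parser.parse_args()

wujianhua_subscription_id = args.subscription_id
wujianhua_resource_group = args.resource_group
wujianhua_location = args.location
wujianhua_network_prefix = args.network_prefix
wujianhua_vmSize = args.vm_size

AzureServicePrincipalcredentials = ServicePrincipalCredentials(
    client_id=args.client_id, secret=args.secret, tenant=args.tenant
)
# 'client' is the ResourceManagementClient that wujianhua_rg() calls into.
client = ResourceManagementClient(AzureServicePrincipalcredentials, wujianhua_subscription_id)
```

With a preamble along these lines, the whole provisioning chain could be driven from a terminal or a CI job with a single command such as `python provision.py --subscription-id ... --client-id ... --secret ... --tenant ...`.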
RicHz13/PythonExercices
https://github.com/RicHz13/PythonExercices
a71d33992f75e315a921749fb3689010cc2704b1
36d0800e82f4902ca9385095fd405ca779ed3405
0f967e799e21e88eb7b2d4a08723a88f14a7cba0
refs/heads/master
2021-02-05T22:05:50.264411
2020-03-09T18:40:32
2020-03-09T18:40:32
243,840,156
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5921052694320679, "alphanum_fraction": 0.6907894611358643, "avg_line_length": 18.125, "blob_id": "97229baa4560845268e663ba7c81048d2bb1dcf8", "content_id": "2fb2f2217cf353a6241c0bac02995a896d21db38", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 152, "license_type": "no_license", "max_line_length": 31, "num_lines": 8, "path": "/C16separarCadenasDeTexto.py", "repo_name": "RicHz13/PythonExercices", "src_encoding": "UTF-8", "text": "string1 = 'wework'\n\nprint(string1[0:4])\nprint(string1[2:])\nprint(string1[:3])\nprint(string1[0:6:2])\nprint(string1[::-1])\n#print(string1[start:end:step])" }, { "alpha_fraction": 0.7082152962684631, "alphanum_fraction": 0.7082152962684631, "avg_line_length": 30.909090042114258, "blob_id": "0284e2c89bee4e0210b5e6823aebd142a2a01ca9", "content_id": "7f94d39d7f76757623b759d88b04cd36b300c700", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 357, "license_type": "no_license", "max_line_length": 86, "num_lines": 11, "path": "/C34ManejoDeArchivos.py", "repo_name": "RicHz13/PythonExercices", "src_encoding": "UTF-8", "text": "#En Python también podemos leer y escribir archivos del sistema con la función \"open\".\n#Existen varios modos en los que podemos manejar archivos\n\n # r = leer\n # w = escribir\n # a = añadir\n # r+ = leer y escribir\n\n#el keyword \"with\" nos permite manejar el contexto al trabajar con archivos\n\n#se debe cerrar el archivo con el método \"close\"\n\n\n" }, { "alpha_fraction": 0.7875264286994934, "alphanum_fraction": 0.7917547821998596, "avg_line_length": 46.29999923706055, "blob_id": "4030edd2a7cf5abaca7e751419f4bb047fe81bdd", "content_id": "4a5cc4f94ac87ecf068cb45db138d67cc06180d7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 963, "license_type": "no_license", "max_line_length": 214, "num_lines": 20, "path": "/C15manejoDeStrings.py", "repo_name": "RicHz13/PythonExercices", "src_encoding": "UTF-8", "text": "#Estos son algunos de los métodos para trabajar con strings\n#upper cambia a mayuscular toda la cadena\n#isupper verifica si la cadena está en mayúscula\n#lower cambia a minusculas toda la cadena\n#islower verifica si la cadena está en minúscula\n#find busca el índice en una palabra (número del caracter)\n#isdigit verifica si toda la cadena tiene sólo dígitos\n#endswith verifica si termina con un caracter/palabra/oración en específico\n#startwith verifica si empieza con un caracter/palabra/oración en específico\n#split corta las palabras de la cadena en palabras (si no se le mandan parametros), si se mandan, espera un letra y un número, la letra indica donde se cortará la cadena y el número en cuantas secciones se cortará)\n#join Junta/une los valores de lista-tupla-diccionario-string con un separador.\n\n\nstring = 'wework'\nprint(string[1])\nprint(len(string))\n#print[len(string) - 1]\n\nstring2 = \"Betsy tiene problemas con el SAT\"\nstring2.split()\n" }, { "alpha_fraction": 0.7189409136772156, "alphanum_fraction": 0.7474541664123535, "avg_line_length": 34.14285659790039, "blob_id": "35879caee80f0773b7384f7b60cad4fec85d9f77", "content_id": "c94ee2266835f1d209f54e7197f944211467c286", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 498, "license_type": "no_license", "max_line_length": 100, "num_lines": 14, "path": "/C32ComprensionDiccionario.py", "repo_name": 
"RicHz13/PythonExercices", "src_encoding": "UTF-8", "text": "#Pytohn permite crear e inicializar listas y diccionarios con una sintanxis más natural\n#Convierte una secuecia en otra\n#En resumén sirve para evitar declarar funciones que se pueden realizar en una declaración, haciendo\n#que el código sea más legible.\n#Esto es conocido como Sintactic Sugar o Azúcar Sintáctico\n\npares = [x for x in range(100) if x % 2 == 0]\nprint(pares)\n\nnones = [x for x in range(100) if x % 2 != 0]\nprint(nones)\n\ncuadrados = {x: x**2 for x in range(100)}\nprint(cuadrados)" }, { "alpha_fraction": 0.6401816010475159, "alphanum_fraction": 0.660612940788269, "avg_line_length": 25.66666603088379, "blob_id": "ee0bb1e2a60df1eee91f1b5d7950b1ffb837d73d", "content_id": "bc6b7873f9f1e87de6be5170dbc3705d4688bc79", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 887, "license_type": "no_license", "max_line_length": 116, "num_lines": 33, "path": "/C17ciclosFor.py", "repo_name": "RicHz13/PythonExercices", "src_encoding": "UTF-8", "text": "#range nos permite generar un rango a partir de un número. Al igual que el corte de las cadenas (ver notas clase 16)\n#podemos decir de a partir de que número, hasta que número y cada cuanto), en donde no son obligatorios todos los\n#parámetros\n\na = range(3, 30, 3)\na = list(a)\nprint(a)\n\nfor i in range(5):\n print(i)\n\n#for loop\n #se puede utilizar para recorrer strings (una string es una secuencia)\n #Se necesita el keyword in\n #Si se requiere salir antes de una iteración se utiliza el keyword \"break\"\n #Si se requiere pasar a la siguiente iteración se utiliza el keyword \"continue\"\n\nstring = 'ricardo'\n\nfor letter in string:\n print(letter)\n\nfor r in range(30): #ejemplo de \"continue\"\n if r % 3 != 0:\n continue\n else:\n print(r**2)\n\nfor b in range(30): #ejemplo de \"break\"\n if b % 3 == 0:\n print(b)\n elif b == 22:\n break\n\n" }, { "alpha_fraction": 0.6920199394226074, "alphanum_fraction": 0.6932668089866638, "avg_line_length": 29.884614944458008, "blob_id": "67306a05d7e23c8080d519e2dcb0c4a0537213ce", "content_id": "8b522a06852224993eb6d82883e79842c1d1392d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 815, "license_type": "no_license", "max_line_length": 96, "num_lines": 26, "path": "/C38Decoradores.py", "repo_name": "RicHz13/PythonExercices", "src_encoding": "UTF-8", "text": "#Un decorador es una función que recibe otra función y regresa una 3er función\n#Para reconocer un decorador, puedes ver que tiene un arroba sobre la declaración de una función\n#Útil para definir si una función debe ejecutarse o no.\n #Por ejemplo:\n #En servidores web, existen ciertas funciones que nada más deben ejecutarse si un usuario\n #se encuentra autenticado\n#NOTA: buscar más información de docoradores\n\ndef protected(func):\n def wrapper(password):\n if password == 'platzi':\n return func()\n else:\n print('La contraseña es incorrecta')\n \n return wrapper\n\n\n@protected\ndef protected_func():\n print('Tu contraseña es correcta')\n\nif __name__ == '__main__':\n password = str(input('Ingresa tu contraseña: '))\n\n protected_func(password)" }, { "alpha_fraction": 0.5753012299537659, "alphanum_fraction": 0.5873494148254395, "avg_line_length": 14.136363983154297, "blob_id": "d2eaea0d5c1a461e600530521a521841cdb85edc", "content_id": "3481be49c707abfd6a55a59a6263397d20eefe9b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": 
"Python", "length_bytes": 336, "license_type": "no_license", "max_line_length": 43, "num_lines": 22, "path": "/estructurasCondicionales.py", "repo_name": "RicHz13/PythonExercices", "src_encoding": "UTF-8", "text": "#operadores relacionales\n# == es igual a\n# != es diferente a\n# > mayor que\n# >= mayor igual que\n# < menor que\n# <= menor igual que\n\n#Operadores lógicos\n# and\n# or\n# not\n\n\ndef say_hello(age):\n age = int(input('¿Cuál es tu edad?: '))\n if age > 18:\n print('hola adulto')\n else:\n print('hola niño')\n\nsay_hello(45)" }, { "alpha_fraction": 0.6693306565284729, "alphanum_fraction": 0.6853147149085999, "avg_line_length": 40.70833206176758, "blob_id": "a36c82d8e5cbff5c2468ff3ee37672dcdb405c03", "content_id": "7eec64c088c9077d3ce930bd72b8a1af68600e9b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1014, "license_type": "no_license", "max_line_length": 99, "num_lines": 24, "path": "/C33ManejoDeErrores.py", "repo_name": "RicHz13/PythonExercices", "src_encoding": "UTF-8", "text": "#Cuando se avienta (throw) un error, si el error no se \"atrapa\", entonces el programa se detiene\n#hay veces que queremos evitar este comportamiento porque sabemos como arreglar el error.\n#Para manejar el error se utilizan los keywords \"try/except\"\n#Consta de 4 partes:\n # 1 Try: es el código que se tratará de ejecutar y que estará protegido si se produce un error.\n # 2 Except: Es el código que se ejecutará si el código del \"try\" genera un error.\n # 3 Else: Si no se produce un error en el \"try\", la ejecución del código seguirá adelante\n # 4 Finally: Se ejecutará sin importar si se produce o no un error.\n\ncoutries = {\n 'mexico': 122,\n 'colombia': 49,\n 'argentina': 43,\n 'chile': 18,\n 'peru': 31\n}\n\nwhile True:\n country = str(input('Escribe el nombre de un país: ')).lower()\n\n try:\n print('La población de {} es: {} millones'.format(country, coutries[country]))\n except KeyError:\n print('No tenemos el dato de la población de {}'.format(country))\n" }, { "alpha_fraction": 0.657306969165802, "alphanum_fraction": 0.6975111961364746, "avg_line_length": 25.133333206176758, "blob_id": "eff3852dd81586f3054f44842864a1fc08f35a5b", "content_id": "2db8e12427148638576bf5df4595f4ff58a0b95d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1580, "license_type": "no_license", "max_line_length": 107, "num_lines": 60, "path": "/C21operacionesConListas.py", "repo_name": "RicHz13/PythonExercices", "src_encoding": "UTF-8", "text": "#las operaciones con listas son similares a las que se pueden hacer con números\n#Por ejemplo sumar listas:\nmi_lista_1 = [1, 2, 3, 4]\nmi_lista_2 = [5, 6, 7]\nmi_lista_3 = mi_lista_1 + mi_lista_2\n\nprint('Esta es la unión de las listas 1 y 2: ', mi_lista_3)\n\n#multiplicación de listas\nmi_lista_4 = ['a', 'b']\nprint('esta es una multiplicación de la lista 4', mi_lista_4 * 5)\n\n#rebanar listas\n\nprint('Este es un pedazo de lista ingresando los índices deseados y salto que queremos', mi_lista_3[2:5:1])\n\n#reemplazo de valor en una lista\nmi_lista_3[6] = 9\n\nprint('aquí reemplazamos el valor del índice 6, cambiando de 7 a 9: ', mi_lista_3)\n\n#Eliminar el último elemento de una lista\n\nmi_lista_3 = mi_lista_3.pop()\n\nprint('Eliminamos el último elemento de la lista: ', mi_lista_3)\n\n#ordenar una lista con sorted\n\nmi_lista_5 = [5, 3, 7, 2, 4, 1, 6]\n\nmi_lista_5 = sorted(mi_lista_5)\n\nprint('lista ordenada con la función sorted: ', 
mi_lista_5)\n\n#ampliar una lista\n\nmi_lista_6 = ['ab', 'cd', 'ef']\n\nmi_lista_7 = [10, 11, 12, 13]\n\nmi_lista_6.extend(mi_lista_7)\n\nprint('lista 3 extendida con la lista 5: ', mi_lista_6)\n\n#eliminar un elemento de la lista\n\ndel mi_lista_6[0]\n\nprint('Borramos el índice 0 de la lista, que correspondía a \"ab\": ', mi_lista_6)\n\n#una cadena de texto se puede convertir una lista\n\npersona = 'betsy'\nlist_betsy = list(persona)\nprint('creamos una lista a partir de una cadena de texto:', list_betsy)\n\n#recostruir cadena de texto a partir de una lista\nrec_word = ''.join(list_betsy)\nprint('recostrucción de la cadena de texto a partir la lista: ', rec_word)" }, { "alpha_fraction": 0.6191646456718445, "alphanum_fraction": 0.6265356540679932, "avg_line_length": 17.545454025268555, "blob_id": "bff2a47381967005507f586ea7a070d6a944790d", "content_id": "be6cffaab55a3c9be1e0014e263f39cc94356a32", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 408, "license_type": "no_license", "max_line_length": 47, "num_lines": 22, "path": "/claseFunciones.py", "repo_name": "RicHz13/PythonExercices", "src_encoding": "UTF-8", "text": "import turtle\ndef main():\n window = turtle.Screen()\n ricardo = turtle.Turtle()\n\n make_square(ricardo)\n turtle.mainloop()\n\ndef make_square(ricardo):\n length = int(input('Tamaño de cuadrado: '))\n\n for i in range(4):\n make_line_and_turn(ricardo, length)\n\ndef make_line_and_turn(ricardo, length):\n ricardo.forward(length)\n ricardo.left(90)\n\n\n\nif __name__ == '__main__':\n main()" }, { "alpha_fraction": 0.7985672354698181, "alphanum_fraction": 0.7985672354698181, "avg_line_length": 93.87999725341797, "blob_id": "cd77a641afbfe117282db1b125c85cb3906330e0", "content_id": "815bfa44451eb387f74f999a251b32e4b22174cf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2405, "license_type": "no_license", "max_line_length": 310, "num_lines": 25, "path": "/C36ProgramacionOrientadaObjetos.py", "repo_name": "RicHz13/PythonExercices", "src_encoding": "UTF-8", "text": "#Programación orientada a objetos\n#Permite definir tipos propios\n#Permite manejar datos y lógica en un solo contenedor\n#Las clases son como fábricas (moldes) para crear otros objetos\n#los objetos tiene atributos que se puede definir al momento de \"inicializar\" un nuevo objeto o directamente\n#en la estancia\n#las clases pueden tener varibales de clase, variables de instancia y variables locales\n#Aunque Python no tiene un concepto de varibales privadas integrado al lenguaje, es práctica común\n#definirlas con un guíon bajo\n#\"isinstance\" y \"hasattr\"\n#Los métodos son como funciones que tienen sentido únicamente en el contexto de una clase\n#Al igual que las varibles, los métodos privados se definen con un guión bajo\n#La \"ENCAPSULACIÓN\" es un concepto clave de la progración orientada a objetos.\n #la practica de aplicar este principio es declarar todas las variables y métodos como privados,\n # a menos que sea necesario exponerlos a otros programadores\n#Un método clave que casi todas las clases deben tener es \"__init__\"\n# Otro es \"__str__\" \n#Existen varios tipos de métodos, estáticos, de clase, de instancia, getters y setters\n #CLASE: Plantilla o modelo a partir de la cual podemos crear objetos y donde se determinan los atributos (características) y métodos (acciones realizables) que tendrán.\n #OBJETO: Normalmente creado a partir de una clase son entidades que almacenan 
información en forma de argumento para atributos y realizan acciones organizadas a través de métodos que permiten cambiar atributos o estados del objeto o de otros objetos de la clase.\n #ATRIBUTOS: Son variables de diferentes tipos (Entero; Texto; Booleanos) que pueden tener valores por defecto o le podrán ser asignadas al momento de la Instancia de un objeto determinando sus características y estado.\n #MÉTODOS: Son funciones; acciones realizables por el objeto que permiten cambiar sus atributos o el de otros objetos de la clase permitiendo un cambio de estado del objeto; a menudo requiriendo «argumentos (valores)» para determinados «parámetros (nombres de variables establecidas en funciones o rutinas).\n #INSTANCIA: Palabra que refiere a la creación de un objeto partiendo de una clase. Durante la instancia son asignados los valores iniciales para los atributos.\n\n#NOTA: BUSCAR DIFERENCIAS ENTRE MÉTODOS ESTÁTICOS, DE CLASE, DE INSTANCIA, GETTERS Y SETTERS\n\n" }, { "alpha_fraction": 0.742671012878418, "alphanum_fraction": 0.7557003498077393, "avg_line_length": 76, "blob_id": "8c1c474b2edb6099aece3c970907a59db5a758c7", "content_id": "3f562bb34e1bd35f61df0d50231a2fbc7a22b77b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 309, "license_type": "no_license", "max_line_length": 91, "num_lines": 4, "path": "/C12comparacionDeStringsYunicode.py", "repo_name": "RicHz13/PythonExercices", "src_encoding": "UTF-8", "text": "#las cadenas de texto son inmutables en python, no se les puede reasignar un valor.\n#Para hacerlo, debemos asignar una nueva variable\n#en python 3, las strings las toma ya con UNICODE, en python 2 se toman con ASCII\n#para que python 2 tome UNICODE hay que poner esta línea de código: # -*- coding: utf-8 -*-" }, { "alpha_fraction": 0.6218905448913574, "alphanum_fraction": 0.646766185760498, "avg_line_length": 31.200000762939453, "blob_id": "101b3b96eaf410b20c9d7d5c5764be1d9a05d1e5", "content_id": "7863beaed6157ff0e853dfca68fdac7cf1fdf3a4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 808, "license_type": "no_license", "max_line_length": 83, "num_lines": 25, "path": "/C20introduccionAlistas.py", "repo_name": "RicHz13/PythonExercices", "src_encoding": "UTF-8", "text": "#Una lista es una secuencia de elementos\n#Cuando se asigna a una variable, permite agrupar varios elementos en un solo lugar\n#Se crean con los corchetes [] o con la keyword list\n#Ejemplos:\n #supermercado = ['apio', 'tomate', 'queso']\n\n#Acceso a elementos:\n#Se puede acceder a los elementos de una lista con su índice.\n#Los índices se comienzan a contar desde 0\n #supermercado = ['apio', 'tomate', 'queso']\n #índices 0 1 2\n #supermercado[0] nos traería 'apio'\n\ndef average_temps(temps):\n sum_of_temps = 0\n for temp in temps:\n sum_of_temps += temp\n\n return sum_of_temps / len(temps)\n\nif __name__ == '__main__':\n temps = [21, 26, 25, 20, 30, 22, 24]\n\n average = average_temps(temps)\n print('El promedio de las temperaturas es: {}'.format(average))" }, { "alpha_fraction": 0.6049222946166992, "alphanum_fraction": 0.6599740982055664, "avg_line_length": 34.1136360168457, "blob_id": "a5a895d72c8426bef24c8addf6fb0fae06fc47b0", "content_id": "48d254fb151eb621130afbeb2ba55bc6501f7306", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1560, "license_type": "no_license", "max_line_length": 117, "num_lines": 44, "path": 
"/C25C26BusquedaBinaria.py", "repo_name": "RicHz13/PythonExercices", "src_encoding": "UTF-8", "text": "#Con el algoritmo de búsqueda binaria partimos de la lista ordenada, nosotros sabemos que hay números mayores\n#y menores que el que estamos buscando.\n#Seleccionamos un número aleatorio para dividir la lista, puedes escoger cualquier número\n#en éste caso sumanos el primer y el último índice de la lista, los sumamos y dividimos en dos (por eso se llama\n# binario), luego compraramos el número que está en el índice, de esta manera ya eliminamos la mitad de las opciones.\n#Podemos continuar dividiendo la lista y comparando hasta que lleguemos al resultado esperado\n\n#Implementación CLASE 26\n\ndef binary_search(numbers, number_to_find, low, high):\n if low > high:\n return False\n\n mid = (low + high) // 2\n\n if numbers[mid] == number_to_find:\n return True\n elif numbers[mid] > number_to_find:\n return binary_search(numbers, number_to_find, low, mid - 1)\n else:\n return binary_search(numbers, number_to_find, mid + 1, high)\n\n\n\nif __name__ == '__main__':\n #numbers = [1, 3, 4, 5, 6 , 9, 10, 11, 25, 27, 28, 34, 36, 49, 51]\n\n numbers = [9, 3, 78, 15, 98, 100, 34, 423, 16, 88, 2, 77, 1000]\n\n numbers.sort()\n\n #numbers_des = [9, 3, 78, 15, 98, 100, 34, 423, 16, 88, 2, 77, 1000]\n\n #numbers = sorted(numbers_des)\n\n number_to_find = int(input('ingresa un número: '))\n\n result = binary_search(numbers, number_to_find, 0, len(numbers) - 1)\n\n if result is True:\n print('El número si está en la lista'),\n print(numbers)\n else:\n print('El número no está en la lista')" }, { "alpha_fraction": 0.6342412233352661, "alphanum_fraction": 0.6750972867012024, "avg_line_length": 21.39130401611328, "blob_id": "037265fd5040680ac17d447b5d0c103c05391328", "content_id": "a2345151b48ef4afc1cbdb575b3483b6073bd9b2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 518, "license_type": "no_license", "max_line_length": 84, "num_lines": 23, "path": "/C31UsoDeSets.py", "repo_name": "RicHz13/PythonExercices", "src_encoding": "UTF-8", "text": "#Los sets son muy similares a las listas, pero estas no permiten elementos repetidos\n\n#las operaciones principales son la unión, intersección y diferencia\n\n#unión\ns = set([1, 2, 3])\nt = set([3, 4, 5])\nprint(s.union(t))\n\n#intersección\ns = set([1, 2, 3])\nt = set([3, 4, 5])\nprint(s.intersection(t))\n\n#diferencia\ns = set([1, 2, 3])\nt = set([3, 4, 5])\nprint(s.difference(t)) #que elementos tiene s que no tiene t\nprint(t.difference(s)) #que elementos tiene t que no tiene s\n\n1 in s #True\n1 not in t #True\n1 in t #False" }, { "alpha_fraction": 0.6385068893432617, "alphanum_fraction": 0.6463654041290283, "avg_line_length": 24.5, "blob_id": "d9ffec0684824061a4125d79496578b953aa97c5", "content_id": "fd58ac51ca7dafb9237723cccbc8e324031698e6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 510, "license_type": "no_license", "max_line_length": 92, "num_lines": 20, "path": "/claseFuncionesConParametros.py", "repo_name": "RicHz13/PythonExercices", "src_encoding": "UTF-8", "text": "def foreign_exchange_calculator(ammount):\n mex_to_us_rate = 19.08\n\n return ammount / mex_to_us_rate\n\ndef run():\n print('Calculadora de divisas')\n print('Convierte pesos mexicanos a dolares')\n print('')\n\n ammount = float(input('Ingresa la cantidad de pesos mexicanos que quieres convertir: '))\n\n result = foreign_exchange_calculator(ammount)\n\n print('${} pesos 
mexicanos son ${} dólares'.format(ammount, result))\n print('')\n\nif __name__ == '__main__':\n run()\n print('Final {}')" }, { "alpha_fraction": 0.7429760694503784, "alphanum_fraction": 0.7502601742744446, "avg_line_length": 30, "blob_id": "3f628bf39948e740bf32591480d3df4e25849bc2", "content_id": "7fba6a517862a3d8a93a7361a24beeac417576a9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 961, "license_type": "no_license", "max_line_length": 100, "num_lines": 31, "path": "/C27Diccionarios.py", "repo_name": "RicHz13/PythonExercices", "src_encoding": "UTF-8", "text": "#se pueden recorrer por llave, valor y ambos.\n\n#ejemplos\nmi_diccionario = {}\nmi_diccionario['primer_elemento'] = 'Hola'\nmi_diccionario['segundo_elemento'] = 'Adios'\nprint (mi_diccionario['primer_elemento'])\n\ncalificaciones = {}\ncalificaciones['algoritmos'] = 9\ncalificaciones['historia'] = 10\ncalificaciones['calculo_integral'] = 9\ncalificaciones['informatica'] = 7\ncalificaciones['bases de datos'] = 6\n\nfor key in calificaciones: #itera/recorre el diccionario, regresando las llaves\n print(key)\n\nfor value in calificaciones.values(): #itera/recorre el diccionario, regresando los valores\n print(value)\n\nfor key, value in calificaciones.items(): #itera/recorre el diccionario, regresando llaves y valores\n print('llave: {}, valor {}'.format(key,value))\n\nsuma_de_calificaciones = 0\nfor calificacion in calificaciones.values():\n suma_de_calificaciones += calificacion\n\npromedio = suma_de_calificaciones / len(calificaciones.values())\n\nprint(promedio)\n" } ]
17
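C34ManejoDeArchivos.py in the record above describes the open() modes (r, w, a, r+), the `with` keyword for managing context, and the `close` method, but ships no code. Here is a minimal sketch of each point; 'notas.txt' is just a placeholder file name, not one used by the exercises.

```python
# Minimal sketch of the four modes described in C34; 'notas.txt' is a placeholder.
with open('notas.txt', 'w') as f:      # 'w' creates the file, or overwrites it
    f.write('first line\n')

with open('notas.txt', 'a') as f:      # 'a' appends at the end of the file
    f.write('second line\n')

with open('notas.txt', 'r') as f:      # 'r' reads; 'with' closes the file for us
    print(f.read())

f = open('notas.txt', 'r+')            # 'r+' reads and writes; without 'with',
print(f.readline())                    # you must call close() yourself
f.close()
```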
12161003677/Projeto-Metodologias-de-Pesquisa
https://github.com/12161003677/Projeto-Metodologias-de-Pesquisa
f37651019311d41f40058b271aef41a534205ac8
1faab27d43f82720383a4fb5610fe57d36cb530a
9fee259ca5d9d49e5b492c2a743d31f61ad04de7
refs/heads/master
2020-07-24T00:29:07.821844
2020-05-01T00:11:39
2020-05-01T00:11:39
207,748,398
3
0
null
null
null
null
null
[ { "alpha_fraction": 0.6867196559906006, "alphanum_fraction": 0.6992054581642151, "avg_line_length": 37.21739196777344, "blob_id": "6a7706a1385026c411b83bbc4432487ae0d26a37", "content_id": "d54f0c527ad9c0945b9eb41f01be09eeb7604a72", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 881, "license_type": "no_license", "max_line_length": 74, "num_lines": 23, "path": "/core/models.py", "repo_name": "12161003677/Projeto-Metodologias-de-Pesquisa", "src_encoding": "UTF-8", "text": "from django.db import models\nfrom django.contrib.auth.models import User\n\nclass Pet(models.Model):\n city = models.CharField(max_length=100, null=True)\n bedrooms = models.IntegerField(default=0)\n toilets = models.IntegerField(default=0)\n peoples = models.IntegerField(default=0)\n price = models.DecimalField(max_digits=8, decimal_places=2, default=0)\n description = models.TextField(null=True)\n phone = models.CharField(max_length=13, null=True)\n email = models.EmailField(null=True)\n begin_date = models.DateField(auto_now_add=True,blank=True, null=True)\n photo = models.ImageField(upload_to='pet', null=True)\n active = models.BooleanField(default=True)\n user = models.ForeignKey(User, on_delete=models.CASCADE)\n\n def __str__(self):\n return str(self.id)\n\n class Meta:\n verbose_name_plural = 'Pets'\n db_table = 'pet'\n\n\n" }, { "alpha_fraction": 0.4864864945411682, "alphanum_fraction": 0.6824324131011963, "avg_line_length": 15.44444465637207, "blob_id": "bff683933a2e809943b7b66be5e0caf652ca85f4", "content_id": "5181d9fcf40ed4620a68fe7f885997bc51b30e43", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 148, "license_type": "no_license", "max_line_length": 22, "num_lines": 9, "path": "/requirements-dev.txt", "repo_name": "12161003677/Projeto-Metodologias-de-Pesquisa", "src_encoding": "UTF-8", "text": "dj-database-url==0.5.0\ndj-static==0.0.6\nDjango==2.2.2\nPillow==6.1.0\nPyMySQL==0.9.3\npython-decouple==3.1\npytz==2019.1\nsqlparse==0.3.0\nstatic3==0.7.0\n" }, { "alpha_fraction": 0.5025641322135925, "alphanum_fraction": 0.5324786305427551, "avg_line_length": 26.85714340209961, "blob_id": "b41093b369bf9c9ae60a5e0a4d2646a829fa4a58", "content_id": "57ca1eea96be35b5b0bfa8f9af8ec06d0cf02170", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1170, "license_type": "no_license", "max_line_length": 82, "num_lines": 42, "path": "/core/migrations/0003_auto_20190911_0406.py", "repo_name": "12161003677/Projeto-Metodologias-de-Pesquisa", "src_encoding": "UTF-8", "text": "# Generated by Django 2.2.2 on 2019-09-11 07:06\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('core', '0002_auto_20190626_1548'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='pet',\n name='bedrooms',\n field=models.IntegerField(default=11),\n preserve_default=False,\n ),\n migrations.AddField(\n model_name='pet',\n name='peoples',\n field=models.IntegerField(default=''),\n preserve_default=False,\n ),\n migrations.AddField(\n model_name='pet',\n name='price',\n field=models.DecimalField(decimal_places=2, default='', max_digits=8),\n preserve_default=False,\n ),\n migrations.AddField(\n model_name='pet',\n name='toilets',\n field=models.IntegerField(default=''),\n preserve_default=False,\n ),\n migrations.AlterField(\n model_name='pet',\n name='begin_date',\n field=models.DateField(auto_now_add=True, null=True),\n 
),\n ]\n" }, { "alpha_fraction": 0.49355217814445496, "alphanum_fraction": 0.5369284749031067, "avg_line_length": 24.84848403930664, "blob_id": "ec6060ea2287a07d16a58f800afe11aeddb8a510", "content_id": "26a3605df3d0dd02a95c728b8ed181eadff282a3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 853, "license_type": "no_license", "max_line_length": 81, "num_lines": 33, "path": "/core/migrations/0006_auto_20190911_0415.py", "repo_name": "12161003677/Projeto-Metodologias-de-Pesquisa", "src_encoding": "UTF-8", "text": "# Generated by Django 2.2.2 on 2019-09-11 07:15\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('core', '0005_auto_20190911_0414'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='pet',\n name='bedrooms',\n field=models.IntegerField(default=0),\n ),\n migrations.AddField(\n model_name='pet',\n name='peoples',\n field=models.IntegerField(default=0),\n ),\n migrations.AddField(\n model_name='pet',\n name='price',\n field=models.DecimalField(decimal_places=2, default=0, max_digits=8),\n ),\n migrations.AddField(\n model_name='pet',\n name='toilets',\n field=models.IntegerField(default=0),\n ),\n ]\n" }, { "alpha_fraction": 0.7020202279090881, "alphanum_fraction": 0.7020202279090881, "avg_line_length": 27.428571701049805, "blob_id": "40aadd8e4588aeb93c546b484f5faa43a5014e26", "content_id": "605defcb82975ca9922d29597a0f6267114b0f85", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 198, "license_type": "no_license", "max_line_length": 84, "num_lines": 7, "path": "/core/admin.py", "repo_name": "12161003677/Projeto-Metodologias-de-Pesquisa", "src_encoding": "UTF-8", "text": "from django.contrib import admin\nfrom .models import Pet\n\n\[email protected](Pet)\nclass PetAdmin(admin.ModelAdmin):\n list_display = ['id', 'city', 'user', 'bedrooms', 'toilets', 'peoples', 'price']" }, { "alpha_fraction": 0.7331786751747131, "alphanum_fraction": 0.7494199275970459, "avg_line_length": 52.875, "blob_id": "37de13dc863aa4f1cb2362da5b7bd96f7358ee61", "content_id": "73f40fbe5478bd9b9df2f53228bdb64c8e16b21e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 438, "license_type": "no_license", "max_line_length": 124, "num_lines": 8, "path": "/README.md", "repo_name": "12161003677/Projeto-Metodologias-de-Pesquisa", "src_encoding": "UTF-8", "text": "# Projeto-Metodologias-de-Pesquisa\n<h3>Projeto referente à disciplina de Metodologias de Pesquisa do 2º período da Computação no Campus Muzambinho em 2019</h3>\n<p> - Backend em Python utilizando o framework Django</p>\n<p> - Frontend utilizando framework Bootstrap</p>\n<p> - Arquivos de mídia hospedados no Cloudinary</p>\n<p> - Controle de vesões Git</p>\n<p> - Site hospedado no Heroku</p>\n<p>Demo: http://moraki.herokuapp.com/</p>\n" }, { "alpha_fraction": 0.6058527827262878, "alphanum_fraction": 0.6150227189064026, "avg_line_length": 61.38308334350586, "blob_id": "d8bf084328bb761d82af239f695be95e6f3b4ff0", "content_id": "963bd764bb2a9a3acc2b7fa04183f10d272d11ce", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "HTML", "length_bytes": 12639, "license_type": "no_license", "max_line_length": 299, "num_lines": 201, "path": "/core/templates/template/sobre.html", "repo_name": "12161003677/Projeto-Metodologias-de-Pesquisa", "src_encoding": "UTF-8", "text": "\n <section 
class=\"probootstrap-slider flexslider probootstrap-inner\">\n <ul class=\"slides\">\n <li style=\"background-image: url(<?php echo base_url(); ?>assets/img/slider_1.jpg);\" class=\"overlay\">\n <div class=\"container\">\n <div class=\"row\">\n <div class=\"col-md-10 col-md-offset-1\">\n <div class=\"probootstrap-slider-text text-center\">\n <p><img alt=\"A imagem não foi carregada corretamente.\" src=\"<?php echo base_url(); ?>assets/img/curve_white.svg\" class=\"seperator probootstrap-animate\" alt=\"Free HTML5 Bootstrap Template\"></p>\n <h1 class=\"probootstrap-heading probootstrap-animate\"><?php echo $tema ?></h1>\n <div class=\"probootstrap-animate probootstrap-sub-wrap\">Conheça a nossa história, nossos propósitos e nossos serviços.</div>\n </div>\n </div>\n </div>\n </div>\n </li>\n </ul>\n </section>\n \n <section class=\"probootstrap-section\">\n <div class=\"container\">\n <div class=\"row mb30\">\n <div class=\"col-md-12\">\n <figure>\n <img alt=\"A imagem não foi carregada corretamente.\" src=\"<?php echo base_url(); ?>assets/img/slider_2.jpg\" alt=\"Free HTML5 Bootstrap Template by uicookies.com\" class=\"img-responsive\">\n </figure>\n </div>\n </div>\n <div class=\"row\">\n <div class=\"col-md-6\">\n <p>A Atlantis surgiu com o propósito de oferecer o que há de melhor em termos de qualidade no ramo de hospedagem. Sendo assim conta com uma equipe profissionalizada e capacitada para lhe atender à qualquer hora em qualquer lugar.</p>\n\n <p>Contamos com parcerias em todo o mundo, pois ao longo de sua tragetória a Atlantis sempre esteve procipada com o bom gosto e a qualidade de seus serviços, deixando assim uma ótima imagem por onde esteva e que passou a sempre estar.</p>\n </div>\n <div class=\"col-md-6\">\n <p>Assim evoluimos e acreditamos que a satisfação do cliente nãpo é um mero conceito, mas sim uma filosofia de empresa. Não perca tem e entre em contato conosco, queremos o seu melhor e praa isso nos capacitamos. Agora só depende de você!</p>\n </div>\n </div>\n </div>\n </section>\n\n <section class=\"probootstrap-section probootstrap-section-dark\">\n <div class=\"container\">\n <div class=\"row\">\n <div class=\"col-md-12 text-center\">\n <h2 class=\"mt0\">Porquê Escolher-nos?</h2>\n <p class=\"mb50\"><img alt=\"A imagem não foi carregada corretamente.\" src=\"<?php echo base_url(); ?>assets/img/curve.svg\" class=\"svg\" alt=\"Free HTML5 Bootstrap Template\"></p>\n </div>\n <div class=\"col-md-4\">\n <div class=\"service left-icon left-icon-sm probootstrap-animate\">\n <div class=\"icon\">\n <i class=\"icon-check\"></i>\n </div> \n <div class=\"text\">\n <h3>Milhares de Quartos e Hotéis</h3>\n <p>Seja para onde quer que você vá, pode se sentir em casa com a Atlantis. Confira centenas de hotéis e milhares de quartos em cada cantinho do globo!</p>\n <p><a href=\"#\" class=\"link-with-icon\">Ler Mais <i class=\" icon-chevron-right\"></i></a></p>\n </div>\n </div>\n </div>\n <div class=\"col-md-4\">\n <div class=\"service left-icon left-icon-sm probootstrap-animate\">\n <div class=\"icon\">\n <i class=\"icon-check\"></i>\n </div>\n <div class=\"text\">\n <h3>Comidas &amp; Bebidas</h3>\n <p>Viajar é experimentar coisas novas inclusive a culinária. Então confira o que preparamos para você em cada lugar que quiser estar. 
Não perca esta chance!</p>\n <p><a href=\"#\" class=\"link-with-icon\">Ler Mais <i class=\" icon-chevron-right\"></i></a></p>\n </div> \n </div>\n </div>\n <div class=\"col-md-4\">\n <div class=\"service left-icon left-icon-sm probootstrap-animate\">\n <div class=\"icon\">\n <i class=\"icon-check\"></i>\n </div>\n <div class=\"text\">\n <h3>Aeroporto Taxi</h3>\n <p>Comodidade e segurança são sempre prioridades. Inclua em suas reservas um motorista para te levar a qualeur lugar quando quiser!</p>\n <p><a href=\"#\" class=\"link-with-icon\">Ler Mais <i class=\" icon-chevron-right\"></i></a></p>\n </div> \n </div>\n </div>\n </div>\n </div>\n </section>\n \n <section class=\"probootstrap-section\">\n <div class=\"container\">\n <div class=\"row\">\n <div class=\"col-md-8 col-md-offset-2 mb50 text-center probootstrap-animate\">\n <h2 class=\"mt0\">Mais Recursos</h2>\n <p class=\"mb30\"><img alt=\"A imagem não foi carregada corretamente.\" src=\"<?php echo base_url(); ?>assets/img/curve.svg\" class=\"svg\" alt=\"Free HTML5 Bootstrap Template\"></p>\n </div>\n </div>\n <div class=\"row\">\n <div class=\"col-md-4 col-sm-6 col-xs-12 probootstrap-animate\">\n <h3 class=\"heading-with-icon\"><i class=\"icon-heart2\"></i> <span>Destinos Mais Visitados</span></h3>\n <p>Confira os destinos mais procurados e concorridos. Acessando nosso blog você estará em contato com pessoas fazendo uso dos nossos serviços netse momento. Confira o que estão fazendo nos destinos mais requisitados.</p>\n <p><a href=\"#\" class=\"link-with-icon\">Ler Mais <i class=\" icon-chevron-right\"></i></a></p>\n </div>\n <div class=\"col-md-4 col-sm-6 col-xs-12 probootstrap-animate\">\n <h3 class=\"heading-with-icon\"><i class=\"icon-rocket\"></i> <span>Viaje Conosco</span></h3>\n <p>Além do ramo das hospedagens a Atlantis também organiza viajens para diversos pontos turísticos do mundo. Viajando conosco você conta com descontos especiais na hora da hospedagem. não perca tempo e venha conferir.</p>\n <p><a href=\"#\" class=\"link-with-icon\">Ler Mais <i class=\" icon-chevron-right\"></i></a></p>\n </div>\n <div class=\"clearfix visible-sm-block\"></div>\n <div class=\"col-md-4 col-sm-6 col-xs-12 probootstrap-animate\">\n <h3 class=\"heading-with-icon\"><i class=\"icon-image\"></i> <span>Redistre Seus Melhores Momentos</span></h3>\n <p>A Atlantis não quer que você perca nenhuma das suas melhores lembranças e por isso oferece um dos melhores serviços de filmagem e fotografia. assim você leva para casa seus melhores momentos registrados de pertinho por nós.</p>\n <p><a href=\"#\" class=\"link-with-icon\">Ler Mais <i class=\" icon-chevron-right\"></i></a></p>\n </div>\n <div class=\"clearfix visible-lg-block visible-md-block\"></div>\n <div class=\"col-md-4 col-sm-6 col-xs-12 probootstrap-animate\">\n <h3 class=\"heading-with-icon\"><i class=\"icon-briefcase\"></i> <span>Serviço de Bagagens</span></h3>\n <p>Para sua maior segurança e comodidade contamos com um serviço especializado no trensporte de bagagens. Assim você pode levar em segurança seus objetos mais delicados e até seus pets. 
Fle conosco e confira.</p>\n <p><a href=\"#\" class=\"link-with-icon\">Ler Mais <i class=\" icon-chevron-right\"></i></a></p>\n </div>\n <div class=\"clearfix visible-sm-block\"></div>\n <div class=\"col-md-4 col-sm-6 col-xs-12 probootstrap-animate\">\n <h3 class=\"heading-with-icon\"><i class=\"icon-chat\"></i> <span>Forum do Site</span></h3>\n <p>Para tirar suas duvidas além de poder falar conosco diretamente através do formulário contamos com um forum onde clientes assíduos, funcionários e colaboradores estão à sua disposição. Fale conosco agora mesmo.</p>\n <p><a href=\"#\" class=\"link-with-icon\">Ler Mais <i class=\" icon-chevron-right\"></i></a></p>\n </div>\n <div class=\"col-md-4 col-sm-6 col-xs-12 probootstrap-animate\">\n <h3 class=\"heading-with-icon\"><i class=\"icon-colours\"></i> <span>Presentes para a Família</span></h3>\n <p>Para que todos se sintam felizes a Atlantis tem um sistema de presentear seus clientes com lembranças referentes aos pontos turísticos visitados. Assim você pode presentear toda a família quando voltar.</p>\n <p><a href=\"#\" class=\"link-with-icon\">Ler Mais <i class=\" icon-chevron-right\"></i></a></p>\n </div>\n <div class=\"clearfix visible-lg-block visible-md-block visible-sm-block\"></div>\n </div>\n </div>\n </section>\n \n <section class=\"probootstrap-section\">\n <div class=\"container\">\n <div class=\"row mb30\">\n <div class=\"col-md-8 col-md-offset-2 probootstrap-section-heading text-center\">\n <h2>Explore Nossos Serviços</h2>\n <p class=\"lead\">Viajar é colecionar memórias sobre o mundo. Não perca tempo, venha conhecer o que há de melhor no ramo turístico.</p>\n <p><img alt=\"A imagem não foi carregada corretamente.\" src=\"<?php echo base_url(); ?>assets/img/curve.svg\" class=\"svg\" alt=\"Free HTML5 Bootstrap Template\"></p>\n </div>\n </div>\n <div class=\"row\">\n <div class=\"col-md-4\">\n <div class=\"service left-icon probootstrap-animate\">\n <div class=\"icon\">\n <img alt=\"A imagem não foi carregada corretamente.\" src=\"<?php echo base_url(); ?>assets/img/flaticon/svg/001-building.svg\" class=\"svg\" alt=\"Free HTML5 Bootstrap Template by uicookies.com\">\n </div>\n <div class=\"text\">\n <h3>Milhares de Quartos e Hotéis</h3>\n <p>Seja para onde quer que você vá, pode se sentir em casa com a Atlantis. Confira centenas de hotéis e milhares de quartos em cada cantinho do globo!</p>\n <p><a href=\"#\" class=\"link-with-icon\">Ler Mais <i class=\" icon-chevron-right\"></i></a></p>\n </div> \n </div>\n </div>\n <div class=\"col-md-4\">\n <div class=\"service left-icon probootstrap-animate\">\n <div class=\"icon\">\n <img alt=\"A imagem não foi carregada corretamente.\" src=\"<?php echo base_url(); ?>assets/img/flaticon/svg/003-restaurant.svg\" class=\"svg\" alt=\"Free HTML5 Bootstrap Template by uicookies.com\">\n </div>\n <div class=\"text\">\n <h3>Comidas &amp; Bebidas</h3>\n <p>Viajar é experimentar coisas novas inclusive a culinária. 
Então confira o que preparamos para você em cada lugar que quiser estar!</p>\n <p><a href=\"#\" class=\"link-with-icon\">Ler Mais <i class=\" icon-chevron-right\"></i></a></p>\n </div> \n </div>\n </div>\n <div class=\"col-md-4\">\n <div class=\"service left-icon probootstrap-animate\">\n <div class=\"icon\">\n <img alt=\"A imagem não foi carregada corretamente.\" src=\"<?php echo base_url(); ?>assets/img/flaticon/svg/004-parking.svg\" class=\"svg\" alt=\"Free HTML5 Bootstrap Template by uicookies.com\">\n </div>\n <div class=\"text\">\n <h3>Aeroporto Taxi</h3>\n <p>Comodidade e segurança são sempre prioridades. Inclua em suas reservas um motorista para te levar a qualeur lugar quando quiser!</p>\n <p><a href=\"#\" class=\"link-with-icon\">Ler Mais <i class=\" icon-chevron-right\"></i></a></p>\n </div> \n </div>\n </div>\n </div>\n </div>\n </section>\n\n <section class=\"probootstrap-half\">\n <div class=\"image\" style=\"background-image: url(<?php echo base_url(); ?>assets/img/slider_4.jpg);\"></div>\n <div class=\"text\">\n <div class=\"probootstrap-animate fadeInUp probootstrap-animated\">\n <h2 class=\"mt0\">Melhor Hotel 5 estrelas</h2>\n <p><img alt=\"A imagem não foi carregada corretamente.\" src=\"<?php echo base_url(); ?>assets/img/curve_white.svg\" class=\"seperator\" alt=\"Free HTML5 Bootstrap Template\"></p>\n <div class=\"row\">\n <div class=\"col-md-6\">\n <p>Pesquise a rede de hotel de sua preferência e encontre um quarto de hotel que seja perfeito para você. Encontre desde hotéis all inclusive em Portugal até hotéis baratos para hospedagens em grandes eventos e que oferecem economia!</p>\n </div>\n <div class=\"col-md-6\"><p>Use os nossos mapas para encontrar e reservar uma hospedagem nos melhores hotéis, seja de frente para o mar, em um resort ou pousada e ainda, em um destino montanhoso. Temos hotéis fazenda e chalés. Compare os seguintes benefícios antes de sua reserva.</p> \n </div>\n </div>\n <p><a href=\"#\" class=\"link-with-icon white\">Ler Mais <i class=\" icon-chevron-right\"></i></a></p>\n </div>\n </div>\n </section>" }, { "alpha_fraction": 0.6955530047416687, "alphanum_fraction": 0.7001140117645264, "avg_line_length": 44, "blob_id": "626585f0d139f12dd7a860f8fa9d7a37bb5dba24", "content_id": "1d806a26cc7b0b74fdf44c466ba4708555e5ba3f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1755, "license_type": "no_license", "max_line_length": 145, "num_lines": 39, "path": "/sospet/urls.py", "repo_name": "12161003677/Projeto-Metodologias-de-Pesquisa", "src_encoding": "UTF-8", "text": "\"\"\"sospet URL Configuration\n\nThe `urlpatterns` list routes URLs to views. For more information please see:\n https://docs.djangoproject.com/en/2.2/topics/http/urls/\nExamples:\nFunction views\n 1. Add an import: from my_app import views\n 2. Add a URL to urlpatterns: path('', views.home, name='home')\nClass-based views\n 1. Add an import: from other_app.views import Home\n 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')\nIncluding another URLconf\n 1. Import the include() function: from django.urls import include, path\n 2. 
Add a URL to urlpatterns: path('blog/', include('blog.urls'))\n\"\"\"\nfrom django.contrib import admin\nfrom django.urls import path\nfrom core.views import login_user, submit_login, logout_user, index, list_all_pets, list_user_pets, pet_detail, pet_register, set_pet, pet_delete\nfrom django.views.generic import RedirectView\nfrom django.contrib.staticfiles.urls import static, staticfiles_urlpatterns #para as imagens . Não sei porque\nfrom . import settings\n\nurlpatterns = [\n path('admin/', admin.site.urls),\n path('login/', login_user, name=\"login\"),\n path('login/submit', submit_login),\n path('login/', logout_user, name=\"logout\"),\n path('', RedirectView.as_view(url='pet/all/')),\n path('pet/all/', list_all_pets, name=\"all_pets\"),\n path('pet/user/', list_user_pets, name=\"user_pets\"),\n path('pet/detail/<id>/', pet_detail, name=\"pet_detail\"),\n path('pet/register/', pet_register, name=\"pet_register\"),\n path('pet/delete/<id>', pet_delete, name=\"pet_delete\"),\n path('pet/register/submit', set_pet),\n path('index/', index, name=\"index\"),\n]\n\nurlpatterns += staticfiles_urlpatterns()\nurlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)" }, { "alpha_fraction": 0.4942667484283447, "alphanum_fraction": 0.5214242339134216, "avg_line_length": 27.568965911865234, "blob_id": "5935a2984c08aded797933238fbb895434f8522b", "content_id": "f72676da884fbfac6d5f4a9cbf83d27c1f264402", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1657, "license_type": "no_license", "max_line_length": 81, "num_lines": 58, "path": "/core/migrations/0004_auto_20190911_0408.py", "repo_name": "12161003677/Projeto-Metodologias-de-Pesquisa", "src_encoding": "UTF-8", "text": "# Generated by Django 2.2.2 on 2019-09-11 07:08\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('core', '0003_auto_20190911_0406'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='pet',\n name='bedrooms',\n field=models.IntegerField(default=0),\n ),\n migrations.AlterField(\n model_name='pet',\n name='city',\n field=models.CharField(max_length=100, null=True),\n ),\n migrations.AlterField(\n model_name='pet',\n name='description',\n field=models.TextField(null=True),\n ),\n migrations.AlterField(\n model_name='pet',\n name='email',\n field=models.EmailField(max_length=254, null=True),\n ),\n migrations.AlterField(\n model_name='pet',\n name='peoples',\n field=models.IntegerField(default=0),\n ),\n migrations.AlterField(\n model_name='pet',\n name='phone',\n field=models.CharField(max_length=13, null=True),\n ),\n migrations.AlterField(\n model_name='pet',\n name='photo',\n field=models.ImageField(null=True, upload_to='pet'),\n ),\n migrations.AlterField(\n model_name='pet',\n name='price',\n field=models.DecimalField(decimal_places=2, default=0, max_digits=8),\n ),\n migrations.AlterField(\n model_name='pet',\n name='toilets',\n field=models.IntegerField(default=0),\n ),\n ]\n" }, { "alpha_fraction": 0.4560000002384186, "alphanum_fraction": 0.5055999755859375, "avg_line_length": 20.55172348022461, "blob_id": "33e9c4f9e9f7bd0ab56a77a331dfca8d2d6a9077", "content_id": "16a34e24abc127fbe7c1b8926aa00b3a2450d889", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 625, "license_type": "no_license", "max_line_length": 47, "num_lines": 29, "path": "/core/migrations/0005_auto_20190911_0414.py", "repo_name": 
"12161003677/Projeto-Metodologias-de-Pesquisa", "src_encoding": "UTF-8", "text": "# Generated by Django 2.2.2 on 2019-09-11 07:14\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('core', '0004_auto_20190911_0408'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='pet',\n name='bedrooms',\n ),\n migrations.RemoveField(\n model_name='pet',\n name='peoples',\n ),\n migrations.RemoveField(\n model_name='pet',\n name='price',\n ),\n migrations.RemoveField(\n model_name='pet',\n name='toilets',\n ),\n ]\n" } ]
10
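Two spots in the sospet urls.py above deserve a note. First, both `login_user` and `logout_user` are routed to 'login/', and Django dispatches to the first matching pattern, so the logout route as written can never be reached. Second, the comment next to the media helpers says the author does not know why they are needed. The sketch below is a suggested rearrangement rather than the project's code; 'logout/' is an assumed path chosen only to remove the clash.

```python
# A hedged sketch, not the project's actual urls.py; 'logout/' is an assumed path.
from django.conf import settings
from django.conf.urls.static import static
from django.urls import path

from core.views import login_user, logout_user

urlpatterns = [
    path('login/', login_user, name="login"),
    path('logout/', logout_user, name="logout"),  # distinct path, so Django can match it
]

# Django's development server serves static assets on its own, but not user uploads
# (the Pet photos under MEDIA_ROOT); this helper adds that route while DEBUG is True,
# which is why the original file appends it to urlpatterns.
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
```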
mapjohns/pdsnd_github
https://github.com/mapjohns/pdsnd_github
8016893247ea6e56fe22eaea6401178752c10128
c6e11c05ebe0fafdd1900b769f81ce9fcb4a115b
5483d9c7a61c32431b061fd5159d5152bd2688ac
refs/heads/master
2020-04-04T08:55:27.432446
2018-11-03T23:16:43
2018-11-03T23:16:43
154,742,674
0
0
null
2018-10-25T21:56:45
2018-08-22T19:30:47
2018-10-18T00:40:48
null
[ { "alpha_fraction": 0.6187356114387512, "alphanum_fraction": 0.6221332550048828, "avg_line_length": 37.330230712890625, "blob_id": "1211b25ddafd680537771bef69d8bcb0c00d22b9", "content_id": "a9bc67182259df10b8364c3beacbbf9f6d4ee64b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8241, "license_type": "no_license", "max_line_length": 184, "num_lines": 215, "path": "/bikeshare.py", "repo_name": "mapjohns/pdsnd_github", "src_encoding": "UTF-8", "text": "import time\nimport pandas as pd\n\nCITY_DATA= { 'chicago': 'chicago.csv',\n 'new york city': 'new_york_city.csv',\n 'washington': 'washington.csv' }\n\n\ndef get_filters():\n \"\"\"\n Asks user to specify a city, month, and day to analyze.\n \"\"\"\n\n print('Hello! Let\\'s explore some US bikeshare data!')\n\n #Gets user input for city (chicago, new york city, washington) and uses a 'while' loop to handle invalid inputs.\n city = input(\"Would you like to see data for Chicago, New York City, or Washington?:\").title()\n while city not in ('Chicago', 'New York City', 'Washington'):\n print(\"Please enter a valid answer\")\n city = input(\"Would you like to see data for Chicago, New York City, or Washington?:\").title()\n\n #Gets user input for month (all, january, february, ... , june) and uses a 'while' loop to handle invalid inputs.\n month = input(\"Which month would you like to see data for? Or type 'all' to see data for all months:\").title()\n while month not in ('January', 'February','March','April','May','June','July','All'):\n print(\"Answer not valid, please try again\")\n month = input(\"Which month would you like to see data for? Or type 'all' to see data for all months:\").title()\n\n\n #Gets user input for day of week (all, monday, tuesday, ... sunday) and uses a 'while' loop to handle invalid inputs.\n day = input(\"Which day would you like to see data for? Or type 'all' for every day of the week:\").title()\n while day not in ('Monday','Tuesday','Wednesday','Thursday','Friday','Saturday','Sunday','All'):\n print(\"Please enter a valid answer\")\n day = input(\"Which day would you like to see data for? 
Or type 'all' for every day of the week:\").title()\n\n print('-'*40)\n return city, month, day\n\ndef load_data(city, month, day):\n \"\"\"\n Loads data for the specified city and filters by month and day if applicable.\n \"\"\"\n month = month.lower()\n day = day.lower()\n\n #Loads CSV based on the city, result returned in df.\n df = pd.read_csv(CITY_DATA[city.lower()])\n\n #Converts string date into datetime format.\n df['Start Time'] = pd.to_datetime(df['Start Time'])\n #Extracts month, day of week, and start hour from Start Time to create new columns.\n df['month'] = df['Start Time'].dt.month\n df['day_of_week'] = df['Start Time'].dt.weekday_name\n df['start_hour'] = df['Start Time'].dt.hour\n\n #Filters by month.\n if month != 'all':\n months = ['january', 'february', 'march', 'april', 'may', 'june']\n month = months.index(month) + 1\n\n #Filters by month to create the new dataframe.\n df = df[df['month'] == month]\n\n #Filters by day of week if applicable.\n if day != 'all':\n #Filters by day of week to create the new dataframe.\n df = df[df['day_of_week'] == day.title()]\n return df\n\ndef time_stats(df):\n \"\"\"Displays statistics on the most frequent times of travel.\"\"\"\n\n print('\\nCalculating The Most Frequent Times of Travel...\\n')\n #Starts tracking the time to process the function.\n start_time = time.time()\n\n #Display the most common month.\n popular_month = df['month'].mode()[0]\n print('The most common month is {}'.format(popular_month))\n\n #Displays the most common day of week.\n common_dow = df['day_of_week'].mode()[0]\n print('The most common day of the week is {}'.format(common_dow))\n\n #Displays the most common start hour.\n common_start_hour = df['start_hour'].mode()[0]\n print('The most common start hour is {}'.format(common_start_hour))\n\n #Prints out the time the function took to be processed.\n print(\"\\nThis took %s seconds.\" % (time.time() - start_time))\n print('-'*40)\n\n\ndef station_stats(df):\n \"\"\"Displays statistics on the most popular stations and trip.\"\"\"\n\n print('\\nCalculating The Most Popular Stations and Trip...\\n')\n #Starts tracking the time to process the function.\n start_time = time.time()\n\n #Most frequently used start station.\n most_start_station = df['Start Station'].mode()[0]\n print('The most commonly used start station is: {}'.format(most_start_station))\n\n #Most frequently used end station.\n most_end_station = df['End Station'].mode()[0]\n print('The most commonly used end stations is: {}'.format(most_end_station))\n\n #Most frequently used combination of start and end station.\n df['Start and End Station'] = '\\nTo start at' + ' ' + df['Start Station'] + ' ' + '\\nAnd end at'+ ' ' + df['End Station']\n combined_station = df['Start and End Station'].mode()[0]\n print('The most popular travel combination is: {}'.format(combined_station))\n\n #Prints out the time the function took to be processed.\n print(\"\\nThis took %s seconds.\" % (time.time() - start_time))\n print('-'*40)\n\n\ndef trip_duration_stats(df):\n \"\"\"Displays statistics on the total and average trip duration.\"\"\"\n\n print('\\nCalculating Trip Duration...\\n')\n start_time = time.time()\n\n #Takes the sum of the trip duration.\n total_travel = df['Trip Duration'].sum()\n print('Total trip duration for this is: {}'.format(float(total_travel)))\n\n #Calculates the average of the trip duration.\n mean_travel = df['Trip Duration'].mean()\n print('Average trip duration is: {}'.format(float(mean_travel)))\n\n #Prints out the time the function took 
to be processed.\n print(\"\\nThis took %s seconds.\" % (time.time() - start_time))\n print('-'*40)\n\n\ndef user_stats(df):\n \"\"\"Displays statistics on bikeshare users.\"\"\"\n\n print('\\nCalculating User Stats...\\n')\n start_time = time.time()\n\n #Takes a count of and groups the user types together.\n user_count = df['User Type'].value_counts()\n print('Total counts for user types are:\\n{}'.format(user_count))\n\n #Catches if Gender is not present in the CSV file\n try:\n #Takes a count of and groups the genders together.\n gender_count = df['Gender'].value_counts()\n print('\\nTotal gender counts are:\\n{}'.format(gender_count))\n except KeyError:\n pass\n\n #Catches if birth year is not present in the CSV file.\n try:\n\n #Takes the minimum, maximum, and most common Birth Year.\n earliest_birth_year = df['Birth Year'].min()\n recent_birth_year = df['Birth Year'].max()\n common_birth_year = df['Birth Year'].mode()[0]\n print('\\nThe earliest, recent, and most common birth years are {}, {}, and {} respectively.'.format(int(earliest_birth_year),int(recent_birth_year),int(common_birth_year)))\n except KeyError:\n pass\n\n print(\"\\nThis took %s seconds.\" % (time.time() - start_time))\n print('-'*40)\n\ndef raw_data(df):\n #Defined variables used for a while loop that takes additional rows depending on user input.\n i = 5\n v = 10\n ldf = len(df)\n #Loops until answer matches 'yes' or 'no'.\n userinput = input('Would you like to see the raw data? (yes/no)').lower()\n while userinput not in('yes','no'):\n print(\"Please enter 'yes' or 'no'\")\n userinput = input('Would you like to see the raw data? (yes/no)').lower()\n #If answer in loop was 'yes', it prints the first 5 rows of data.\n if userinput in('yes'):\n print(df.iloc[0:5,:])\n #Prompts user if they want 5 more rows of data and loops until they answer 'no'.\n while v <= ldf:\n more_rows = input('Would you like to see an additional 5 rows of data? (yes/no)').lower()\n if more_rows in('yes'):\n print(df.iloc[i:v,:])\n i+=5\n v+=5\n else:\n if more_rows in ('no'):\n break\n else:\n print('Please enter a valid input')\n\n\ndef main():\n #This calls all the declared functions.\n while True:\n city, month, day = get_filters()\n df = load_data(city, month, day)\n\n time_stats(df)\n station_stats(df)\n trip_duration_stats(df)\n user_stats(df)\n raw_data(df)\n\n #Prompts the user if they want to run this again.\n restart = input('\\nWould you like to restart? Enter yes or no.\\n')\n if restart.lower() != 'yes':\n break\n\n\nif __name__ == \"__main__\":\n\tmain()\n" }, { "alpha_fraction": 0.7119565010070801, "alphanum_fraction": 0.73097825050354, "avg_line_length": 29.66666603088379, "blob_id": "3a3d89709e93fabf4b4920e1901fc4913242c612", "content_id": "a0a3ba5592f7391d96d2f15e9afc384db354c320", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 368, "license_type": "no_license", "max_line_length": 104, "num_lines": 12, "path": "/README.md", "repo_name": "mapjohns/pdsnd_github", "src_encoding": "UTF-8", "text": "# Github Project On Bikeshare Project\n***Project Started On 11/01/18***\n\n### What's Included:\nA python :snake: project that allows a user to pull data on city transportation depending on user input.\n\n\n### Files Included:\nREADME.md :book:, Bikeshare.py :bike:, gitignore file with csv datasets :rocket:\n\n\n##### Credit to @rbUdacProjects for the forked repository. :+1:\n" } ]
2
aliceson89/PythonWebScarapper
https://github.com/aliceson89/PythonWebScarapper
f44abf2a8b704d70bc99d7ec8dfd65be4db26d75
3fe39b34a2327d5abe612958adbe737f9b3fb1fa
62910654dfde5786067150f320378120c358644a
refs/heads/master
2023-02-17T11:31:41.227444
2021-01-18T19:59:53
2021-01-18T19:59:53
330,773,507
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6117179989814758, "alphanum_fraction": 0.6330685019493103, "avg_line_length": 34.96428680419922, "blob_id": "c152d97c903d92dafe4e4e2f6400b284d2dc1f57", "content_id": "20ae66c037625ce46066edd522f24b8097300594", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2014, "license_type": "no_license", "max_line_length": 314, "num_lines": 56, "path": "/so.py", "repo_name": "aliceson89/PythonWebScarapper", "src_encoding": "UTF-8", "text": "import requests\nfrom bs4 import BeautifulSoup\nURL = f\"https://stackoverflow.com/jobs?q=python\"\n\n\ndef get_last_page():\n result = requests.get(URL)\n soup = BeautifulSoup(result.text, \"html.parser\")\n #<a href=\"/jobs?q=python&amp;so_source=JobSearch&amp;so_medium=Internal\" title=\"page 1 of 15\" class=\"s-pagination--item is-selected\">\n pages = soup.find(\"div\", {\"class\": \"s-pagination\"}).find_all(\"a\")\n #print(pages)\n #strip = True : remove whitespace\n last_page = pages[-2].get_text(strip=True)\n return int(last_page)\n\n\ndef extract_job(html):\n #<h2 class=\"mb4 fc-black-800 fs-body3\">\n #<a href=\"/jobs/185876/senior-software-engineer-frontend-deepfield-networks?a=10kTLndJTsqc&amp;so=i&amp;pg=2&amp;offset=0&amp;total=361&amp;so_medium=Internal&amp;so_source=JobSearch&amp;q=python\" title=\"Senior Software Engineer (Frontend)\" class=\"s-link stretched-link\">Senior Software Engineer (Frontend)</a>\n #</h2>\n title = html.find(\"h2\", {\"class\": \"mb4\"}).find(\"a\")[\"title\"]\n #print(title)\n #<h3 class=\"fc-black-700 fs-body1 mb4\">\n # <span>Deepfield Networks</span>\n # <span class=\"fc-black-500\">Ann Arbor, MI</span>\n #</h3>\n #what is the meaning of recrusive??\n company, location = html.find(\"h3\", {\n \"class\": \"mb4\"\n }).find_all(\n \"span\", recrusive=False)\n print(company.get_text(strip=True), location.get_text(strip=True))\n\n return {'title': title}\n\n\ndef extract_jobs(last_page):\n jobs = []\n for page in range(last_page):\n print(page + 1)\n result = requests.get(f\"{URL}&pg={page+1}\")\n #print(result.status_code)\n #<div class=\"grid--cell fl1 \">\n soup = BeautifulSoup(result.text, \"html.parser\")\n results = soup.find_all(\"div\", {\"class\": \"grid--cell fl1\"})\n\n for result in results:\n #print(result[\"data-jobid\"])\n job = extract_job(result)\n jobs.append(job)\n\n\ndef get_jobs():\n last_page = get_last_page()\n jobs = extract_jobs(last_page)\n return jobs\n" } ]
1
PhilMano/qgis-newraptor
https://github.com/PhilMano/qgis-newraptor
d5144ef1183839d9b72e9eaecd4f007445893779
a272bac88f6963226f401c62182b6766a91a84a4
72662297d1fe8e5810a38e350145b3eee110e45c
refs/heads/main
2023-03-25T09:44:27.595130
2021-03-16T05:15:01
2021-03-16T05:15:01
346,917,990
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.7758620977401733, "alphanum_fraction": 0.7758620977401733, "avg_line_length": 28, "blob_id": "f4dd2e4a4a2375bf57b294edb1e2a2001f914e89", "content_id": "66086b1ff77d6fffff0dea49ecd4a14fa38c5f75", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 58, "license_type": "permissive", "max_line_length": 40, "num_lines": 2, "path": "/README.md", "repo_name": "PhilMano/qgis-newraptor", "src_encoding": "UTF-8", "text": "# qgis-newraptor\nAdd a new raptor to the Dj Basin Project\n" }, { "alpha_fraction": 0.5743626356124878, "alphanum_fraction": 0.577399492263794, "avg_line_length": 38.21943664550781, "blob_id": "c4410d95a6a7159ef272b84abf05c33a276b6811", "content_id": "4818e6b0eb5672ea16835bece38dcd9761a645ea", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12527, "license_type": "permissive", "max_line_length": 122, "num_lines": 319, "path": "/new_raptor.py", "repo_name": "PhilMano/qgis-newraptor", "src_encoding": "UTF-8", "text": " # -*- coding: utf-8 -*-\n\"\"\"\n/***************************************************************************\n NewRaptor\n A QGIS plugin\n Add a new raptor nest, creat buffer, and impact table\n Generated by Plugin Builder: http://g-sherman.github.io/Qgis-Plugin-Builder/\n -------------------\n begin : 2021-03-12\n git sha : $Format:%H$\n copyright : (C) 2021 by UEMA\n email : [email protected]\n ***************************************************************************/\n\n/***************************************************************************\n * *\n * This program is free software; you can redistribute it and/or modify *\n * it under the terms of the GNU General Public License as published by *\n * the Free Software Foundation; either version 2 of the License, or *\n * (at your option) any later version. 
*\n * *\n ***************************************************************************/\n\"\"\"\nfrom qgis.PyQt.QtCore import QSettings, QTranslator, QCoreApplication, QDate\nfrom qgis.PyQt.QtGui import QIcon\nfrom qgis.PyQt.QtWidgets import QAction, QMessageBox, QTableWidgetItem\n\nfrom qgis.core import QgsProject, QgsFeature, QgsGeometry, QgsPoint\n\n# Initialize Qt resources from file resources.py\nfrom .resources import *\n# Import the code for the dialog\nfrom .new_raptor_dialog import NewRaptorDialog\nfrom .impact_table import DlgTable\nimport os.path\n\n\nclass NewRaptor:\n \"\"\"QGIS Plugin Implementation.\"\"\"\n\n def __init__(self, iface):\n \"\"\"Constructor.\n\n :param iface: An interface instance that will be passed to this class\n which provides the hook by which you can manipulate the QGIS\n application at run time.\n :type iface: QgsInterface\n \"\"\"\n # Save reference to the QGIS interface\n self.iface = iface\n # initialize plugin directory\n self.plugin_dir = os.path.dirname(__file__)\n # initialize locale\n locale = QSettings().value('locale/userLocale')[0:2]\n locale_path = os.path.join(\n self.plugin_dir,\n 'i18n',\n 'NewRaptor_{}.qm'.format(locale))\n\n if os.path.exists(locale_path):\n self.translator = QTranslator()\n self.translator.load(locale_path)\n QCoreApplication.installTranslator(self.translator)\n\n # Declare instance attributes\n self.actions = []\n self.menu = self.tr(u'&Add New Raptor')\n\n # Check if plugin was started the first time in current QGIS session\n # Must be set in initGui() to survive plugin reloads\n self.first_start = None\n\n # noinspection PyMethodMayBeStatic\n def tr(self, message):\n \"\"\"Get the translation for a string using Qt translation API.\n\n We implement this ourselves since we do not inherit QObject.\n\n :param message: String for translation.\n :type message: str, QString\n\n :returns: Translated version of message.\n :rtype: QString\n \"\"\"\n # noinspection PyTypeChecker,PyArgumentList,PyCallByClass\n return QCoreApplication.translate('NewRaptor', message)\n\n\n def add_action(\n self,\n icon_path,\n text,\n callback,\n enabled_flag=True,\n add_to_menu=True,\n add_to_toolbar=True,\n status_tip=None,\n whats_this=None,\n parent=None):\n \"\"\"Add a toolbar icon to the toolbar.\n\n :param icon_path: Path to the icon for this action. Can be a resource\n path (e.g. ':/plugins/foo/bar.png') or a normal file system path.\n :type icon_path: str\n\n :param text: Text that should be shown in menu items for this action.\n :type text: str\n\n :param callback: Function to be called when the action is triggered.\n :type callback: function\n\n :param enabled_flag: A flag indicating if the action should be enabled\n by default. Defaults to True.\n :type enabled_flag: bool\n\n :param add_to_menu: Flag indicating whether the action should also\n be added to the menu. Defaults to True.\n :type add_to_menu: bool\n\n :param add_to_toolbar: Flag indicating whether the action should also\n be added to the toolbar. Defaults to True.\n :type add_to_toolbar: bool\n\n :param status_tip: Optional text to show in a popup when mouse pointer\n hovers over the action.\n :type status_tip: str\n\n :param parent: Parent widget for the new action. Defaults None.\n :type parent: QWidget\n\n :param whats_this: Optional text to show in the status bar when the\n mouse pointer hovers over the action.\n\n :returns: The action that was created. 
Note that the action is also\n            added to self.actions list.\n        :rtype: QAction\n        \"\"\"\n\n        icon = QIcon(icon_path)\n        action = QAction(icon, text, parent)\n        action.triggered.connect(callback)\n        action.setEnabled(enabled_flag)\n\n        if status_tip is not None:\n            action.setStatusTip(status_tip)\n\n        if whats_this is not None:\n            action.setWhatsThis(whats_this)\n\n        if add_to_toolbar:\n            # Adds plugin icon to Plugins toolbar\n            self.iface.addToolBarIcon(action)\n\n        if add_to_menu:\n            self.iface.addPluginToVectorMenu(\n                self.menu,\n                action)\n\n        self.actions.append(action)\n\n        return action\n\n    def initGui(self):\n        \"\"\"Create the menu entries and toolbar icons inside the QGIS GUI.\"\"\"\n\n        icon_path = ':/plugins/new_raptor/icon.png'\n        self.add_action(\n            icon_path,\n            text=self.tr(u'Add New Raptor nest'),\n            callback=self.run,\n            parent=self.iface.mainWindow())\n\n        # will be set False in run()\n        self.first_start = True\n\n\n    def unload(self):\n        \"\"\"Removes the plugin menu item and icon from QGIS GUI.\"\"\"\n        for action in self.actions:\n            self.iface.removePluginVectorMenu(\n                self.tr(u'&Add New Raptor'),\n                action)\n            self.iface.removeToolBarIcon(action)\n\n\n    def run(self):\n        \"\"\"Run method that performs all the real work\"\"\"\n\n        # Create the dialog with elements (after translation) and keep reference\n        # Only create GUI ONCE in callback, so that it will only load when the plugin is started\n        if self.first_start == True:\n            self.first_start = False\n            self.dlg = NewRaptorDialog()\n\n            # Update the default buffer distance whenever the species selection changes\n            self.dlg.cmbSpecies.currentTextChanged.connect(self.evt_cmbSpecies_changed)\n\n\n        mc = self.iface.mapCanvas()\n\n        self.dlg.spbLatitude.setValue(mc.center().y())\n        self.dlg.spbLongitude.setValue(mc.center().x())\n        self.dlg.dteLast.setDate(QDate.currentDate())\n\n\n        # List of the layers in the map\n        map_layers = []\n        for lyr in mc.layers():\n            map_layers.append(lyr.name())\n\n        # Puts the layer names into a message box titled 'Layers'\n        #QMessageBox.information(self.dlg, \"Layers\", str(map_layers))\n\n        # Shows a message box if any of the layers listed below cannot be found\n        missing_layers = []\n        if not 'Raptor Nests' in map_layers:\n            missing_layers.append('Raptor Nests')\n        if not 'Raptor Buffer' in map_layers:\n            missing_layers.append('Raptor Buffer')\n        if not 'Linear Buffer' in map_layers:\n            missing_layers.append('Linear Buffer')\n\n        if missing_layers:\n            msg = 'The following layers are missing from this project\\n'\n            for lyr in missing_layers:\n                msg += f'\\n   {lyr}'\n            QMessageBox.critical(self.dlg, 'Missing layers', msg)\n            # The return stops execution of the remaining lines\n            return\n\n\n        # show the dialog\n        self.dlg.show()\n        # Run the dialog event loop\n        result = self.dlg.exec_()\n        # See if OK was pressed\n        if result:\n            # Do something useful here - delete the line containing pass and\n            # substitute with your code.\n\n            # mapLayersByName returns a list of layers named 'Raptor Nests'; we take only the first one\n            lyrNests = QgsProject.instance().mapLayersByName('Raptor Nests')[0]\n            lyrBuffer = QgsProject.instance().mapLayersByName('Raptor Buffer')[0]\n            lyrLinear = QgsProject.instance().mapLayersByName('Linear Buffer')[0]\n            # Gets the index of the Nest_ID field and derives the next ID from its maximum value\n            idxNestID = lyrNests.fields().indexOf('Nest_ID')\n            valNestID = lyrNests.maximumValue(idxNestID) + 1\n\n            # Reads the values that were entered in the dialog\n            valLat = self.dlg.spbLatitude.value()\n            valLng = self.dlg.spbLongitude.value()\n            valSpecies = self.dlg.cmbSpecies.currentText()\n            valBuffer = self.dlg.spbBuffer.value()\n            valStatus = self.dlg.cmbStatus.currentText()\n            valLast = self.dlg.dteLast.date()\n            QMessageBox.information(self.dlg, 'Message', f'New Nest ID: {valNestID}\\n \\nLatitude: {valLat} '\n                                                         f'\\nLongitude: {valLng} \\nSpecies: {valSpecies} '\n                                                         f'\\nBuffer: {valBuffer} \\nStatus: {valStatus} \\nLast: {valLast}')\n            # Creates a new QgsFeature object with the same fields as lyrNests\n            # (the Raptor Nests layer object)\n            ftrNest = QgsFeature(lyrNests.fields())\n            # Sets the attributes according to the field names of the Raptor Nests layer\n            ftrNest.setAttribute('lat_y_dd', valLat)\n            ftrNest.setAttribute('long_x_dd', valLng)\n            ftrNest.setAttribute('recentspec', valSpecies)\n            ftrNest.setAttribute('buf_dist', valBuffer)\n            ftrNest.setAttribute('recentstat', valStatus)\n            ftrNest.setAttribute('lastsurvey', valLast)\n            ftrNest.setAttribute('Nest_ID', valNestID)\n\n            # Builds the point geometry for the feature\n            geom = QgsGeometry(QgsPoint(valLng, valLat))\n            ftrNest.setGeometry(geom)\n\n            # Each data source can have a different provider; QGIS has extensions for several provider types.\n            # The call below fetches the provider and adds the data through it.\n            pr = lyrNests.dataProvider()\n            pr.addFeatures([ftrNest])\n            lyrNests.reload()\n\n            # Adds the buffered feature to the buffer layer\n            pr = lyrBuffer.dataProvider()\n            buffer = geom.buffer(valBuffer, 10)\n            ftrNest.setGeometry(buffer)\n            pr.addFeatures([ftrNest])\n            lyrBuffer.reload()\n\n            dlgTable = DlgTable()\n            dlgTable.setWindowTitle(f\"Impacts Table for Nest {valNestID}\")\n\n            # Find linear projects that will be impacted and report them in the table\n            bb = buffer.boundingBox()\n            linears = lyrLinear.getFeatures(bb)\n            for linear in linears:\n                valID = linear.attribute('Project')\n                valType = linear.attribute('type')\n                valDistance = linear.geometry().distance(geom)\n                if valDistance < valBuffer:\n                    #Populate the table with linear project data\n                    row = dlgTable.tblImpacts.rowCount()\n                    dlgTable.tblImpacts.insertRow(row)\n                    dlgTable.tblImpacts.setItem(row, 0, QTableWidgetItem(str(valID)))\n                    dlgTable.tblImpacts.setItem(row, 1, QTableWidgetItem(valType))\n                    twi = QTableWidgetItem(f\"{valDistance:4.5f}\")\n                    twi.setTextAlignment(QtCore.Qt.AlignRight)\n                    dlgTable.tblImpacts.setItem(row, 2, twi)\n\n            dlgTable.tblImpacts.sortItems(2)\n            dlgTable.show()\n            dlgTable.exec_()\n\n        else:\n            QMessageBox.information(self.dlg, \"Message\", \"Should only run if cancelled\")\n\n    def evt_cmbSpecies_changed(self, species):\n        if species == 'Swainsons Hawk':\n            self.dlg.spbBuffer.setValue(0.004)\n        else:\n            self.dlg.spbBuffer.setValue(0.008)\n\n" }, { "alpha_fraction": 0.6603053212165833, "alphanum_fraction": 0.6755725145339966, "avg_line_length": 25.299999237060547, "blob_id": "3595e5ac61560af2e133411840f0624c6c6e03bd", "content_id": "b6c2df8060dcee84bbe60deee0f48f58a070833b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 262, "license_type": "permissive", "max_line_length": 46, "num_lines": 10, "path": "/impact_table.py", "repo_name": "PhilMano/qgis-newraptor", "src_encoding": "UTF-8", "text": "from qgis.PyQt.QtWidgets import *\nfrom .impact_table_dialog import Ui_dlgImpacts\n\n\nclass DlgTable(QDialog, Ui_dlgImpacts):\n    def __init__(self):\n        super(DlgTable, self).__init__()\n        self.setupUi(self)\n\n        self.tblImpacts.setColumnWidth(1, 200)" }, { "alpha_fraction": 0.6833333373069763, "alphanum_fraction": 0.7018518447875977, "avg_line_length": 40.512821197509766, "blob_id": "d04618769d54d5541508528e5647337adff47191", 
"content_id": "901b5083d1f6546e2640fa2215422d7efdb0c4e0", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1620, "license_type": "permissive", "max_line_length": 76, "num_lines": 39, "path": "/impact_table_dialog.py", "repo_name": "PhilMano/qgis-newraptor", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n# Form implementation generated from reading ui file 'impact_table.ui'\n#\n# Created by: PyQt5 UI code generator 5.11.3\n#\n# WARNING! All changes made in this file will be lost!\n\nfrom PyQt5 import QtCore, QtGui, QtWidgets\n\nclass Ui_dlgImpacts(object):\n def setupUi(self, dlgImpacts):\n dlgImpacts.setObjectName(\"dlgImpacts\")\n dlgImpacts.resize(400, 300)\n self.tblImpacts = QtWidgets.QTableWidget(dlgImpacts)\n self.tblImpacts.setGeometry(QtCore.QRect(5, 11, 391, 281))\n self.tblImpacts.setAlternatingRowColors(True)\n self.tblImpacts.setObjectName(\"tblImpacts\")\n self.tblImpacts.setColumnCount(3)\n self.tblImpacts.setRowCount(0)\n item = QtWidgets.QTableWidgetItem()\n self.tblImpacts.setHorizontalHeaderItem(0, item)\n item = QtWidgets.QTableWidgetItem()\n self.tblImpacts.setHorizontalHeaderItem(1, item)\n item = QtWidgets.QTableWidgetItem()\n self.tblImpacts.setHorizontalHeaderItem(2, item)\n\n self.retranslateUi(dlgImpacts)\n QtCore.QMetaObject.connectSlotsByName(dlgImpacts)\n\n def retranslateUi(self, dlgImpacts):\n _translate = QtCore.QCoreApplication.translate\n dlgImpacts.setWindowTitle(_translate(\"dlgImpacts\", \"Impacts Table\"))\n item = self.tblImpacts.horizontalHeaderItem(0)\n item.setText(_translate(\"dlgImpacts\", \"Project\"))\n item = self.tblImpacts.horizontalHeaderItem(1)\n item.setText(_translate(\"dlgImpacts\", \"Type\"))\n item = self.tblImpacts.horizontalHeaderItem(2)\n item.setText(_translate(\"dlgImpacts\", \"Distance\"))\n\n" } ]
4
Bryan-Barrows/03-Python
https://github.com/Bryan-Barrows/03-Python
0354fbd7683a5e8536a12575d29803e19494f559
577df98a515cb5cd3d4d1916dc1abb86fb5a4024
eb6ca80c31725f512ed1207df2eac8243a07fbc8
refs/heads/master
2020-04-14T17:44:02.219875
2019-01-03T15:45:31
2019-01-03T15:45:31
163,992,048
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.44224095344543457, "alphanum_fraction": 0.4481903910636902, "avg_line_length": 26.938461303710938, "blob_id": "8e855a3514746ae7c6a2f76630d5e7f76d9c2f29", "content_id": "ac174a6f39896fe6b22804ed4973f68d0cec03ba", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2017, "license_type": "no_license", "max_line_length": 93, "num_lines": 65, "path": "/PyPoll/main.py", "repo_name": "Bryan-Barrows/03-Python", "src_encoding": "UTF-8", "text": "# import resources\nimport os\nimport csv\n\n# Path to collect data from resources folder\nelection_data = os.path.join(\"Resources\", \"election_data.csv\")\n\n# Read in the CSV file\nwith open(election_data, 'r') as csvfile:\n\n # Split the data on commas\n csvreader = csv.reader(csvfile, delimiter=',')\n \n # remove headers\n csv_header = next(csvreader)\n \n votes = 0\n candidates = {}\n candidates_percent = {}\n winner = \"\"\n winner_count = 0\n \n \n for row in csvreader:\n \n votes += 1\n if row[2] in candidates.keys():\n candidates[row[2]] += 1\n else:\n candidates[row[2]] = 1\n \n for key,value in candidates.items():\n candidates_percent[key] = round((value/votes) * 100, 2)\n \n \n \n for key in candidates.keys():\n if candidates[key] > winner_count:\n winner = key\n winner_count = candidates[key]\n \n \n \n print(\"Election Results\")\n print(\"------------------\")\n print(\"Total Votes: \" + str(votes))\n print(\"------------------\")\n for key, value in candidates.items():\n print(key + \": \" + str(candidates_percent[key]) + \"% (\" + str(value) + \")\")\n print(\"-------------------\")\n print(\"Winner: \" + winner)\n print(\"-------------------\")\n \n\n output = open(\"Output/results.txt\", \"w\")\n\n output.write(\"Election Results \\n\")\n output.write(\"------------------ \\n\")\n output.write(\"Total Votes: \" + str(votes) + \"\\n\")\n output.write(\"------------------ \\n\")\n for key, value in candidates.items():\n output.write(key + \": \" + str(candidates_percent[key]) + \"% (\" + str(value) + \") \\n\")\n output.write(\"------------------- \\n\")\n output.write(\"Winner: \" + winner + \"\\n\")\n output.write(\"------------------- \\n\") \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n " }, { "alpha_fraction": 0.5611854791641235, "alphanum_fraction": 0.5640535354614258, "avg_line_length": 33.40678024291992, "blob_id": "2be7ade2deb6126a4f95140a9da1e2ebe5eb7f07", "content_id": "bfb7c9297d1de3a0eb8abe112f357f4010937e3f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2092, "license_type": "no_license", "max_line_length": 114, "num_lines": 59, "path": "/PyBank/main.py", "repo_name": "Bryan-Barrows/03-Python", "src_encoding": "UTF-8", "text": "# import resources\nimport os\nimport csv\n\n# Path to collect data from resources folder\nbudget_data = os.path.join(\"Resources\", \"budget_data.csv\")\n\n# Read in the CSV file\nwith open(budget_data, 'r') as csvfile:\n\n # Split the data on commas\n csvreader = csv.reader(csvfile, delimiter=',')\n \n # remove headers\n csv_header = next(csvreader)\n \n # Calculate total number of months\n date = [] \n revenue = []\n rev_change = []\n \n for row in csvreader:\n \n revenue.append(float(row[1]))\n date.append(row[0])\n \n print(\"Financial Analysis\")\n print(\"----------------------\")\n print(\"Total Months:\", len(date))\n print(\"Total Revenue: $\", sum(revenue))\n \n for i in range(1,len(revenue)):\n \n rev_change.append(revenue[i] - 
revenue[i - 1])\n    avg_rev_change = sum(rev_change)/len(rev_change)\n\n    #rev_change[i] is the change from month i to month i+1, so the matching date is offset by one.\n    max_rev_change = max(rev_change)\n    max_rev_change_date = str(date[rev_change.index(max_rev_change) + 1])\n\n    min_rev_change = min(rev_change)\n    min_rev_change_date = str(date[rev_change.index(min_rev_change) + 1])\n\n    print(\"Average Revenue Change: $\", round(avg_rev_change))\n    print(\"Greatest Increase in Revenue:\", max_rev_change_date,\"($\", max_rev_change,\")\")\n    print(\"Greatest Decrease in Revenue:\", min_rev_change_date,\"($\", min_rev_change,\")\")\n\n    output = open(\"Output/results.txt\", \"w\")\n\n    output.write(\"Financial Analysis \\n\")\n    output.write(\"------------------ \\n\")\n    output.write(\"Total Months: \" + str(len(date)) + \"\\n\")\n    output.write(\"Total Revenue: $\" + str(sum(revenue)) + \"\\n\")\n    output.write(\"------------------ \\n\")\n    output.write(\"Average Revenue Change: $\" + str(round(avg_rev_change)) + \"\\n\")\n    output.write(\"Greatest Increase in Revenue:\" + str(max_rev_change_date) + \"($\" + str(max_rev_change) + \") \\n\")\n    output.write(\"Greatest Decrease in Revenue:\" + str(min_rev_change_date) + \"($\" + str(min_rev_change) + \") \\n\")\n" } ]
2
guoyixiang/ICTestingScripts_Git
https://github.com/guoyixiang/ICTestingScripts_Git
244e44ec786ef7f212f888d3793c6a3d1efb4933
86db91c545fc8bcbf120ff2d630be17ac81fddce
94554d381e0e8dcc176808d0589138bcca9b1783
refs/heads/master
2020-03-27T03:04:05.491338
2018-08-24T01:03:02
2018-08-24T01:03:02
145,837,428
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6800000071525574, "alphanum_fraction": 0.6800000071525574, "avg_line_length": 24.33333396911621, "blob_id": "2027ffe198cf6e84b00916ec62729b2808302960", "content_id": "64eea3ddd00f3128720a4cbc718d00c405d8c88a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 75, "license_type": "no_license", "max_line_length": 33, "num_lines": 3, "path": "/trunk/HelloWorld.py", "repo_name": "guoyixiang/ICTestingScripts_Git", "src_encoding": "UTF-8", "text": "last_name = \"karl\"\nfirst_name = \"yixiang\"\nprint(\"Hello world!\" + last_name)" } ]
1
teh-nicKLess/MultiLeap
https://github.com/teh-nicKLess/MultiLeap
b5b17d4b462d099e825f90b6bfc0345c19ed77e7
5d451a328a5ec45d5edb0f709d00a2b5d5960234
04fa6484d134eb18bd72fbc2e992a2aadae8fba9
refs/heads/master
2020-04-02T14:02:38.728705
2018-10-31T18:48:23
2018-10-31T18:48:23
136,618,117
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6669865846633911, "alphanum_fraction": 0.6880998015403748, "avg_line_length": 20.285715103149414, "blob_id": "399bea1a861dcbce8236746e54a40fab9e527c3c", "content_id": "2fa790352f64b8eb810b1921936cc60fd61b93d9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1042, "license_type": "no_license", "max_line_length": 83, "num_lines": 49, "path": "/ws_test.py", "repo_name": "teh-nicKLess/MultiLeap", "src_encoding": "UTF-8", "text": "import sys\n\nif sys.version_info[0] > 2:\n raise Exception(\"Only python 2 currently\")\n\n\nfrom websocket import create_connection\n\nhost = 'localhost'\nport = '6437'\n\nif len(sys.argv) > 1:\n host = sys.argv[1]\n\nif len(sys.argv) > 2:\n port = sys.argv[2]\n\n# open connection to web socket and enable continous sending of background info\nurl = 'ws://' + host + ':' + port + '/v6.json'\nws = create_connection(url)\nws.send('{\\\"background\\\": true}')\n\nversion_info = \"\"\nconfig_info = \"\"\nfirst_data = \"\"\n\n'''\nAttempt to receive three packets. First and second are always received,\nthird only if Leap is sending data.\nIf no third packet is received within 0.2 seconds a timeout exception is triggered,\nending the script execution with code \"0\"\n'''\nws.settimeout(0.2)\ntry:\n\tversion_info = ws.recv()\n\tconfig_info = ws.recv()\n\tfirst_data = ws.recv()\nexcept Exception:\n\tws.close()\n\tsys.exit(0)\n\nws.close()\n\n# Test if third packet starts with expected values\nif str(first_data)[:19] != \"{\\\"currentFrameRate\\\"\":\n\t\tsys.exit(0)\n\n# All seems in order.\nsys.exit(1)" }, { "alpha_fraction": 0.5912274122238159, "alphanum_fraction": 0.5993365049362183, "avg_line_length": 25.851484298706055, "blob_id": "96fad26c59c77c9bd74102d38273ec3f8558bf44", "content_id": "efd548f3ead99b9e357adf4122e42ccbd0c7727e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 2713, "license_type": "no_license", "max_line_length": 164, "num_lines": 101, "path": "/start_multileap_docker.sh", "repo_name": "teh-nicKLess/MultiLeap", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\nstop_containers() {\n ids=$(docker ps -q --filter ancestor=$docker_name)\n if [[ -n \"${ids}\" ]]; then\n echo \"Stopping running leap docker container(s). 
Please wait.\"\n docker stop $ids\n fi\n}\n\n\n#################################\n#################################\n#### ####\n#### EXECUTION STARTS HERE ####\n#### ####\n#################################\n#################################\n\n# If there are still old leap containers running, stop them.\nstop_containers\n\nws_starting_port=51000\ndocker_name=\"leap-docker\"\n\n# get device and bus of all connected leap motions\ndevice_ids=($(lsusb | grep \"Leap Motion Controller\" | sed s/\" Device \"/\"\\/\"/g | sed s/\"Bus \"//g | sed s/:[A-Za-z0-9[:space:]:]*$//g))\n\nnum_dev=${#device_ids[@]}\necho \"Found [\" $num_dev \"] leap motion devices.\"\n\n\nif (( num_dev < 1 )); then\n exit\nfi\n\n\necho \"Creating video devices\"\ndeclare -A video_ids\ncounter=1\ndevs=\"0\"\nfor dev in \"${device_ids[@]}\"; do\n\tvideo_ids[$dev]=$counter\n\tdevs=$devs\",\"$counter\n\tcounter=$((counter+1))\ndone\n\nsudo modprobe -r v4l2loopback\nsudo modprobe v4l2loopback video_nr=$devs\n\necho \"---------------------------------------\"\n\n# run docker container for each leap\nfor dev in \"${device_ids[@]}\"; do\n echo \"Starting docker for device\" $dev\n echo docker run -d -e PORT=$ws_starting_port -e DEV=/dev/video${video_ids[$dev]} --device=/dev/bus/usb/$dev --device=/dev/video${video_ids[$dev]} $docker_name\n \n\tsuccess=0\n \n while ((success == 0)); do\n\t\t# Run docker for current leap\n\t\tCID=$(docker run -d -e PORT=$ws_starting_port -e DEV=/dev/video${video_ids[$dev]} --device=/dev/bus/usb/$dev --device=/dev/video${video_ids[$dev]} $docker_name &)\n \n\t\tleap_ip=$(docker inspect --format '{{ .NetworkSettings.IPAddress }}' $CID)\n \n\t\t# Starting of leap tends to fail more often if not for the sleep\n\t\t# Also the python test script will throw a connection rrefused error\n\t\techo \"Waiting for initialization...\"\n\t\tsleep 1\n \n\t\t# Test if docker and leap were started succesfully\n\t\tresult=$(python ws_test.py $leap_ip $ws_starting_port)\n\t\tsuccess=$?\n\t\t\n\t\t# If start was not successful, stop current docker before trying again\n\t\tif ((success == 0)); then\n\t\t\techo \"Initialization failed. Retrying...\"\n\t\t\tid_last=$(docker ps -q --last 1 --filter ancestor=$docker_name)\n\t\t\tdocker stop $id_last\n\t\tfi\n\t\t\n\tdone\n \n echo \"Started leap daemon on \"$leap_ip\":\"$ws_starting_port\" with video stream at /dev/video\"${video_ids[$dev]}\n echo \"---------------------------------------\"\n ws_starting_port=$((ws_starting_port+1))\ndone\n\n\n# docker should be stopped on exit\ntrap ctrl_c SIGINT\nfunction ctrl_c() {\n stop_containers\n exit\n}\n\n# just stay alive\nwhile :\ndo\n sleep 1\ndone\n\n" } ]
2
JefftheGreen/charsheets
https://github.com/JefftheGreen/charsheets
b3d0bc2f28c87f5448dbc6e8192b9b42cb13312a
8bb99f077dcd0820b66007fd905d470dd450946f
112256fd697323dee0f5be4a46fa67e1fcbc01ea
refs/heads/master
2021-05-24T01:20:44.778064
2016-06-23T21:29:26
2016-06-23T21:29:26
59,340,254
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5264726877212524, "alphanum_fraction": 0.5266146063804626, "avg_line_length": 37.70878982543945, "blob_id": "3c41f89404166df1870ce56a07a8b3468949b882", "content_id": "29367084fda877277c73be30eaf84c752227a428", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7045, "license_type": "no_license", "max_line_length": 109, "num_lines": 182, "path": "/main/views/views.py", "repo_name": "JefftheGreen/charsheets", "src_encoding": "UTF-8", "text": "from django.shortcuts import render, redirect\nfrom django.contrib.auth import authenticate, login, logout\nfrom django.contrib.auth.models import User\nfrom django.forms.models import modelformset_factory\nfrom django.views.generic import View\nfrom django.http import HttpResponse\nfrom django.conf import settings\nfrom main.models import Sheet, Skill\nfrom .forms import *\nfrom .sheet_form import *\n\n\nclass LoginView(View):\n \n def get(self, request):\n form = LoginForm()\n return render(request, 'login.html', {'error': False, 'form': form})\n \n def post(self, request):\n form = LoginForm(request.POST)\n if form.is_valid():\n username = form.cleaned_data['username']\n password = form.cleaned_data['password']\n user = authenticate(username=username, password=password)\n if user is not None:\n login(request, user)\n if password == '1234':\n return redirect('/password_reset/')\n else:\n return redirect('/profile/')\n else:\n form = LoginForm()\n return render(request, 'login.html', {'error': True, \n 'form': form})\n\n\nclass LogoutView(View):\n def get(self, request):\n logout(request)\n return render(request, 'logout.html', {'error': False})\n\n\nclass NewUserView(View):\n \n def get(self, request):\n form = RegisterForm()\n context = {'permission_error': False,\n 'duplicate_error': False,\n 'password_error': False,\n 'form': form}\n return render(request, 'register.html', context)\n \n def post(self, request):\n form = RegisterForm(request.POST)\n if form.is_valid():\n username = form.cleaned_data['username']\n password = form.cleaned_data['password']\n confirm = form.cleaned_data['confirm_password']\n if username not in settings.USER_WHITELIST:\n context = {'permission_error': True,\n 'duplicate_error': False,\n 'password_error': False,\n 'form': form}\n elif len(User.objects.filter(username=username)) > 0:\n context = {'permission_error': False,\n 'duplicate_error': True,\n 'password_error': False,\n 'form': form}\n elif confirm != password:\n context = {'permission_error': False,\n 'duplicate_error': False,\n 'password_error': True,\n 'form': form}\n else:\n print(\"creating new user \" + username)\n new_user = User.objects.create_user(username, password=password)\n return redirect('/login/')\n return render(request, 'register.html', context) \n\n\nclass PasswordResetView(View):\n \n def get(self, request):\n form = ResetForm()\n return render(request, 'pwdreset.html', {'pwderror': False, \n 'matcherror': False,\n 'form': form})\n \n def post(self, request):\n form = ResetForm(request.POST)\n if form.is_valid():\n old_password = form.cleaned_data['old_password']\n new_password = form.cleaned_data['new_password']\n confirm_password = form.cleaned_data['confirm_password']\n if request.user.check_password(old_password):\n if new_password == confirm_password:\n request.user.set_password(new_password)\n request.user.save()\n return redirect('/login/')\n else:\n context = {'pwderror': False, \n 'matcherror': True,\n 'form': ResetForm()}\n else:\n context = {'pwderror': 
True, \n 'matcherror': False,\n 'form': ResetForm()}\n return render(request, 'pwdreset.html', context)\n\n\nclass ProfileView(View):\n \n def get(self, request):\n form = NewSheetForm()\n context = {'user': request.user,\n 'sheet_list': Sheet.objects.filter(owner=request.user),\n 'form': form}\n return render(request, 'profile.html', context)\n \n def post(self, request):\n form = NewSheetForm(request.POST)\n if form.is_valid():\n name = form.cleaned_data['name']\n type = form.cleaned_data['type']\n sheet = Sheet(name=name, owner=request.user)\n sheet.save()\n form = NewSheetForm()\n context = {'user': request.user,\n 'sheet_list': Sheet.objects.filter(owner=request.user),\n 'form': form}\n return render(request, 'profile.html', context)\n\n\nclass SheetView(View):\n\n @property\n def sheet(self):\n try:\n return Sheet.objects.get(id=int(self.kwargs['id']))\n except:\n return None\n\n def get(self, request, **kwargs):\n form = SheetForm(instance=self.sheet)\n skill_factory = modelformset_factory(Skill, form=SkillForm,\n fields=['name', 'ranks'])\n skill_formset = skill_factory(queryset=self.sheet.skill_set.all())\n context = {'sheet': self.sheet,\n 'form': form,\n 'skills': skill_formset}\n return render(request, 'sheet.html', context)\n\n def post(self, request, **kwargs):\n sheetform = SheetForm(request.POST, instance=self.sheet)\n print(sheetform.errors)\n print(sheetform.cleaned_data['disp_base_str'])\n sheetform.save()\n print(self.sheet.fatigue_degree)\n new_form = SheetForm(instance=self.sheet)\n skill_factory = modelformset_factory(Skill, form=SkillForm,\n fields=['name', 'ranks'])\n skill_formset = skill_factory(queryset=self.sheet.skill_set.all())\n context = {'sheet': self.sheet,\n 'form': new_form,\n 'skills': skill_formset}\n return render(request, 'sheet.html', context)\n\n\ndef home_view(request):\n skills = list([s for s in Skill.objects.all() if s.super_skill == None])\n print(type(skills))\n skills.sort(key=lambda x: x.name)\n skill_subskill_list = []\n for s in skills:\n subskills = list(Skill.objects.filter(super_skill=s))\n if subskills:\n subskills.sort(key=lambda ss: ss.name)\n skill_subskill_list.append((s, tuple(subskills)))\n else:\n skill_subskill_list.append((s, tuple()))\n context = {'skills': skill_subskill_list}\n return render(request, 'home.html', context)\n" }, { "alpha_fraction": 0.5421836376190186, "alphanum_fraction": 0.5818858742713928, "avg_line_length": 25.866666793823242, "blob_id": "5c42cd0ce611247cb9f6ad23885f9fd93d7ef41c", "content_id": "228472aefb988e3935268e5de066e2ef9a4aa9c8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 806, "license_type": "no_license", "max_line_length": 123, "num_lines": 30, "path": "/main/migrations/0009_auto_20160521_0506.py", "repo_name": "JefftheGreen/charsheets", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.9.6 on 2016-05-21 05:06\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('main', '0008_auto_20160517_2058'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='effect',\n name='sheets',\n ),\n migrations.AddField(\n model_name='effect',\n name='active',\n field=models.BooleanField(default=True),\n ),\n migrations.AddField(\n model_name='effect',\n name='sheet',\n field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, 
to='main.Sheet'),\n ),\n ]\n" }, { "alpha_fraction": 0.5105999112129211, "alphanum_fraction": 0.5166892409324646, "avg_line_length": 40.4485969543457, "blob_id": "820b9ce91ff49903e637d0e8f968d6826a81e2da", "content_id": "93bf41fa974ff8fbc0b7441830dadd282e65c43b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4434, "license_type": "no_license", "max_line_length": 88, "num_lines": 107, "path": "/main/views/sheet_form.py", "repo_name": "JefftheGreen/charsheets", "src_encoding": "UTF-8", "text": "from django import forms\nfrom django.forms import formsets\nfrom main.models import *\n\n\nclass SheetForm(forms.ModelForm):\n\n ABILITIES = ['str', 'dex', 'con', 'int', 'wis', 'cha']\n SAVES = ['fort', 'ref', 'will']\n UNGROUPED_CONDITIONS = ['blinded', 'confused', 'dazed', 'dazzled',\n 'deafened', 'disabled', 'dying', 'entangled',\n 'fascinated', 'flat_footed', 'grappling',\n 'helpless', 'incorporeal', 'invisible', 'nauseated',\n 'paralyzed', 'petrified', 'pinned', 'prone',\n 'sickened', 'stable', 'staggered', 'stunned',\n 'turned', 'unconscious']\n CONDITION_GROUPS =[('Fear', ['shaken', 'frightened', 'panicked',\n 'cowering']),\n ('Fatigue', ['fatigued', 'exhausted'])]\n\n fatigued = forms.BooleanField(required=False)\n exhausted = forms.BooleanField(required=False)\n shaken = forms.BooleanField(required=False)\n frightened = forms.BooleanField(required=False)\n panicked = forms.BooleanField(required=False)\n\n class Meta:\n model = Sheet\n fields = [#'char_name', 'race', 'classes', 'gender', 'alignment',\n #'deity', 'campaign', 'age', 'disp_size',\n 'disp_base_str',\n 'disp_base_dex', 'disp_base_con', 'disp_base_int',\n 'disp_base_wis', 'disp_base_cha', 'disp_base_fort',\n 'disp_base_ref', 'disp_base_will', 'blinded', 'confused',\n 'dazed', 'dazzled', 'deafened', 'disabled', 'dying',\n 'entangled', 'fascinated', 'flat_footed', 'grappling',\n 'helpless', 'incorporeal', 'invisible', 'nauseated',\n 'paralyzed', 'petrified', 'pinned', 'prone', 'sickened',\n 'stable', 'staggered', 'stunned', 'turned', 'unconscious', 'cowering']\n widgets = {'disp_base_str':\n forms.TextInput(attrs={'placeholder': '10'}),\n 'disp_base_dex':\n forms.TextInput(attrs={'placeholder': '10'}),\n 'disp_base_con':\n forms.TextInput(attrs={'placeholder': '10'}),\n 'disp_base_int':\n forms.TextInput(attrs={'placeholder': '10'}),\n 'disp_base_wis':\n forms.TextInput(attrs={'placeholder': '10'}),\n 'disp_base_cha':\n forms.TextInput(attrs={'placeholder': '10'})}\n\n\n def __init__(self, *args, **kwargs):\n if 'instance' in kwargs:\n sheet = kwargs['instance']\n initial = kwargs.get('initial', {})\n initial['fatigued'] = sheet.fatigue_degree == 1\n initial['exhausted'] = sheet.fatigue_degree == 2\n initial['shaken'] = sheet.fear_degree == 1\n initial['frightened'] = sheet.fear_degree == 2\n initial['panicked'] = sheet.fear_degree == 3\n kwargs['initial'] = initial\n super().__init__(*args, **kwargs)\n\n @property\n def fatigue_degree(self):\n return (2 if self['exhausted'].value()\n else 1 if self['fatigued'].value() else 0)\n\n @property\n def fear_degree(self):\n return (3 if self['panicked'].value()\n else 2 if self['frightened'].value()\n else 1 if self['shaken'].value() else 0)\n\n @property\n def condition_groups(self):\n return [(title, [self[c] for c in cs])\n for title, cs in self.CONDITION_GROUPS]\n\n def save(self, *args, **kwargs):\n self.instance.fatigue_degree = self.fatigue_degree\n self.instance.fear_degree = self.fear_degree\n self.instance.save()\n 
super().save(*args, **kwargs)\n\n\nclass SkillForm(forms.ModelForm):\n class Meta:\n model = Skill\n fields = ['name', 'ranks']\n widgets = {'ranks': forms.NumberInput(attrs={'step': 0.5, 'min': 0})}\n\n def __init__(self, *args, **kwargs):\n if 'instance' in kwargs:\n skill = kwargs['instance']\n initial = kwargs.get('initial', {})\n if skill.ranks.is_integer():\n initial['ranks'] = int(skill.ranks)\n print(skill)\n kwargs['initial'] = initial\n super().__init__(*args, **kwargs)\n\n\nclass SkillFormSet(formsets.BaseFormSet):\n pass" }, { "alpha_fraction": 0.5254083275794983, "alphanum_fraction": 0.5680580735206604, "avg_line_length": 35.733333587646484, "blob_id": "3c5e1d0059c1bed0f7aa24701cea8e93f1a6b619", "content_id": "e90759d83a68b06b0076f91cea2e19d9284d993a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1102, "license_type": "no_license", "max_line_length": 182, "num_lines": 30, "path": "/main/migrations/0004_auto_20160517_2044.py", "repo_name": "JefftheGreen/charsheets", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.9.6 on 2016-05-17 20:44\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('main', '0003_auto_20160517_2041'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='effect',\n name='ability_bonus',\n field=models.IntegerField(choices=[(0, 'Strength'), (1, 'Dexterity'), (2, 'Constitution'), (3, 'Intelligence'), (4, 'Wisdom'), (5, 'Charisma')], default=None, null=True),\n ),\n migrations.AlterField(\n model_name='effect',\n name='from_x_stat',\n field=models.IntegerField(choices=[(0, 'Strength'), (1, 'Dexterity'), (2, 'Constitution'), (3, 'Intelligence'), (4, 'Wisdom'), (5, 'Charisma')], default=None, null=True),\n ),\n migrations.AlterField(\n model_name='effect',\n name='save_bonus',\n field=models.IntegerField(choices=[(0, 'Fortitude'), (1, 'Reflex'), (2, 'Will')], default=None, null=True),\n ),\n ]\n" }, { "alpha_fraction": 0.5801393985748291, "alphanum_fraction": 0.6358885169029236, "avg_line_length": 26.33333396911621, "blob_id": "b012210e11ec070de72033c7bb6c00f127d02694", "content_id": "cda160c02a1a815d1b1717021db2a1b30c137a94", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 574, "license_type": "no_license", "max_line_length": 137, "num_lines": 21, "path": "/main/migrations/0005_auto_20160517_2045.py", "repo_name": "JefftheGreen/charsheets", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.9.6 on 2016-05-17 20:45\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('main', '0004_auto_20160517_2044'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='effect',\n name='parent_effect',\n field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='sub_effect', to='main.Effect'),\n ),\n ]\n" }, { "alpha_fraction": 0.6335269808769226, "alphanum_fraction": 0.6441493630409241, "avg_line_length": 37.375797271728516, "blob_id": "ed6a93827fd0a3bae70467ea7be86c1734221825", "content_id": "328a286116aae58ae051f0c8663af4c95da493f1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6025, "license_type": "no_license", "max_line_length": 79, "num_lines": 
157, "path": "/main/models/misc.py", "repo_name": "JefftheGreen/charsheets", "src_encoding": "UTF-8", "text": "from django.db import models\n \n \nclass Container(models.Model):\n\n # The container's name. This is displayed on the sheet.\n name = models.CharField(max_length=200)\n # The maximum number of items that can be contained in the container.\n max_items = models.IntegerField(null=True, default=None)\n # If true, the contents do not count against encumbrance.\n weightless = models.BooleanField(default=False)\n # The weight of the container itself, factored into encumbrance.\n weight = models.FloatField(null=True, default=None)\n \n \nclass Item(models.Model):\n\n # Value keys for the item types.\n WEAPON = 'W'\n ARMOR = 'A'\n SHIELD = 'S'\n CLOTHING = 'C'\n TOOL = 'T'\n \n TYPE_CHOICES = (\n (WEAPON, 'Weapon'),\n (ARMOR, 'Armor'),\n (SHIELD, 'Shield'),\n (CLOTHING, 'Clothing'),\n (TOOL, 'Tool'))\n\n # The item's name. This is displayed on the sheet.\n name = models.CharField(max_length=200)\n # The type of item. Determines what fields are available and considered.\n item_type = models.CharField(max_length=1, choices=TYPE_CHOICES,\n default=TOOL)\n # The damage a weapon does. This is displayed on the sheet.\n damage = models.CharField(max_length=50)\n # The type of critical a weapon does. This is displayed on the sheet.\n critical = models.CharField(max_length=50)\n # The weapon's range. This is displayed on the sheet.\n weapon_range = models.CharField(max_length=50)\n # The weight of the weapon, parsed and factored into encumbrance.\n weight_disp = models.CharField(max_length=50)\n # The size of the item. This is displayed on the sheet.\n size = models.CharField(max_length=50)\n # The damage type the weapon does. This is displayed on the sheet.\n damage_type = models.CharField(max_length=50)\n # The base AC bonus of armor or shield, parsed and factored into AC.\n base_ac_disp = models.CharField(max_length=50)\n # The base armor check penalty of armor or shield, parsed and factored\n # into ACP.\n base_acp_disp = models.CharField(max_length=50)\n # The base arcane spell failure of armor or shield, parsed and factored\n # into ASF\n base_asf_disp = models.CharField(max_length=50)\n # The enhancement bonus of the weapon.\n weapon_enhancement_bonus = models.CharField(max_length=50)\n # The enhancement bonus of the armor.\n armor_enhancement_bonus = models.CharField(max_length=50)\n # The container the item is in.\n container = models.ForeignKey(Container, on_delete=models.CASCADE)\n\n\nclass Property(models.Model):\n\n # The item the property is attached to.\n item = models.ForeignKey(Item, on_delete=models.CASCADE)\n # The effect the property bestows on the item.\n effect = models.ForeignKey('Effect', null=True, default=None,\n on_delete=models.SET_NULL)\n \n \nclass Skill(models.Model):\n \n # Value keys for the abilities\n STR = 0\n DEX = 1\n CON = 2\n INT = 3\n WIS = 4\n CHA = 5\n \n ABILITY_CHOICES = (\n (STR, 'Strength'),\n (DEX, 'Dexterity'),\n (CON, 'Constitution'),\n (INT, 'Intelligence'),\n (WIS, 'Wisdom'),\n (CHA, 'Charisma')\n )\n\n # The name of the skill, displayed on the sheet.\n name = models.CharField(max_length=200)\n # The default key ability for the skill.\n default_stat = models.IntegerField(default=None, null=True, \n choices=ABILITY_CHOICES)\n # The sheet that the skill is attached to.\n sheet = models.ForeignKey('Sheet', null=True, default=None)\n # Some skills have a super skill that is used for visual grouping and\n # controlling default behavior. 
For example, Perform for Perform (dance) or\n # Knowledge for Knowledge (arcana)\n super_skill = models.ForeignKey('self', null=True, default=None)\n # The ranks the character has in the skill\n ranks = models.FloatField(default=0)\n # Any miscellaneous modifier not attached to the\n misc_mod = models.FloatField(default=0)\n # The multiplier for armor check penalty. Usually 0 or 1, sometimes higher.\n acp = models.IntegerField(default=0)\n # Is the user allowed to rename the skill.\n rename = models.BooleanField(default=True)\n # Manual override of the key stat. This is used first, then any from\n # effects, then the default key stat.\n stat_override = models.IntegerField(default=None, null=True, \n choices=ABILITY_CHOICES)\n\n def __str__(self):\n return self.name\n\n \n# Class representing feats. Mostly a wrapper for Effect, but stores its own\n# description and name.\nclass Feat(models.Model):\n\n # The name of the feat, displayed on the sheet.\n name = models.CharField(max_length=200)\n # The description of the feat, displayed on the sheet.\n description = models.CharField(max_length=1000)\n # The effect the feat grants.\n effect = models.ForeignKey('Effect', null=True, on_delete=models.SET_NULL)\n \n def __str__(self):\n return self.name\n\n\n# Class representing racial abilities. Mostly a wrapper for Effect, but stores \n# its own description and name. \nclass RacialAbility(models.Model):\n \n name = models.CharField(max_length=200)\n description = models.CharField(max_length=1000)\n effect = models.ForeignKey('Effect', null=True, on_delete=models.SET_NULL)\n \n def __str__(self):\n return self.name\n\n\n# Class representing class abilities. Mostly a wrapper for Effect, but stores \n# its own description and name. \nclass ClassAbility(models.Model):\n \n name = models.CharField(max_length=200)\n description = models.CharField(max_length=1000)\n effect = models.ForeignKey('Effect', null=True, on_delete=models.SET_NULL)\n \n def __str__(self):\n return self.name\n" }, { "alpha_fraction": 0.41908445954322815, "alphanum_fraction": 0.43649259209632874, "avg_line_length": 37.296295166015625, "blob_id": "715eecd41257ed08632e6fbd647198bb1cb4c465", "content_id": "04046ba70b0c026bcdca9a12c033d9099dc49074", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3102, "license_type": "no_license", "max_line_length": 80, "num_lines": 81, "path": "/main/tests.py", "repo_name": "JefftheGreen/charsheets", "src_encoding": "UTF-8", "text": "from django.test import TestCase\nfrom main.models import *\nfrom main.default_data import setup\nfrom collections import defaultdict\n\n\nclass EffectTest(TestCase):\n \n def setUp(self):\n setup()\n sheet = Sheet(owner_id=0, disp_base_str='10', disp_base_dex='12',\n disp_base_con='14', disp_base_int='16',\n disp_base_wis='18', disp_base_cha='20',\n disp_base_fort='5', disp_base_ref='5', disp_base_will='5',\n name='test_sheet')\n sheet.save()\n supers = [{'skill_bonus_id':10,\n 'bonus_amount':2,\n 'bonus_type':0,\n 'name':'test_sub_effects',\n 'owner':sheet},\n {'skill_bonus_id': 13,\n 'bonus_amount': 2,\n 'bonus_type': 0,\n 'name': 'test_sub_effects2',\n 'owner': sheet}\n ]\n supers_list = []\n for s in supers:\n super = Effect(skill_bonus_id=s['skill_bonus_id'],\n bonus_amount=s['bonus_amount'],\n bonus_type=s['bonus_type'],\n name=s['name'],\n sheet=s['owner'])\n super.save()\n supers_list.append(super)\n parent=supers_list[0]\n subs = [{'skill_bonus_id':11,\n 'bonus_amount':5,\n 'bonus_type':1,\n 
'parent_effect': parent},\n {'save_bonus': 0,\n 'bonus_amount': 3,\n 'bonus_type': 1,\n 'parent_effect': parent},\n {'save_bonus': 1,\n 'x_to_y_bonus_ability': 3,\n 'bonus_type': 1,\n 'parent_effect': parent},\n {'save_override': 1,\n 'override_ability': 5,\n 'bonus_type': 1,\n 'parent_effect': parent}\n ]\n subs = [defaultdict(lambda: None, d) for d in subs]\n for s in subs:\n sub = Effect(skill_bonus_id=s['skill_bonus_id'],\n bonus_amount=s['bonus_amount'],\n bonus_type=s['bonus_type'],\n parent_effect=s['parent_effect'],\n save_bonus=s['save_bonus'],\n save_override=s['save_override'],\n x_to_y_bonus_ability=s['x_to_y_bonus_ability'],\n override_ability=s['override_ability']\n )\n sub.save()\n \n def test_sub_effects(self):\n supes = Effect.objects.get(name='test_sub_effects')\n assert supes.skill_bonuses == {\n 10: {0:[2]},\n 11: {1:[5]},\n }\n\n def test_effects(self):\n sheet = Sheet.objects.get(name='test_sheet')\n effect = Effect.objects.get(name='test_sub_effects')\n skill = Skill.objects.get(id=11)\n assert sheet.fin_fort == 10\n assert sheet.fin_ref == 13\n assert sheet.fin_will == 9\n" }, { "alpha_fraction": 0.7289719581604004, "alphanum_fraction": 0.7289719581604004, "avg_line_length": 20.600000381469727, "blob_id": "ec64ed477e12e03afc2558bc8acc3e005eb24722", "content_id": "be75ecc0f265a96ff9e6e40801cd2d8f7a569064", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 107, "license_type": "no_license", "max_line_length": 24, "num_lines": 5, "path": "/main/models/__init__.py", "repo_name": "JefftheGreen/charsheets", "src_encoding": "UTF-8", "text": "from .effect import *\nfrom .sheet import *\nfrom .misc import *\nfrom .misc import *\nfrom .receivers import *" }, { "alpha_fraction": 0.5380658507347107, "alphanum_fraction": 0.5560699701309204, "avg_line_length": 33.71428680419922, "blob_id": "f4d2fc9d4bf3d429a659bfce5ca59c3c45d59344", "content_id": "97a4148c6e48c650305134a82a6f5a08450f8bf0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1944, "license_type": "no_license", "max_line_length": 182, "num_lines": 56, "path": "/main/migrations/0002_auto_20160516_1305.py", "repo_name": "JefftheGreen/charsheets", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.9.6 on 2016-05-16 13:05\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('main', '0001_initial'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='skill',\n name='acp',\n field=models.IntegerField(default=0),\n ),\n migrations.AddField(\n model_name='skill',\n name='misc_mod',\n field=models.IntegerField(default=0),\n ),\n migrations.AddField(\n model_name='skill',\n name='ranks',\n field=models.IntegerField(default=0),\n ),\n migrations.AddField(\n model_name='skill',\n name='rename',\n field=models.BooleanField(default=True),\n ),\n migrations.AddField(\n model_name='skill',\n name='stat_override',\n field=models.IntegerField(choices=[(0, 'Strength'), (1, 'Dexterity'), (2, 'Constitution'), (3, 'Intelligence'), (4, 'Wisdom'), (5, 'Charisma')], default=None, null=True),\n ),\n migrations.AddField(\n model_name='skill',\n name='super_skill',\n field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='main.Skill'),\n ),\n migrations.AlterField(\n model_name='skill',\n name='default_stat',\n 
field=models.IntegerField(choices=[(0, 'Strength'), (1, 'Dexterity'), (2, 'Constitution'), (3, 'Intelligence'), (4, 'Wisdom'), (5, 'Charisma')], default=None, null=True),\n ),\n migrations.AlterField(\n model_name='skill',\n name='sheet',\n field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='main.Sheet'),\n ),\n ]\n" }, { "alpha_fraction": 0.35110071301460266, "alphanum_fraction": 0.3648974597454071, "avg_line_length": 38.71556854248047, "blob_id": "77780e026f9e8da02a233160fc429b0eb73d59f9", "content_id": "e2a521a63caa9436624f884c93bf04920fb12a05", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 13264, "license_type": "no_license", "max_line_length": 83, "num_lines": 334, "path": "/main/default_data.py", "repo_name": "JefftheGreen/charsheets", "src_encoding": "UTF-8", "text": "# Value keys for the abilities\nSTR = 0\nDEX = 1\nCON = 2\nINT = 3\nWIS = 4\nCHA = 5\n\n# Value keys for bonus types\nALC = 0\nArm = 1\nCRC = 2\nCMP = 3\nDEF = 4\nDIV = 5\nDDG = 6\nENH = 7\nEXL = 8\nINS = 9\nLCK = 10\nMRL = 11\nNAT = 12\nPRF = 13\nRAC = 14\nRES = 15\nSAC = 16\nSHD = 17\nUNT = 18\n\nSKILL_DEFAULTS_3_5 = (\"Appraise\", \"Autohypnosis\", \"Balance\", \"Bluff\", \"Climb\", \n \"Concentration\", \"Craft\", \"Decipher Script\", \"Diplomacy\", \n \"Disable Device\", \"Disguise\", \"Escape Artist\", \n \"Gather Information\", \"Handle Animal\", \"Heal\", \"Hide\", \n \"Intimidate\", \"Jump\", \"Knowledge\", \"Listen\", \n \"Lucid Dreaming\", \"Martial Lore\", \"Move Silently\", \n \"Open Lock\", \"Perform\", \"Profession\", \"Forgery\", \"Psicraft\", \n \"Ride\", \"Search\", \"Sense Motive\", \"Sleight Of Hand\", \n \"Speak Language\", \"Spellcraft\", \"Spot\", \"Survival\", \"Swim\", \n \"Tumble\", \"Use Magic Device\", \"Use Psionic Device\",\n \"Use Rope\")\n \nSKILL_DEFAULT_ABIlitiES_3_5 = {\"Appraise\": INT,\n \"Autohypnosis\": WIS, \n \"Balance\": DEX, \n \"Bluff\": CHA, \n \"Climb\": STR, \n \"Concentration\": CON, \n \"Craft\": INT, \n \"Decipher Script\": INT, \n \"Diplomacy\": CHA, \n \"Disable Device\": INT,\n \"Disguise\": CHA, \n \"Escape Artist\": DEX, \n \"Gather Information\": CHA, \n \"Handle Animal\": CHA, \n \"Heal\": WIS, \n \"Hide\": DEX, \n \"Intimidate\": CHA, \n \"Jump\": STR, \n \"Knowledge\": INT,\n \"Listen\": WIS,\n \"Lucid Dreaming\": WIS, \n \"Martial Lore\": INT, \n \"Move Silently\": DEX, \n \"Open Lock\": DEX, \n \"Perform\": CHA, \n \"Profession\": WIS, \n \"Forgery\": INT, \n \"Psicraft\": INT, \n \"Ride\": DEX, \n \"Search\": INT,\n \"Sense Motive\": WIS, \n \"Sleight Of Hand\": DEX, \n \"Speak Language\": None, \n \"Spellcraft\": INT, \n \"Spot\": WIS, \n \"Survival\": WIS, \n \"Swim\": STR, \n \"Tumble\": DEX, \n \"Use Magic Device\": CHA, \n \"Use Psionic Device\": CHA,\n \"Use Rope\": DEX}\n \nSUBSKILLS = {\"Craft\": [\"alchemy\", \"armorsmithing\", \"bowmaking\", \n \"poisonmaking\" \"trapmaking\", \"weaponsmithing\"],\n \"Knowledge\": [\"arcana\", \"architecture and engineering\",\n \"dungeoneering\", \"geography\", \"history\",\n \"local\", \"nature\", \"nobility and royalty\",\n \"psionics\", \"religion\", \"the planes\"],\n \"Perform\": [\"Act\",\"Comedy\", \"Dance\", \"Keyboard instruments\",\n \"Oratory\", \"Percussion instruments\", \n \"String instruments\", \"Wind instruments\", \"Sing\"]}\n \nSKILL_ACP = {\"Appraise\": 0,\n \"Autohypnosis\": 0,\n \"Balance\": 1, \n \"Bluff\": 0, \n \"Climb\": 1, \n \"Concentration\": 0,\n \"Craft\": 0, \n \"Decipher 
Script\": 0,\n \"Diplomacy\": 0, \n \"Disable Device\": 0,\n \"Disguise\": 0, \n \"Escape Artist\": 1, \n \"Gather Information\": 0, \n \"Handle Animal\": 0, \n \"Heal\": 0, \n \"Hide\": 1, \n \"Intimidate\": 0, \n \"Jump\": 1, \n \"Knowledge\": 0,\n \"Listen\": 0,\n \"Lucid Dreaming\": 0, \n \"Martial Lore\": 0, \n \"Move Silently\": 1, \n \"Open Lock\": 0, \n \"Perform\": 0, \n \"Profession\": 0, \n \"Forgery\": 0, \n \"Psicraft\": 0, \n \"Ride\": 0, \n \"Search\": 0,\n \"Sense Motive\": 0, \n \"Sleight Of Hand\": 1, \n \"Speak Language\": 0,\n \"Spellcraft\": 0, \n \"Spot\": 0, \n \"Survival\": 0, \n \"Swim\": 2, \n \"Tumble\": 1, \n \"Use Magic Device\": 0, \n \"Use Psionic Device\": 0,\n \"Use Rope\": 0}\n \nDEFAULT_SKILL_IDS = range(1, 55)\n\nDEFAULT_SAVE_ABILITIES = {0: 2,\n 1: 1,\n 2: 4}\n\nBONUS_TYPE_CHOICES = (\n (ALC, 'Alchemical'),\n (Arm, 'Armor'),\n (CRC, 'Circumstance'),\n (CMP, 'Competence'),\n (DEF, 'Deflection'),\n (DIV, 'Divine'),\n (DDG, 'Dodge'),\n (ENH, 'Enhancement'),\n (EXL, 'Exalted'),\n (INS, 'Insight'),\n (LCK, 'Luck'),\n (MRL, 'Morale'),\n (NAT, 'Natural'),\n (PRF, 'Profane'),\n (RAC, 'Racial'),\n (RES, 'Resistance'),\n (SAC, 'Sacred'),\n (SHD, 'Shield'),\n (UNT, 'Untyped/Other')\n)\n\n# Value keys for the saves\nFORT = 0\nREF = 1\nWILL = 2\n\n# Value keys for the abilities\nSTR = 0\nDEX = 1\nCON = 2\nINT = 3\nWIS = 4\nCHA = 5\n\nSAVE_CHOICES = (\n (FORT, \"Fortitude\"),\n (REF, \"Reflex\"),\n (WILL, \"Will\")\n)\n\nABILITY_CHOICES = (\n (STR, 'Strength'),\n (DEX, 'Dexterity'),\n (CON, 'Constitution'),\n (INT, 'Intelligence'),\n (WIS, 'Wisdom'),\n (CHA, 'Charisma')\n)\n\nDEFAULT_CONDITIONS = ['blinded', 'confused', 'dazed', 'dazzled', 'deafened',\n 'disabled', 'dying', 'entangled', 'fascinated',\n 'flat_footed', 'grappling', 'helpless', 'incorporeal',\n 'invisible', 'nauseated', 'paralyzed', 'petrified',\n 'pinned', 'prone', 'sickened', 'stable', 'staggered',\n 'stunned', 'turned', 'unconscious', 'fatigued',\n 'exhausted', 'shaken', 'frightened', 'panicked',\n 'cowering']\n\nDEFAULT_CONDITION_EFFECTS = {'blinded': (('ac', -2),\n ('no dex to ac', True),\n ('skill', ('Search', -4)),\n ('ability skill', (0, -4)),\n ('ability skill', (1, -4))),\n 'cowering': (('ac', -2),\n ('no dex to ac', True)),\n 'dazzled': (('attack', -1),\n ('skill', ('Search', -1)),\n ('skill', ('Spot', -1))),\n 'deafened': (('initiative', -4),),\n 'entangled': (('attack', -2),\n ('ability', (1, -4))),\n 'exhausted': (('ability', (0, -6)),\n ('ability', (1, -6))),\n 'fatigued': (('ability', (0, -2)),\n ('ability', (1, -2))),\n 'flat_footed': (('no dex to ac', True),),\n 'frightened': (('attack', -2),\n ('save', (0, -2)),\n ('save', (1, -2)),\n ('save', (2, -2)),\n ('skill', ('all', -2))),\n 'grappling': (('no dex to ac', True),),\n 'helpless': (('ability equals', (1, 0)),),\n 'panicked': (('attack', -2),\n ('save', (0, -2)),\n ('save', (1, -2)),\n ('save', (2, -2)),\n ('skill', ('all', -2))),\n 'paralyzed': (('ability equals', (0, 0)),\n ('ability equals', (1, 0))),\n 'petrified': (('ability equals', (1, 0)),),\n 'prone': (('melee', -4),),\n 'shaken': (('attack', -2),\n ('save', (0, -2)),\n ('save', (1, -2)),\n ('save', (2, -2)),\n ('skill', ('all', -2))),\n 'sickened': (('attack', -2),\n ('save', (0, -2)),\n ('save', (1, -2)),\n ('save', (2, -2)),\n ('skill', ('all', -2))),\n 'stunned': (('ac', -2),\n ('no dex to ac', True)),\n 'unconscious': (('ability equals', (1, 0)),)}\n\n\ndef create_default_skills():\n from .models import Skill\n skill_id = 1\n for s in SKILL_DEFAULTS_3_5:\n ability = 
SKILL_DEFAULT_ABIlitiES_3_5[s]\n acp = SKILL_ACP[s]\n skill = Skill(id=skill_id, rename=False, acp=acp, \n default_stat=ability, name=s)\n skill.save()\n skill_id += 1\n if s == \"Knowledge\":\n for ss in SUBSKILLS[s]:\n sub_skill = Skill(rename=False, acp=acp, default_stat=ability,\n name=ss, super_skill=skill, id=skill_id)\n sub_skill.save()\n skill_id += 1\n\n\ndef create_conditions():\n from .models import Condition\n import warnings\n for condition in DEFAULT_CONDITION_EFFECTS:\n c = Condition(name=condition)\n c.save()\n for condition in DEFAULT_CONDITION_EFFECTS:\n for effect, value in DEFAULT_CONDITION_EFFECTS[condition]:\n if effect == 'ac':\n c = Condition(parent_effect=\n Condition.objects.get(name=condition),\n ac_bonus=True, bonus_amount=value)\n c.save()\n elif effect == 'no dex to ac':\n c = Condition(parent_effect=\n Condition.objects.get(name=condition),\n no_dex_to_ac=True)\n c.save()\n elif effect == 'skill':\n c = Condition(parent_effect=\n Condition.objects.get(name=condition),\n skill_bonus=value[0], bonus_amount=value[1])\n c.save()\n elif effect == 'ability skill':\n c = Condition(parent_effect=\n Condition.objects.get(name=condition),\n ability_skill_bonus=value[0],\n bonus_amount=value[1])\n c.save()\n elif effect == 'attack':\n c = Condition(parent_effect=\n Condition.objects.get(name=condition),\n attack_bonus=2, bonus_amount=value)\n c.save()\n elif effect == 'initiative':\n c = Condition(parent_effect=\n Condition.objects.get(name=condition),\n initiative_bonus=True, bonus_amount=value)\n c.save()\n elif effect == 'ability equals':\n c = Condition(parent_effect=\n Condition.objects.get(name=condition),\n ability_set=value[0], bonus_amount=value[1])\n c.save()\n elif effect == 'ability':\n c = Condition(parent_effect=\n Condition.objects.get(name=condition),\n ability_bonus=value[0], bonus_amount=value[1])\n c.save()\n elif effect == 'melee':\n c = Condition(parent_effect=\n Condition.objects.get(name=condition),\n attack_bonus=0, bonus_amount=value)\n c.save()\n elif effect == 'save':\n c = Condition(parent_effect=\n Condition.objects.get(name=condition),\n save_bonus=value[0], bonus_amount=value[1])\n c.save()\n else:\n w = (\"Condition {0} has an unrecognized effect\"\n .format(condition))\n warnings.warn(w, RuntimeWarning)\n\ndef setup():\n create_conditions()\n create_default_skills()" }, { "alpha_fraction": 0.596503734588623, "alphanum_fraction": 0.5988481640815735, "avg_line_length": 49.18414306640625, "blob_id": "3bf4f33aa1320167fe7fe74509bcbe941bcf3a0e", "content_id": "5615360aa37c2b387934a3de6595f0343a1aa97f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 19621, "license_type": "no_license", "max_line_length": 88, "num_lines": 391, "path": "/main/models/effect.py", "repo_name": "JefftheGreen/charsheets", "src_encoding": "UTF-8", "text": "from django.db import models\nfrom django.contrib.auth.models import User\nfrom django.utils import timezone\nfrom main.default_data import BONUS_TYPE_CHOICES, ABILITY_CHOICES, SAVE_CHOICES\nimport warnings\nfrom main.models.misc import Skill\n\n\nclass Effect(models.Model):\n\n # TODO: Figure out which fields to index\n # The owner of the effect. This is used if the effect is stored for use by\n # multiple sheets owned by a user. Either this or sheet should be None.\n owner = models.ForeignKey(User, null=True, default=None)\n # The sheet the effect is attached to. This is used if the effect is used by\n # a sheet. 
Either this or owner should be None.\n sheet = models.ForeignKey('sheet', null=True, default=None)\n # The time the effect was created\n date = models.DateTimeField(default=timezone.now)\n # Whether the effect is active. If not, it's not factored into the\n # character sheet.\n active = models.BooleanField(default=True)\n # The effect's name. This is displayed on the sheet.\n name = models.CharField(max_length=200)\n # The effect's description. This is displayed on the sheet.\n description = models.CharField(max_length=1000)\n # Sheets can be nested. The ulimate parent (i.e. the effect with no\n # parents) is the one evaluated by a sheet, collecting the results of all\n # sub effects\n parent_effect = models.ForeignKey('self', related_name='sub_effect', \n related_query_name='sub_effect', \n null=True)\n # The bonus amount, if a flat bonus\n bonus_amount = models.IntegerField(default=None, null=True)\n # If the bonus is equal to an ability modifier, this stores the ability\n # using the values in ABILITY_CHOICES.\n x_to_y_bonus_ability = models.IntegerField(default=None,\n choices=ABILITY_CHOICES,\n null=True)\n # If the effect replaces an ability score in a calculation instead of\n # giving a bonus, this stores the ability using the values in\n # ABILITY_CHOICES.\n x_to_y_replace_ability = models.IntegerField(default=None,\n choices=ABILITY_CHOICES,\n null=True)\n # The bonus type, using the values in BONUS_TYPE_CHOICES\n bonus_type = models.IntegerField(default=18, choices=BONUS_TYPE_CHOICES,\n null=True)\n # If the bonus goes to a skill, this stores the skill\n skill_bonus = models.ForeignKey('Skill', on_delete=models.SET_NULL, \n default=None, null=True)\n # If the bonus goes to all skills with a certain key ability, this stores\n # the ability using the values in ABILItY_CHOICES\n ability_skill_bonus = models.IntegerField(choices=ABILITY_CHOICES,\n default=None, null=True)\n # If the bonus goes to an ability, this stores the ability using the values\n # in ABILITY_CHOICES\n ability_bonus = models.IntegerField(choices=ABILITY_CHOICES, default=None, \n null=True)\n # If the bonus goes to a save, this stores the save using the values\n # in SAVE_CHOICES\n save_bonus = models.IntegerField(choices=SAVE_CHOICES, default=None, \n null=True)\n # If the effect overrides a default ability to some statistic, this stores\n # the ability using the values in ABILITY_CHOICES\n override_ability = models.IntegerField(choices=ABILITY_CHOICES,\n default=None, null=True)\n # If the effect overrides a save's default ability, this stores the save\n # using the values in SAVE_CHOICES\n save_override = models.IntegerField(choices=SAVE_CHOICES, default=None,\n null=True)\n # If the effect sets an ability to a certain value (usually 0), this stores\n # the ability using the values in ABILItY_CHOICES. Sets to bonus_amount\n ability_set = models.IntegerField(choices=ABILITY_CHOICES, default=None,\n null=True)\n # Whether the effect grants a bonus to AC\n ac_bonus = models.BooleanField(default=False)\n # Whether the effect causes the character to lose Dexterity to AC\n no_dex_to_ac = models.BooleanField(default=False)\n # Whether the effect grants a bonus to initiative\n initiative_bonus = models.BooleanField(default=False)\n # Whether the effect grants a bonus to attack. 0 if melee, 1 if ranged,\n # 2 if both\n attack_bonus = models.IntegerField(null=True, default=None)\n\n # Gets the total bonus the effect gives to an ability.\n # ability:\n # the ability to get bonuses for. 
integer (see default_data).\n # Returns a list of 2-tuples. The first element of the tuples is an integer\n # indicating the bonus type (see default_data); the second is the bonus\n # amount (an integer).\n def total_ability_bonus(self, ability):\n bonus = []\n if self.ability_bonus == ability:\n # A fixed bonus\n if self.bonus_amount is not None:\n bonus.append((self.bonus_type, self.bonus_amount))\n # X to Y on abilities really screws things up\n elif self.x_to_y_bonus_ability is not None:\n e = \"Effect {0} has from_x_stat for an ability.\".format(self.id)\n raise RuntimeError(e)\n # This shouldn't happen\n else:\n w = (\"Effect {0} has an ability listed but no bonus amount\"\n .format(self.id))\n warnings.warn(w, RuntimeWarning)\n # Add sub-effect bonuses\n for sub_effect in self.sub_effect.all():\n bonus += sub_effect.total_ability_bonus(ability)\n return bonus\n\n # Gets the total bonus the effect gives to a skill.\n # ability:\n # the skill to get bonuses for. integer (see default_data).\n # Returns a list of 2-tuples. The first element of the tuples is an integer\n # indicating the bonus type (see default_data); the second is the bonus\n # amount (an integer).\n def total_skill_bonus(self, skill):\n if type(skill) == int:\n skill = Skill.objects.get(id=skill)\n bonus = []\n # Only add if this effect has a bonus to the specified skill\n if self.skill_bonus in [skill, skill.super_skill]:\n # A fixed bonus\n if self.bonus_amount is not None:\n bonus.append((self.bonus_type, self.bonus_amount))\n # A bonus equal to an ability score modifier\n elif self.x_to_y_bonus_ability is not None:\n bonus.append((self.bonus_type, self.get_x_to_y_bonus_ability_display()))\n # This shouldn't happen\n else:\n w = \"Effect {0} has a skill listed but no bonus amount\".format(\n self.id)\n warnings.warn(w, RuntimeWarning)\n # Add sub-effect bonuses\n for sub_effect in self.sub_effect.all():\n bonus += sub_effect.total_skill_bonus(skill)\n return bonus\n\n # Gets the total bonus the effect gives to a save.\n # ability:\n # the save to get bonuses for. integer (see default_data).\n # Returns a list of 2-tuples. The first element of the tuples is an integer\n # indicating the bonus type (see default_data); the second is the bonus\n # amount (an integer).\n def total_save_bonus(self, save):\n bonus = []\n # Only add if this effect has a bonus to the specified skill\n if self.save_bonus == save:\n # A fixed bonus\n if self.bonus_amount is not None:\n bonus.append((self.bonus_type, self.bonus_amount))\n # A bonus equal to an ability score modifier\n elif self.x_to_y_bonus_ability is not None:\n bonus.append((self.bonus_type, self.get_x_to_y_bonus_ability_display()))\n # This shouldn't happen\n else:\n w = \"Effect {0} has a save listed but no bonus amount\".format(\n self.id)\n warnings.warn(w, RuntimeWarning)\n # Add sub-effect bonuses\n for sub_effect in self.sub_effect.all():\n bonus += sub_effect.total_save_bonus(save)\n return bonus\n\n # Gets the save whose ability the effect overrides.\n # save:\n # the save to get bonuses for. 
integer (see SAVE_CHOICES).\n # Returns a list of of integers from ABILITY_CHOICES.\n def ultimate_save_override(self, save):\n # Add save override if for the given save\n overrides = ([self.override_ability] if self.save_override == save\n else [])\n # Do the same for the sub-effects\n for sub_effect in self.sub_effect.all():\n sub_override = sub_effect.save_override\n override_ability = sub_effect.override_ability\n if sub_override == save and override_ability not in overrides:\n overrides.append(override_ability)\n return overrides\n\n @property\n def skill_bonuses(self):\n # Make skill bonus dictionary\n if self.skill_bonus_id is not None:\n # Add own skill bonus if exists\n # Format = {skill: {bonus_type:[bonus1, bonus2, bonus3]\n bonuses = {self.skill_bonus_id:\n {self.bonus_type: [self.bonus_amount\n if self.bonus_amount is not None\n else self.get_x_to_y_bonus_ability_display()]\n \n }\n }\n else:\n bonuses = dict()\n # Add bonuses from sub-effects to bonuses\n for effect in self.sub_effect.all():\n sub_effect_bonuses = effect.skill_bonuses\n for skill in sub_effect_bonuses:\n for bonus_type in sub_effect_bonuses[skill]:\n if skill in bonuses:\n if bonus_type in bonuses[skill]:\n bonuses[skill][bonus_type] += (sub_effect_bonuses\n [skill][bonus_type])\n else:\n bonuses[skill][bonus_type] = (sub_effect_bonuses\n [skill][bonus_type])\n else:\n bonuses[skill] = {bonus_type: sub_effect_bonuses\n [skill][bonus_type]}\n return bonuses\n\n def __str__(self):\n return self.name\n\n\nclass Condition(models.Model):\n\n # TODO: Figure out which fields to index\n # The owner of the effect. This is used if the effect is stored for use by\n # multiple sheets owned by a user. Either this or sheet should be None.\n owner = models.ForeignKey(User, null=True, default=None)\n # The sheet the effect is attached to. This is used if the effect is used by\n # a sheet. Either this or owner should be None.\n sheet = models.ForeignKey('sheet', null=True, default=None)\n # The time the effect was created\n date = models.DateTimeField(default=timezone.now)\n # Whether the effect is active. If not, it's not factored into the\n # character sheet.\n active = models.BooleanField(default=True)\n # The effect's name. This is displayed on the sheet.\n name = models.CharField(max_length=200)\n # The effect's description. This is displayed on the sheet.\n description = models.CharField(max_length=1000)\n # Sheets can be nested. The ulimate parent (i.e. 
the effect with no\n # parents) is the one evaluated by a sheet, collecting the results of all\n # sub effects\n parent_effect = models.ForeignKey('self', related_name='sub_effect',\n related_query_name='sub_effect',\n null=True)\n # The bonus amount, if a flat bonus\n bonus_amount = models.IntegerField(default=None, null=True)\n # If the bonus is equal to an ability modifier, this stores the ability\n # using the values in ABILITY_CHOICES.\n x_to_y_bonus_ability = models.IntegerField(default=None,\n choices=ABILITY_CHOICES,\n null=True)\n # If the effect replaces an ability score in a calculation instead of\n # giving a bonus, this stores the ability using the values in\n # ABILITY_CHOICES.\n x_to_y_replace_ability = models.IntegerField(default=None,\n choices=ABILITY_CHOICES,\n null=True)\n # The bonus type, using the values in BONUS_TYPE_CHOICES\n bonus_type = models.IntegerField(default=18, choices=BONUS_TYPE_CHOICES,\n null=True)\n # If the bonus goes to a skill, this stores the skill\n skill_bonus = models.CharField(max_length=100)\n # If the bonus goes to all skills with a certain key ability, this stores\n # the ability using the values in ABILItY_CHOICES\n ability_skill_bonus = models.IntegerField(choices=ABILITY_CHOICES,\n default=None, null=True)\n # If the bonus goes to an ability, this stores the ability using the values\n # in ABILITY_CHOICES\n ability_bonus = models.IntegerField(choices=ABILITY_CHOICES, default=None,\n null=True)\n # If the bonus goes to a save, this stores the save using the values\n # in SAVE_CHOICES\n save_bonus = models.IntegerField(choices=SAVE_CHOICES, default=None,\n null=True)\n # If the effect overrides a default ability to some statistic, this stores\n # the ability using the values in ABILITY_CHOICES\n override_ability = models.IntegerField(choices=ABILITY_CHOICES,\n default=None, null=True)\n # If the effect overrides a save's default ability, this stores the save\n # using the values in SAVE_CHOICES\n save_override = models.IntegerField(choices=SAVE_CHOICES, default=None,\n null=True)\n # If the effect sets an ability to a certain value (usually 0), this stores\n # the ability using the values in ABILItY_CHOICES. Sets to bonus_amount\n ability_set = models.IntegerField(choices=ABILITY_CHOICES, default=None,\n null=True)\n # Whether the effect grants a bonus to AC\n ac_bonus = models.BooleanField(default=False)\n # Whether the effect causes the character to lose Dexterity to AC\n no_dex_to_ac = models.BooleanField(default=False)\n # Whether the effect grants a bonus to initiative\n initiative_bonus = models.BooleanField(default=False)\n # Whether the effect grants a bonus to attack. 0 if melee, 1 if ranged,\n # 2 if both\n attack_bonus = models.IntegerField(null=True, default=None)\n\n # Gets the total bonus the effect gives to an ability.\n # ability:\n # the ability to get bonuses for. integer (see default_data).\n # Returns a list of 2-tuples. 
The first element of the tuples is an integer\n # indicating the bonus type (see default_data); the second is the bonus\n # amount (an integer).\n def total_ability_bonus(self, ability):\n bonus = []\n if self.ability_bonus == ability:\n # A fixed bonus\n if self.bonus_amount is not None:\n bonus.append((self.bonus_type, self.bonus_amount))\n # X to Y on abilities really screws things up\n elif self.x_to_y_bonus_ability is not None:\n e = \"Effect {0} has from_x_stat for an ability.\".format(self.id)\n raise RuntimeError(e)\n # This shouldn't happen\n else:\n w = (\"Effect {0} has an ability listed but no bonus amount\"\n .format(self.id))\n warnings.warn(w, RuntimeWarning)\n # Add sub-effect bonuses\n for sub_effect in self.sub_effect.all():\n bonus += sub_effect.total_ability_bonus(ability)\n return bonus\n\n # Gets the total bonus the effect gives to a save.\n # ability:\n # the save to get bonuses for. integer (see default_data).\n # Returns a list of 2-tuples. The first element of the tuples is an integer\n # indicating the bonus type (see default_data); the second is the bonus\n # amount (an integer).\n def total_save_bonus(self, save):\n bonus = []\n # Only add if this effect has a bonus to the specified skill\n if self.save_bonus == save:\n # A fixed bonus\n if self.bonus_amount is not None:\n bonus.append((self.bonus_type, self.bonus_amount))\n # A bonus equal to an ability score modifier\n elif self.x_to_y_bonus_ability is not None:\n bonus.append(\n (self.bonus_type, self.get_x_to_y_bonus_ability_display()))\n # This shouldn't happen\n else:\n w = \"Effect {0} has a save listed but no bonus amount\".format(\n self.id)\n warnings.warn(w, RuntimeWarning)\n # Add sub-effect bonuses\n for sub_effect in self.sub_effect.all():\n bonus += sub_effect.total_save_bonus(save)\n return bonus\n\n # Gets the total bonus the effect gives to a skill.\n # ability:\n # the skill to get bonuses for. integer (see default_data).\n # Returns a list of 2-tuples. The first element of the tuples is an integer\n # indicating the bonus type (see default_data); the second is the bonus\n # amount (an integer).\n def total_skill_bonus(self, skill):\n if type(skill) == int:\n skill = Skill.objects.get(id=skill)\n skill = skill.name\n bonus = []\n # Only add if this effect has a bonus to the specified skill\n if self.skill_bonus in [skill, skill.super_skill.name]:\n # A fixed bonus\n if self.bonus_amount is not None:\n bonus.append((self.bonus_type, self.bonus_amount))\n # A bonus equal to an ability score modifier\n elif self.x_to_y_bonus_ability is not None:\n bonus.append(\n (self.bonus_type, self.get_x_to_y_bonus_ability_display()))\n # This shouldn't happen\n else:\n w = \"Effect {0} has a skill listed but no bonus amount\".format(\n self.id)\n warnings.warn(w, RuntimeWarning)\n # Add sub-effect bonuses\n for sub_effect in self.sub_effect.all():\n bonus += sub_effect.total_skill_bonus(skill)\n return bonus\n\n # Gets the save whose ability the effect overrides.\n # save:\n # the save to get bonuses for. 
integer (see SAVE_CHOICES).\n # Returns a list of of integers from ABILITY_CHOICES.\n def ultimate_save_override(self, save):\n # Add save override if for the given save\n overrides = ([self.override_ability] if self.save_override == save\n else [])\n # Do the same for the sub-effects\n for sub_effect in self.sub_effect.all():\n sub_override = sub_effect.save_override\n override_ability = sub_effect.override_ability\n if sub_override == save and override_ability not in overrides:\n overrides.append(override_ability)\n return overrides" }, { "alpha_fraction": 0.4964314103126526, "alphanum_fraction": 0.4964314103126526, "avg_line_length": 39.67741775512695, "blob_id": "d1b31ca8fc81c887b1ff8f5eb427ca49330ba028", "content_id": "fad1164e58c006c52bb4a5de1740f9bd8fcb7533", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1261, "license_type": "no_license", "max_line_length": 76, "num_lines": 31, "path": "/main/models/receivers.py", "repo_name": "JefftheGreen/charsheets", "src_encoding": "UTF-8", "text": "from django.db.models.signals import post_save\nfrom django.dispatch import receiver\nfrom main.default_data import DEFAULT_SKILL_IDS\nfrom . import Sheet\nfrom .misc import Skill\n\n@receiver(post_save)\ndef finished_saving(sender, **kwargs):\n if sender == Sheet:\n try:\n created = kwargs['created']\n instance = kwargs['instance']\n skill_ids = {None:None}\n if created:\n # Loop through skills that aren't subskills\n for skill in Skill.objects.filter(id__in=DEFAULT_SKILL_IDS):\n old_id = skill.id\n # Remove \n skill.pk = None\n skill.sheet = instance\n skill.save()\n skill_ids[old_id] = skill.id\n for sub_skill in (Skill.objects.filter\n (id__in=skill_ids.values())\n .exclude(super_skill=None)):\n sub_skill.super_skill_id = skill_ids[sub_skill\n .super_skill.id]\n sub_skill.save()\n except KeyError:\n print('finished_saving did not receive expected kwargs')\n raise\n" }, { "alpha_fraction": 0.5321888327598572, "alphanum_fraction": 0.6008583903312683, "avg_line_length": 22.299999237060547, "blob_id": "056d21ea022ea9fa633d54d8855027be1ec25abb", "content_id": "994410c6637669c68aee1aacfbf33b7c87706e7e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 466, "license_type": "no_license", "max_line_length": 63, "num_lines": 20, "path": "/main/migrations/0006_auto_20160517_2046.py", "repo_name": "JefftheGreen/charsheets", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.9.6 on 2016-05-17 20:46\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('main', '0005_auto_20160517_2045'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='effect',\n name='bonus_amount',\n field=models.IntegerField(default=None, null=True),\n ),\n ]\n" }, { "alpha_fraction": 0.8186968564987183, "alphanum_fraction": 0.8186968564987183, "avg_line_length": 26.153846740722656, "blob_id": "9b7a2e2e6af0e544657d635dd9e9f46b49460137", "content_id": "17d295375ad8b2464675954af991cba98fb77e60", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 353, "license_type": "no_license", "max_line_length": 34, "num_lines": 13, "path": "/main/admin.py", "repo_name": "JefftheGreen/charsheets", "src_encoding": "UTF-8", "text": "from django.contrib import admin\nfrom main.models import *\n\n# Register your models 
here.\nadmin.site.register(Sheet)\nadmin.site.register(Skill)\nadmin.site.register(Feat)\nadmin.site.register(Effect)\nadmin.site.register(RacialAbility)\nadmin.site.register(ClassAbility)\nadmin.site.register(Container)\nadmin.site.register(Item)\nadmin.site.register(Property)\n" }, { "alpha_fraction": 0.5380717515945435, "alphanum_fraction": 0.5554074048995972, "avg_line_length": 51.07638931274414, "blob_id": "6a4cc626d74c2e923664fcaa14d0921c44ab3447", "content_id": "a300d17126b2cf7b5598ee2ba83a14f7c6c60c5e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7499, "license_type": "no_license", "max_line_length": 188, "num_lines": 144, "path": "/main/migrations/0001_initial.py", "repo_name": "JefftheGreen/charsheets", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.9.6 on 2016-05-15 02:08\nfrom __future__ import unicode_literals\n\nfrom django.conf import settings\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n migrations.swappable_dependency(settings.AUTH_USER_MODEL),\n ]\n\n operations = [\n migrations.CreateModel(\n name='ClassAbility',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=200)),\n ('description', models.CharField(max_length=1000)),\n ],\n ),\n migrations.CreateModel(\n name='Container',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=200)),\n ('max_items', models.IntegerField()),\n ],\n ),\n migrations.CreateModel(\n name='Effect',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=200)),\n ('description', models.CharField(max_length=1000)),\n ('_bonus_amount', models.IntegerField()),\n ('ability_bonus', models.IntegerField(choices=[(0, 'Strength'), (1, 'Dexterity'), (2, 'Constitution'), (3, 'Intelligence'), (4, 'Wisdom'), (5, 'Charisma')], default=None)),\n ('save_bonus', models.IntegerField(choices=[(0, 'Fortitude'), (1, 'Reflex'), (2, 'Will')], default=None)),\n ('from_x_stat', models.IntegerField(choices=[(0, 'Strength'), (1, 'Dexterity'), (2, 'Constitution'), (3, 'Intelligence'), (4, 'Wisdom'), (5, 'Charisma')], default=None)),\n ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),\n ('parent_effect', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sub_effect', to='main.Effect')),\n ],\n ),\n migrations.CreateModel(\n name='Feat',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=200)),\n ('description', models.CharField(max_length=1000)),\n ('effect', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='main.Effect')),\n ],\n ),\n migrations.CreateModel(\n name='Item',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=200)),\n ('item_type', models.CharField(choices=[('W', 'Weapon'), ('A', 'Armor'), ('S', 'Shield'), ('C', 'Clothing'), ('T', 'Tool')], default='T', max_length=1)),\n ('damage', models.CharField(max_length=50)),\n ('critical', 
models.CharField(max_length=50)),\n ('weapon_range', models.CharField(max_length=50)),\n ('weight', models.CharField(max_length=50)),\n ('size', models.CharField(max_length=50)),\n ('damage_type', models.CharField(max_length=50)),\n ('base_ac_bonus', models.IntegerField(max_length=50)),\n ('base_acp', models.CharField(max_length=50)),\n ('base_asf', models.CharField(max_length=50)),\n ('weapon_enhancement_bonus', models.CharField(max_length=50)),\n ('armor_enhancement_bonus', models.CharField(max_length=50)),\n ('container', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='main.Container')),\n ],\n ),\n migrations.CreateModel(\n name='Property',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('item', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='main.Item')),\n ],\n ),\n migrations.CreateModel(\n name='RacialAbility',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=200)),\n ('description', models.CharField(max_length=1000)),\n ('effect', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='main.Effect')),\n ],\n ),\n migrations.CreateModel(\n name='Sheet',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=200)),\n ('char_name', models.CharField(max_length=200)),\n ('race', models.CharField(max_length=200)),\n ('classes', models.CharField(max_length=200)),\n ('gender', models.CharField(max_length=200)),\n ('alignment', models.CharField(max_length=200)),\n ('deity', models.CharField(max_length=200)),\n ('campaign', models.CharField(max_length=200)),\n ('age', models.CharField(max_length=200)),\n ('_size', models.CharField(max_length=200)),\n ('disp_base_str', models.CharField(max_length=5)),\n ('disp_base_dex', models.CharField(max_length=5)),\n ('disp_base_con', models.CharField(max_length=5)),\n ('disp_base_int', models.CharField(max_length=5)),\n ('disp_base_wis', models.CharField(max_length=5)),\n ('disp_base_cha', models.CharField(max_length=5)),\n ('disp_base_will', models.CharField(max_length=5)),\n ('disp_base_ref', models.CharField(max_length=5)),\n ('disp_base_fort', models.CharField(max_length=5)),\n ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),\n ],\n ),\n migrations.CreateModel(\n name='Skill',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=200)),\n ('default_stat', models.IntegerField()),\n ('sheet', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='skill', to='main.Sheet')),\n ],\n ),\n migrations.AddField(\n model_name='effect',\n name='sheets',\n field=models.ManyToManyField(related_name='effect', to='main.Sheet'),\n ),\n migrations.AddField(\n model_name='effect',\n name='skill_bonus',\n field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, to='main.Skill'),\n ),\n migrations.AddField(\n model_name='classability',\n name='effect',\n field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='main.Effect'),\n ),\n ]\n" }, { "alpha_fraction": 0.7423133254051208, "alphanum_fraction": 0.7467057108879089, "avg_line_length": 30.090909957885742, "blob_id": "fd51cd86501aa54168c56aebde8f5c4a266e15a3", 
"content_id": "fbc9f7319a1feb2370069db9b787e3dcc83fe385", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 683, "license_type": "no_license", "max_line_length": 66, "num_lines": 22, "path": "/main/views/forms.py", "repo_name": "JefftheGreen/charsheets", "src_encoding": "UTF-8", "text": "from django import forms\n\n\nclass LoginForm(forms.Form):\n username = forms.CharField()\n password = forms.CharField(widget=forms.PasswordInput)\n\nclass RegisterForm(forms.Form):\n username = forms.CharField()\n password = forms.CharField(widget=forms.PasswordInput)\n confirm_password = forms.CharField(widget=forms.PasswordInput)\n\n\nclass ResetForm(forms.Form):\n old_password = forms.CharField(widget=forms.PasswordInput)\n new_password = forms.CharField(widget=forms.PasswordInput)\n confirm_password = forms.CharField(widget=forms.PasswordInput)\n\n\nclass NewSheetForm(forms.Form):\n name = forms.CharField()\n type = forms.ChoiceField(choices=[['0','D&D 3.5e']])" }, { "alpha_fraction": 0.6156831979751587, "alphanum_fraction": 0.6208696365356445, "avg_line_length": 43.238094329833984, "blob_id": "a76f65b983e2d379c0eb422cbb958ec41aa930c0", "content_id": "6051e65b657f5101a753ee8b5c7e2ade33363b36", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 30659, "license_type": "no_license", "max_line_length": 83, "num_lines": 693, "path": "/main/models/sheet.py", "repo_name": "JefftheGreen/charsheets", "src_encoding": "UTF-8", "text": "from django.db import models\nfrom django.contrib.auth.models import User\nfrom django.utils import timezone\nfrom main.default_data import DEFAULT_SAVE_ABILITIES, DEFAULT_CONDITION_EFFECTS\nfrom django.core.exceptions import FieldDoesNotExist, ObjectDoesNotExist\nfrom main.models import Condition\nfrom django.utils.functional import cached_property\nimport re\n\n\nclass Sheet(models.Model):\n\n # TODO: Figure out which fields to index\n # The user that created the sheet\n owner = models.ForeignKey(User, on_delete=models.CASCADE)\n # The time the effect was created\n date = models.DateTimeField(default=timezone.now)\n # The name of the sheet, displayed where the sheet is referred to\n name = models.CharField(max_length=200, blank=True, default='')\n # The character name, displayed on the sheet\n char_name = models.CharField(max_length=200, blank=True, default='')\n # The character race, displayed on the sheet\n race = models.CharField(max_length=200, blank=True, default='')\n # The character classes, displayed on the sheet\n classes = models.CharField(blank=True, max_length=200)\n # The character's gender, displayed on the sheet\n gender = models.CharField(max_length=200, blank=True, default='')\n # The character's alignment, displayed on the sheet\n alignment = models.CharField(max_length=200, blank=True, default='')\n # The character's deity, displayed on the sheet\n deity = models.CharField(max_length=200, blank=True, default='')\n # The campaign the character is for, displayed on the sheet\n campaign = models.CharField(max_length=200, blank=True, default='')\n # The character's age, displayed on the sheet\n age = models.CharField(max_length=200, blank=True, default='')\n # The character's size, displayed on the sheet, and parsed for calculations\n disp_size = models.CharField(max_length=200, blank=True, default='')\n # The character's Str, displayed on the sheet, and parsed for calculations\n disp_base_str = models.CharField(max_length=5, blank=True, default='')\n # 
The character's Dex, displayed on the sheet, and parsed for calculations\n disp_base_dex = models.CharField(max_length=5, blank=True, default='')\n # The character's Con, displayed on the sheet, and parsed for calculations\n disp_base_con = models.CharField(max_length=5, blank=True, default='')\n # The character's Int, displayed on the sheet, and parsed for calculations\n disp_base_int = models.CharField(max_length=5, blank=True, default='')\n # The character's Wis, displayed on the sheet, and parsed for calculations\n disp_base_wis = models.CharField(max_length=5, blank=True, default='')\n # The character's Cha, displayed on the sheet, and parsed for calculations\n disp_base_cha = models.CharField(max_length=5, blank=True, default='')\n # The character's Will, displayed on the sheet, and parsed for calculations\n disp_base_will = models.CharField(max_length=5, blank=True, default='0')\n # The character's Ref, displayed on the sheet, and parsed for calculations\n disp_base_ref = models.CharField(max_length=5, blank=True, default='0')\n # The character's Fort, displayed on the sheet, and parsed for calculations\n disp_base_fort = models.CharField(max_length=5, blank=True, default='0')\n # Whether the character is blinded\n blinded = models.BooleanField(default=False)\n # Whether the character is confused\n confused = models.BooleanField(default=False)\n # Whether the character is dazed\n dazed = models.BooleanField(default=False)\n # Whether the character is dazzled\n dazzled = models.BooleanField(default=False)\n # Whether the character is deafened\n deafened = models.BooleanField(default=False)\n # Whether the character is disabled\n disabled = models.BooleanField(default=False)\n # Whether the character is dying\n dying = models.BooleanField(default=False)\n # Whether the character is entangled\n entangled = models.BooleanField(default=False)\n # Whether the character is fascinated\n fascinated = models.BooleanField(default=False)\n # Whether the character is flat-footed\n flat_footed = models.BooleanField(default=False)\n # Whether the character is grappling\n grappling = models.BooleanField(default=False)\n # Whether the character is helpless\n helpless = models.BooleanField(default=False)\n # Whether the character is incorporeal\n incorporeal = models.BooleanField(default=False)\n # Whether the character is invisible\n invisible = models.BooleanField(default=False)\n # Whether the character is nauseated\n nauseated = models.BooleanField(default=False)\n # Whether the character is paralyzed\n paralyzed = models.BooleanField(default=False)\n # Whether the character is petrified\n petrified = models.BooleanField(default=False)\n # Whether the character is pinned\n pinned = models.BooleanField(default=False)\n # Whether the character is prone\n prone = models.BooleanField(default=False)\n # Whether the character is sickened\n sickened = models.BooleanField(default=False)\n # Whether the character is stable\n stable = models.BooleanField(default=False)\n # Whether the character is staggered\n staggered = models.BooleanField(default=False)\n # Whether the character is stunned\n stunned = models.BooleanField(default=False)\n # Whether the character is turned\n turned = models.BooleanField(default=False)\n # Whether the character is unconscious\n unconscious = models.BooleanField(default=False)\n # Whether the character is cowering\n cowering = models.BooleanField(default=False)\n # Whether the character is fatigued 0=normal, 1=fatigued, 2=exhausted\n fatigue_degree = 
models.IntegerField(null=False, default=0)\n # How much fear 0=normal, 1=shaken, 2=frightened, 3=panicked, 4=cowering\n fear_degree = models.IntegerField(null=False, default=0)\n\n @cached_property\n def disp_abilities(self):\n return (self.disp_base_str, self.disp_base_dex, self.disp_base_con,\n self.disp_base_int, self.disp_base_wis, self.disp_base_cha)\n\n @cached_property\n def disp_saves(self):\n return self.disp_base_fort, self.disp_base_ref, self.disp_base_will\n \n # The character's base Str, used in calculations. numeric\n @cached_property\n def base_str(self):\n return self.base_ability(0)\n \n # The character's base Dex, used in calculations. numeric\n @cached_property\n def base_dex(self):\n return self.base_ability(1)\n \n # The character's base Con, used in calculations. numeric\n @cached_property\n def base_con(self):\n return self.base_ability(2)\n \n # The character's base Int, used in calculations. numeric\n @cached_property\n def base_int(self):\n return self.base_ability(3)\n \n # The character's base Wis, used in calculations. numeric\n @cached_property\n def base_wis(self):\n return self.base_ability(4)\n \n # The character's base Cha, used in calculations. numeric\n @cached_property\n def base_cha(self):\n return self.base_ability(5)\n \n # The character's base Str modifier, calculated from base_str. integer\n @property\n def base_str_mod(self):\n return self.ability_mod(self.base_str)\n \n # The character's base Dex modifier, calculated from base_dex. integer\n @property\n def base_dex_mod(self):\n return self.ability_mod(self.base_dex)\n \n # The character's base Con modifier, calculated from base_con. integer\n @property\n def base_con_mod(self):\n return self.ability_mod(self.base_con)\n \n # The character's base Int modifier, calculated from base_int. integer\n @property\n def base_int_mod(self):\n return self.ability_mod(self.base_int)\n \n # The character's base Wis modifier, calculated from base_wis. integer\n @property\n def base_wis_mod(self):\n return self.ability_mod(self.base_wis)\n \n # The character's base Cha modifier, calculated from base_cha. integer\n @cached_property\n def base_cha_mod(self):\n return self.ability_mod(self.base_cha)\n\n @cached_property\n def total_ability_bonuses(self):\n return [self.total_ability_bonus(i) for i in range(0, 6)]\n\n # The character's Str, accounting for all effects. integer\n @cached_property\n def fin_str(self):\n return self.fin_ability(0)\n \n # The character's Dex, accounting for all effects. integer\n @cached_property\n def fin_dex(self):\n return self.fin_ability(1)\n \n # The character's Con, accounting for all effects. integer\n @cached_property\n def fin_con(self):\n return self.fin_ability(2)\n \n # The character's Int, accounting for all effects. integer\n @cached_property\n def fin_int(self):\n return self.fin_ability(3)\n \n # The character's Wis, accounting for all effects. integer\n @cached_property\n def fin_wis(self):\n return self.fin_ability(4)\n \n # The character's Cha, accounting for all effects. integer\n @cached_property\n def fin_cha(self):\n return self.fin_ability(5)\n \n # The character's Str modifier, calculated from fin_str. integer\n @property\n def fin_str_mod(self):\n return self.ability_mod(self.fin_str)\n \n # The character's Dex modifier, calculated from fin_dex. integer\n @property\n def fin_dex_mod(self):\n return self.ability_mod(self.fin_dex)\n \n # The character's Con modifier, calculated from fin_con. 
integer\n @property\n def fin_con_mod(self):\n return self.ability_mod(self.fin_con)\n \n # The character's Int modifier, calculated from fin_int. integer\n @property\n def fin_int_mod(self):\n return self.ability_mod(self.fin_int)\n \n # The character's Wis modifier, calculated from fin_wis. integer\n @property\n def fin_wis_mod(self):\n return self.ability_mod(self.fin_wis)\n \n # The character's Cha modifier, calculated from fin_cha. integer\n @property\n def fin_cha_mod(self):\n return self.ability_mod(self.fin_cha)\n\n # The character's base Fortitude save, used in calculations. numeric\n @cached_property\n def base_fort(self):\n return self.base_save(0)\n\n # The character's base Fortitude save, used in calculations. numeric\n @cached_property\n def base_ref(self):\n return self.base_save(1)\n\n # The character's base Fortitude save, used in calculations. numeric\n @cached_property\n def base_will(self):\n return self.base_save(2)\n\n @cached_property\n def fort_ability_mod(self):\n return self.save_ability_mod(0)\n\n @cached_property\n def ref_ability_mod(self):\n return self.save_ability_mod(1)\n\n @cached_property\n def will_ability_mod(self):\n return self.save_ability_mod(2)\n\n @cached_property\n def fort_bonus(self):\n return self.save_bonus(0)\n\n @cached_property\n def ref_bonus(self):\n return self.save_bonus(1)\n\n @cached_property\n def will_bonus(self):\n return self.save_bonus(2)\n\n # The character's base Fortitude save, used in calculations. numeric\n @cached_property\n def fin_fort(self):\n return self.fin_save(0)\n\n # The character's base Fortitude save, used in calculations. numeric\n @cached_property\n def fin_ref(self):\n return self.fin_save(1)\n\n # The character's base Fortitude save, used in calculations. numeric\n @cached_property\n def fin_will(self):\n return self.fin_save(2)\n\n # The character's base Fortitude save, used in calculations. numeric\n @cached_property\n def active_effects(self):\n raw_effects = list(self.effect_set.filter(active=True))\n feat_effects = []#[f.effect for f in self.feat_set.all()]\n # TODO: Figure out how to implement item properties here.\n item_effects = []\n return (raw_effects + feat_effects +\n item_effects + self.active_conditions)\n\n @cached_property\n def active_conditions(self):\n conditions = {}\n active = []\n for condition in DEFAULT_CONDITION_EFFECTS:\n try:\n if self._meta.get_field(condition).value_to_string(self) == 'True':\n active.append(Condition.objects.get(name=condition))\n except (FieldDoesNotExist, ObjectDoesNotExist):\n pass\n if self.fatigue_degree == 1:\n active.append(Condition.objects.get(name='fatigued'))\n elif self.fatigue_degree == 2:\n active.append(Condition.objects.get(name='exhausted'))\n if self.fear_degree == 1:\n active.append(Condition.objects.get(name='shaken'))\n elif self.fear_degree == 2:\n active.append(Condition.objects.get(name='frightened'))\n elif self.fear_degree == 3:\n active.append(Condition.objects.get(name='panicked'))\n return active\n\n @cached_property\n def all_skills(self):\n skills_unsort = list(self.skill_set.filter(super_skill=None))\n skills_unsort.sort(key=lambda s: s.name)\n skills_sort = []\n for s in skills_unsort:\n sub_skills = list(self.skill_set.filter(super_skill=s))\n sub_skills.sort(key=lambda s: s.name)\n skills_sort.append((s, tuple(sub_skills)))\n return tuple(skills_sort)\n\n\n\n # Parses an ability's display value into a numeric.\n # ability:\n # the ability to get bonuses for. integer (see Effect model).\n # Returns a numeric or None. 
In order of preference, disp_base_X as an\n # integer, disp_base_X as a float, disp_base_X with the first regex\n # indicating a float ('[0-9]+(\\.[0-9]+)*'), and None.\n def base_ability(self, ability):\n disp_ability = self.disp_abilities[ability]\n if disp_ability == '':\n return ''\n # If we can, just turn it into a numeric\n try:\n return int(disp_ability)\n except ValueError:\n try:\n return float(disp_ability)\n # It doesn't turn into a numeric\n except ValueError:\n # Find the first float-like string in disp_ability\n num_regex = re.compile('[0-9]+(\\.[0-9]+)*')\n found = num_regex.search(disp_ability)\n # If it exists\n if found:\n return float(found.group())\n # If there's something other than a number in disp_ability,\n # treat it as a non-ability.\n else:\n return None\n\n # Calculates a final ability score, including all bonuses.\n # ability:\n # the ability to get the modifier for. integer (see Effect model).\n # Returns an integer or None.\n def fin_ability(self, ability):\n # Start with the base ability\n fin_ability = self.base_ability(ability)\n # If the base is empty, so is the final\n if fin_ability == '':\n return ''\n # If we couldn't parse disp_ability, it's a non-ability\n if fin_ability is None:\n return \"–\"\n # Add all penalties and bonuses from effects\n for bonus_type, modifiers in self.total_ability_bonuses[ability].items():\n penalty, bonus = min(modifiers), max(modifiers)\n fin_ability += penalty + bonus\n return fin_ability\n\n # Gets the ability modifier for an ability. (ability - 10) / 2\n # ability:\n # the ability to get the modifier for. integer (see Effect model).\n # Returns an integer or None.\n def ability_mod(self, ability_score):\n try:\n return int((ability_score - 10) / 2)\n # It's not a numeric, so defaults to 0\n except (ValueError, TypeError):\n return 0\n\n # Parses a save's display value into a numeric.\n # save:\n # the save to get bonuses for. integer (see Effect model).\n # Returns a numeric. In order of preference, disp_save as an integer,\n # disp_save as a float, disp_save with the first regex indicating a float\n # ('[0-9]+(\\.[0-9]+)*'), and 0.\n def base_save(self, save):\n disp_save = self.disp_saves[save]\n # If we can, just turn it into a numeric\n try:\n return int(disp_save)\n except ValueError:\n try:\n return float(disp_save)\n # It doesn't turn into a numeric easily.\n except ValueError:\n # Find the first float-like string in disp_save\n num_regex = re.compile('[0-9]+(\\.[0-9]+)*')\n found = num_regex.search(disp_save)\n # If it exists\n if found:\n return float(found.group())\n # If there's something other than a number in disp_save\n else:\n return 0\n\n def save_ability_mod(self, save):\n return self.ability_mod(self.fin_ability(\n self.ultimate_save_ability(save)))\n\n def save_bonus(self, save):\n total_bonus = 0\n # Add all bonuses from effects.\n for bonus_type, modifiers in self.total_save_bonus(save).items():\n penalty, bonus = min(modifiers), max(modifiers)\n total_bonus += penalty + bonus\n return total_bonus\n\n # Gets the final value for a save\n # save:\n # the save to get the final value for. 
integer (see Effect model).\n # Returns an integer.\n def fin_save(self, save):\n # Start with the base save bonus\n fin_save = self.base_save(save)\n # Add the key ability modifier\n fin_save += self.save_ability_mod(save)\n # Add all the bonuses\n fin_save += self.save_bonus(save)\n return fin_save\n\n # Gets the bonuses and penalties for an ability from a single effect\n # ability:\n # the ability to get bonuses for. integer (see Effect model).\n # effect:\n # the effect to get bonuses from. Effect.\n # Returns a dictionary. Format is {bonus_type: modifiers}. bonus_type\n # is an integer referring to the bonus type (see models.Effect). modifiers\n # is a range with minimum equal to the penalty of that type (0 if none)\n # and a maximum equal to the bonus of that type (0 if none).\n def effect_ability_bonus(self, ability, effect):\n raw_bonuses = effect.total_ability_bonus(ability)\n # Bonuses are stored as bonus_type: bonus. bonus_type is an integer\n # referring to the bonus types in Effect. bonus is a range with min\n # equal to the worst penalty and a max equal to the best bonus.\n # Remember when setting ranges that the max is set to the second\n # argument _minus one_, so you need to make it one higher.\n bonuses = {}\n for bonus_type, amount in raw_bonuses:\n if bonus_type in bonuses:\n old_bonus = bonuses[bonus_type]\n # If amount is a penalty and worse than the old one, use it\n if amount < min(old_bonus):\n bonuses[bonus_type] = range(amount, max(old_bonus) + 1)\n # If amount is a bonus and better than the old one, use it\n elif amount > max(old_bonus):\n bonuses[bonus_type] = range(min(old_bonus), amount + 1)\n else:\n # If amount is a bonus, set penalty to 0 and bonus to amount\n if amount > 0:\n bonuses[bonus_type] = range(0, amount + 1)\n # If amount is a penalty, set penalty to amount and bonus to 0\n elif amount < 0:\n bonuses[bonus_type] = range(amount, 1)\n # This shouldn't happen\n else:\n bonuses[bonus_type] = range(0, 1)\n return bonuses\n\n # Gets the bonuses and penalties for an ability from a single effect\n # skill:\n # the skill to get bonuses for. Skill or integer refering to Skill id.\n # effect:\n # the effect to get bonuses from. Effect.\n # Returns a dictionary. Format is {bonus_type: modifiers}. bonus_type\n # is an integer referring to the bonus type (see models.Effect). modifiers\n # is a range with minimum equal to the penalty of that type (0 if none)\n # and a maximum equal to the bonus of that type (0 if none).\n def effect_skill_bonus(self, skill, effect):\n raw_bonuses = effect.total_skill_bonus(skill)\n # Bonuses are stored as bonus_type: bonus. bonus_type is an integer\n # referring to the bonus types in Effect. 
bonus is a range with min\n        # equal to the worst penalty and a max equal to the best bonus.\n        # Remember when setting ranges that the max is set to the second\n        # argument _minus one_, so you need to make it one higher.\n        bonuses = {}\n        for bonus_type, amount in raw_bonuses:\n            if type(amount) is str:\n                amount = {'Strength': self.fin_str_mod,\n                          'Dexterity': self.fin_dex_mod,\n                          'Constitution': self.fin_con_mod,\n                          'Intelligence': self.fin_int_mod,\n                          'Wisdom': self.fin_wis_mod,\n                          'Charisma': self.fin_cha_mod,\n                          }[amount]\n            if bonus_type in bonuses:\n                old_bonus = bonuses[bonus_type]\n                # If amount is a penalty and worse than the old one, use it\n                if amount < min(old_bonus):\n                    bonuses[bonus_type] = range(amount, max(old_bonus) + 1)\n                # If amount is a bonus and better than the old one, use it\n                elif amount > max(old_bonus):\n                    bonuses[bonus_type] = range(min(old_bonus), amount + 1)\n            else:\n                # If amount is a bonus, set penalty to 0 and bonus to amount\n                if amount > 0:\n                    bonuses[bonus_type] = range(0, amount + 1)\n                # If amount is a penalty, set penalty to amount and bonus to 0\n                elif amount < 0:\n                    bonuses[bonus_type] = range(amount, 1)\n                # This shouldn't happen\n                else:\n                    bonuses[bonus_type] = range(0, 1)\n        return bonuses\n\n    # Gets the bonuses and penalties for a save from a single effect\n    # save:\n    #   the save to get bonuses for. integer (see Effect model).\n    # effect:\n    #   the effect to get bonuses from. Effect.\n    # Returns a dictionary. Format is {bonus_type: modifiers}. bonus_type\n    # is an integer referring to the bonus type (see models.Effect). modifiers\n    # is a range with minimum equal to the penalty of that type (0 if none)\n    # and a maximum equal to the bonus of that type (0 if none).\n    def effect_save_bonus(self, save, effect):\n        raw_bonuses = effect.total_save_bonus(save)\n        # Bonuses are stored as bonus_type: bonus. bonus_type is an integer\n        # referring to the bonus types in Effect. bonus is a range with min\n        # equal to the worst penalty and a max equal to the best bonus.\n        # Remember when setting ranges that the max is set to the second\n        # argument _minus one_, so you need to make it one higher.\n        bonuses = {}\n        for bonus_type, amount in raw_bonuses:\n            if type(amount) is str:\n                amount = {'Strength': self.fin_str_mod,\n                          'Dexterity': self.fin_dex_mod,\n                          'Constitution': self.fin_con_mod,\n                          'Intelligence': self.fin_int_mod,\n                          'Wisdom': self.fin_wis_mod,\n                          'Charisma': self.fin_cha_mod,\n                          }[amount]\n            if bonus_type in bonuses:\n                old_bonus = bonuses[bonus_type]\n                # If amount is a penalty and worse than the old one, use it\n                if amount < min(old_bonus):\n                    bonuses[bonus_type] = range(amount, max(old_bonus) + 1)\n                # If amount is a bonus and better than the old one, use it\n                elif amount > max(old_bonus):\n                    bonuses[bonus_type] = range(min(old_bonus), amount + 1)\n            else:\n                # If amount is a bonus, set penalty to 0 and bonus to amount\n                if amount > 0:\n                    bonuses[bonus_type] = range(0, amount + 1)\n                # If amount is a penalty, set penalty to amount and bonus to 0\n                elif amount < 0:\n                    bonuses[bonus_type] = range(amount, 1)\n                # This shouldn't happen\n                else:\n                    bonuses[bonus_type] = range(0, 1)\n        return bonuses\n\n    # Gets the ability bonuses of each type from all effects\n    # ability:\n    #   the ability to get bonuses for. integer (see Effect model).\n    # Returns a dictionary. Format is {bonus_type: modifiers}. bonus_type\n    # is an integer referring to the bonus type (see models.Effect). 
modifiers\n    # is a range with minimum equal to the penalty of that type (0 if none)\n    # and a maximum equal to the bonus of that type (0 if none).\n    def total_ability_bonus(self, ability):\n        bonuses = {}\n        # Cycle through all active effects\n        for effect in self.active_effects:\n            # Get the ability bonuses for the effect\n            # {type: range(penalty, bonus)}\n            effect_bonuses = self.effect_ability_bonus(ability, effect)\n            # Cycle through each bonus type, adding penalties and bonuses\n            for bonus_type in effect_bonuses:\n                penalty = min(effect_bonuses[bonus_type])\n                bonus = max(effect_bonuses[bonus_type])\n                # If we've already had this bonus type from another effect, use\n                # the worst penalty and the best bonus.\n                if bonus_type in bonuses:\n                    old_penalty = min(bonuses[bonus_type])\n                    old_bonus = max(bonuses[bonus_type])\n                    worst_penalty = min(old_penalty, penalty)\n                    best_bonus = max(old_bonus, bonus)\n                    bonuses[bonus_type] = range(worst_penalty, best_bonus + 1)\n                # If we haven't had this bonus type, just add it to the list\n                else:\n                    bonuses[bonus_type] = range(penalty, bonus + 1)\n        return bonuses\n\n    # Gets the skill bonuses of each type from all effects\n    # skill:\n    #   the skill to get bonuses for. Skill or integer referring to Skill id.\n    # Returns a dictionary. Format is {bonus_type: modifiers}. bonus_type\n    # is an integer referring to the bonus type (see models.Effect). modifiers\n    # is a range with minimum equal to the penalty of that type (0 if none)\n    # and a maximum equal to the bonus of that type (0 if none).\n    def total_skill_bonus(self, skill):\n        bonuses = {}\n        # Cycle through all active effects\n        for effect in self.active_effects:\n            # Get the skill bonuses for the effect {type: range(penalty, bonus)}\n            effect_bonuses = self.effect_skill_bonus(skill, effect)\n            # Cycle through each bonus type, adding penalties and bonuses\n            for bonus_type in effect_bonuses:\n                # Penalties and bonuses are stored as ranges.\n                penalty = min(effect_bonuses[bonus_type])\n                bonus = max(effect_bonuses[bonus_type])\n                # If we've already had this bonus type from another effect, use\n                # the worst penalty and the best bonus.\n                if bonus_type in bonuses:\n                    old_penalty = min(bonuses[bonus_type])\n                    old_bonus = max(bonuses[bonus_type])\n                    worst_penalty = min(old_penalty, penalty)\n                    best_bonus = max(old_bonus, bonus)\n                    bonuses[bonus_type] = range(worst_penalty, best_bonus + 1)\n                # If we haven't had this bonus type, just add it to the list\n                else:\n                    bonuses[bonus_type] = range(penalty, bonus + 1)\n        return bonuses\n\n    # Gets the save bonuses of each type from all effects\n    # save:\n    #   the save to get bonuses for. integer (see Effect model).\n    # Returns a dictionary. Format is {bonus_type: modifiers}. bonus_type\n    # is an integer referring to the bonus type (see models.Effect). 
modifiers\n    # is a range with minimum equal to the penalty of that type (0 if none)\n    # and a maximum equal to the bonus of that type (0 if none).\n    def total_save_bonus(self, save):\n        bonuses = {}\n        # Cycle through all active effects\n        for effect in self.active_effects:\n            # Get the save bonuses for the effect {type: range(penalty, bonus)}\n            effect_bonuses = self.effect_save_bonus(save, effect)\n            # Cycle through each bonus type, adding penalties and bonuses\n            for bonus_type in effect_bonuses:\n                # Penalties and bonuses are stored as ranges.\n                penalty = min(effect_bonuses[bonus_type])\n                bonus = max(effect_bonuses[bonus_type])\n                # If we've already had this bonus type from another effect, use\n                # the worst penalty and the best bonus.\n                if bonus_type in bonuses:\n                    old_penalty = min(bonuses[bonus_type])\n                    old_bonus = max(bonuses[bonus_type])\n                    worst_penalty = min(old_penalty, penalty)\n                    best_bonus = max(old_bonus, bonus)\n                    bonuses[bonus_type] = range(worst_penalty, best_bonus + 1)\n                # If we haven't had this bonus type, just add it to the list\n                else:\n                    bonuses[bonus_type] = range(penalty, bonus + 1)\n        return bonuses\n\n    # Determines which ability will be used for a save\n    # save:\n    #   the save to get the key ability. integer (see Effect model).\n    # Returns an integer indicating the key ability (see Effect model). In order\n    # of preference, the ability from the most recently added effect overriding\n    # the save ability, the default save ability.\n    def ultimate_save_ability(self, save):\n        # Get the save override from all effects that have one.\n        abilities = list({s for effect in self.active_effects\n                          for s in effect.ultimate_save_override(save)})\n        # Add the default\n        abilities.append(DEFAULT_SAVE_ABILITIES[save])\n        # Get the save override with the highest modifier.\n        abilities.sort(key=lambda a: self.fin_ability(a))\n        return abilities[-1]\n" } ]
17
danmartinez78/advent-of-code-bot
https://github.com/danmartinez78/advent-of-code-bot
d2fd3406039c9ecc74fb7e00322f5eb058e53b17
7480c3ff2ef2d6d4c722cd4adf2030f841b0b400
c344f5c5a164b3704db3ae9c68f07d63e95403fd
refs/heads/main
2023-01-28T02:16:57.479638
2020-12-15T01:29:19
2020-12-15T01:29:19
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7570781707763672, "alphanum_fraction": 0.7621744275093079, "avg_line_length": 72.58333587646484, "blob_id": "5064ade28059ce3a5ce83a51916c97d1c8128fbf", "content_id": "5574836ec84eb5a60b26180c6087379b98ff1e81", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1766, "license_type": "permissive", "max_line_length": 421, "num_lines": 24, "path": "/README.md", "repo_name": "danmartinez78/advent-of-code-bot", "src_encoding": "UTF-8", "text": "# advent-of-code-bot [![Latest release](https://travis-ci.com/Danyc0/advent-of-code-bot.svg?branch=main)](https://travis-ci.com/Danyc0/advent-of-code-bot)\nA simple Discord bot for Advent Of Code\n\n## Dependencies\n    pip install -U Discord.py python-dotenv\n\n## Setup\n\nYou'll need to create a discord bot of your own in the [Discord Developer Portal](https://discord.com/developers/applications) with View Channels and Read Messages permissions. It's also handy if you have an empty server (or \"guild\") to test in. This section of [this guide](https://realpython.com/how-to-make-a-discord-bot-python/#how-to-make-a-discord-bot-in-the-developer-portal) may be helpful to set that up.\n\nYou'll need to set three environment variables:\n* DISCORD_TOKEN -> The Discord token for the bot you created (Available on your bot page in the developer portal)\n* AOC_URL -> The JSON URL for your Advent Of Code private leaderboard (Available by clicking \"API\" then \"JSON\" on your private leaderboard page)\n* AOC_COOKIE -> Your Advent Of Code session cookie so the bot has permission to view your private leaderboard (You can extract this from your web browser after signing in to AoC)\n\nYou can put these in a .env file in the repo directory as it uses dotenv (See [here](https://pypi.org/project/python-dotenv/) for usage) so you don't have to keep them in your environment\n\n## Contributions\n\nIn short, patches welcome.\n\nIf you raise a PR, I'll test it, give some feedback and then (eventually) merge it.\n\nThis project aims to follow PEP8, but with a line length of 120 characters, and [PEP8 Speaks](https://github.com/OrkoHunter/pep8speaks/) and [Travis CI](https://travis-ci.com/Danyc0/advent-of-code-bot) will perk up in the comments of your PR if you go against this.\n" }, { "alpha_fraction": 0.5840385556221008, "alphanum_fraction": 0.5945680141448975, "avg_line_length": 40.4775505065918, "blob_id": "6458f92da0966fdefa9d7c56580b175e1e2f74e3", "content_id": "96c33cf86cbd68450b3abcf299b5d68ebff57840", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10162, "license_type": "permissive", "max_line_length": 119, "num_lines": 245, "path": "/main.py", "repo_name": "danmartinez78/advent-of-code-bot", "src_encoding": "UTF-8", "text": "import os\nimport time\nimport datetime\nimport json\nimport urllib.request\nfrom dotenv import load_dotenv\n\nfrom discord.ext import commands\n\nload_dotenv()\nTOKEN = os.getenv('DISCORD_TOKEN')\nURL = os.getenv('AOC_URL')\nCOOKIE = os.getenv('AOC_COOKIE')\n\n# Advent Of Code requests that you don't poll their API more often than once every 15 minutes\nPOLL_MINS = 15\n\n# Discord messages are limited to 2000 characters. 
This also includes space for 6 '`' characters for a code block\nMAX_MESSAGE_LEN = 2000 - 6\n\nPLAYER_STR_FORMAT = '{rank:2}) {name:{name_pad}} ({points:{points_pad}}) {stars:{stars_pad}}* ({star_time})\\n'\n\nplayers_cache = ()\n\n\ndef get_players():\n global players_cache\n now = time.time()\n debug_msg = 'Got Leaderboard From Cache'\n\n # If the cache is more than POLL_MINS old, refresh the cache, else use the cache\n if not players_cache or (now - players_cache[0]) > (60*POLL_MINS):\n debug_msg = 'Got Leaderboard Fresh'\n\n req = urllib.request.Request(URL)\n req.add_header('Cookie', 'session=' + COOKIE)\n page = urllib.request.urlopen(req).read()\n\n data = json.loads(page)\n # print(json.dumps(data, indent=4, sort_keys=True))\n\n # Extract the data from the JSON, it's a mess\n players = [(member['name'],\n member['local_score'],\n member['stars'],\n int(member['last_star_ts']),\n member['completion_day_level'],\n member['id']) for member in data['members'].values()]\n\n # Players that are anonymous have no name in the JSON, so give them a default name \"Anon\"\n for i, player in enumerate(players):\n if not player[0]:\n anon_name = \"anon #\" + player[5]\n players[i] = (anon_name, player[1], player[2], player[3], player[4], player[5])\n\n # Sort the table primarily by score, secondly by stars and finally by timestamp\n players.sort(key=lambda tup: tup[3])\n players.sort(key=lambda tup: tup[2], reverse=True)\n players.sort(key=lambda tup: tup[1], reverse=True)\n players_cache = (now, players)\n\n print(debug_msg)\n return players_cache[1]\n\n\nasync def output_leaderboard(context, leaderboard_lst):\n item_len = len(leaderboard_lst[0])\n block_size = MAX_MESSAGE_LEN // item_len\n\n tmp_leaderboard = leaderboard_lst\n\n while (len(tmp_leaderboard) * item_len) > MAX_MESSAGE_LEN:\n output_str = '```'\n output_str += ''.join(tmp_leaderboard[:block_size])\n output_str += '```'\n await context.send(output_str)\n tmp_leaderboard = tmp_leaderboard[block_size:]\n output_str = '```'\n output_str += ''.join(tmp_leaderboard)\n output_str += '```'\n await context.send(output_str)\n\n\n# Create the bot and specify to only look for messages starting with '!'\nbot = commands.Bot(command_prefix='!')\n\n\[email protected]\nasync def on_ready():\n print(f'{bot.user.name} has connected to Discord and is in the following channels:')\n for guild in bot.guilds:\n print(' ', guild.name)\n\n\[email protected](name='leaderboard', help='Responds with the current leaderboard')\nasync def leaderboard(context, num_players: int = 20):\n # Only respond if used in a channel called 'advent-of-code'\n if context.channel.name != 'advent-of-code':\n return\n\n print('Leaderboard requested')\n players = get_players()[:num_players]\n\n # Get string lengths for the format string\n max_name_len = len(max(players, key=lambda t: len(t[0]))[0])\n max_points_len = len(str(max(players, key=lambda t: t[1])[1]))\n max_stars_len = len(str(max(players, key=lambda t: t[2])[2]))\n\n leaderboard = []\n for i, player in enumerate(players):\n leaderboard.append(PLAYER_STR_FORMAT.format(rank=i+1,\n name=player[0], name_pad=max_name_len,\n points=player[1], points_pad=max_points_len,\n stars=player[2], stars_pad=max_stars_len,\n star_time=time.strftime('%H:%M %d/%m', time.localtime(player[3]))))\n\n await output_leaderboard(context, leaderboard)\n\n\[email protected](name='rank', help='Responds with the current ranking of the supplied player')\nasync def rank(context, *name):\n # Only respond if used in a channel called 'advent-of-code'\n if 
context.channel.name != 'advent-of-code':\n        return\n\n    # Join together all passed parameters with a space, this allows users to enter names with spaces\n    player_name = ' '.join(name)\n\n    print('Rank requested for: ', player_name)\n    players = get_players()\n\n    # Get the player with the matching name (case insensitive)\n    players = [(i, player) for i, player in enumerate(players) if player[0].upper() == player_name.upper()]\n    if players:\n        # Assume there was only one match\n        i, player = players[0]\n        result = '```'\n        result += PLAYER_STR_FORMAT.format(rank=i+1,\n                                           name=player[0], name_pad=len(player[0]),\n                                           points=player[1], points_pad=len(str(player[1])),\n                                           stars=player[2], stars_pad=len(str(player[2])),\n                                           star_time=time.strftime('%H:%M %d/%m', time.localtime(player[3])))\n        result += '```'\n    else:\n        result = 'Whoops, it looks like I can\\'t find that player, are you sure they\\'re playing?'\n    await context.send(result)\n\n\[email protected](name='keen', help='Responds with today\\'s keenest bean')\nasync def keen(context):\n    # Only respond if used in a channel called 'advent-of-code'\n    if context.channel.name != 'advent-of-code':\n        return\n    print('Keenest bean requested')\n\n    all_players = get_players()\n    # Calculate the highest number of stars gained by anyone in the leaderboard\n    max_stars = max(all_players, key=lambda t: t[2])[2]\n    # Get list of players with max stars\n    players = [(i, player) for i, player in enumerate(all_players) if player[2] == max_stars]\n\n    # Find the first person who got the max stars\n    i, player = min(players, key=lambda t: t[1][3])\n\n    result = 'Today\\'s keenest bean is:\\n```'\n    result += PLAYER_STR_FORMAT.format(rank=i+1,\n                                       name=player[0], name_pad=len(player[0]),\n                                       points=player[1], points_pad=len(str(player[1])),\n                                       stars=player[2], stars_pad=len(str(player[2])),\n                                       star_time=time.strftime('%H:%M %d/%m', time.localtime(player[3])))\n    result += '```'\n    await context.send(result)\n\n\[email protected](name='daily', help='Will give the daily leaderboard for specified day')\nasync def daily(context, day: str = None):\n    # The default day calculation cannot be in the function default value because the default\n    # value is evaluated when the program is started, not when the function is called\n    if day is None:\n        # The default day is whatever day's challenge has just come out\n        # So at 4:59 AM UTC it will still show the previous day's leaderboard\n        day = str((datetime.datetime.today() - datetime.timedelta(hours=5)).day)\n\n    # Only respond if used in a channel called 'advent-of-code'\n    if context.channel.name != 'advent-of-code':\n        return\n\n    print(\"Daily leaderboard requested for day:\", day)\n    players = get_players()\n\n    # Go through all the players, checking if they have data for that day; if they do, add them to players_day\n    players_day = [player for player in players if day in player[4]]\n\n    # Players_day has all people who have finished one star for that day\n    first_star = []\n    second_star = []\n\n    # Add all the players who have stars into the respective lists\n    for player_day in players_day:\n        if '1' in player_day[4][day]:\n            first_star.append((player_day[0], int(player_day[4][day]['1']['get_star_ts'])))\n        if '2' in player_day[4][day]:\n            second_star.append((player_day[0], int(player_day[4][day]['2']['get_star_ts'])))\n\n    # Sorts the two lists on timestamps\n    first_star.sort(key=lambda data: data[1])\n    second_star.sort(key=lambda data: data[1])\n\n    final_table = []\n\n    # Add all the people from the first list\n    for i, player in enumerate(first_star):\n        
final_table.append((player[0], (len(players) - i), player[1], 1))\n\n # Updates the list with all the people who got the second star and their score\n for i, player in enumerate(second_star):\n index = [i for i, item in enumerate(final_table) if item[0] == player[0]][0]\n to_change = final_table[index]\n final_table[index] = (to_change[0], (to_change[1] + (len(players) - i)), player[1], 2)\n\n # Sorts the table primarily by score, and secondly by timestamp\n final_table.sort(key=lambda data: data[2])\n final_table.sort(reverse=True, key=lambda data: data[1])\n\n # Outputs data\n if not final_table:\n result = \"```No Scores for this day yet```\"\n await context.send(result)\n else:\n # Get string lengths for the format string\n max_name_len = len(max(final_table, key=lambda t: len(t[0]))[0])\n max_points_len = len(str(max(final_table, key=lambda t: t[1])[1]))\n max_stars_len = len(str(max(final_table, key=lambda t: t[3])[3]))\n leaderboard = []\n for place, player in enumerate(final_table):\n leaderboard.append(PLAYER_STR_FORMAT.format(rank=place+1,\n name=player[0], name_pad=max_name_len,\n points=player[1], points_pad=max_points_len,\n stars=player[3], stars_pad=max_stars_len,\n star_time=time.strftime('%H:%M %d/%m',\n time.localtime(player[2]))))\n await output_leaderboard(context, leaderboard)\n\n\nbot.run(TOKEN)\n" } ]
2
MONO-ON/python_study
https://github.com/MONO-ON/python_study
e4c7d063ea72b4f49edd2d66fd24734db75f92ed
be03737362fc188ba8d1fb91fa3aa3c9fdd29074
bc43b8675b190d08ad45da86fbbee0f61052829f
refs/heads/main
2023-06-30T09:10:12.621121
2021-08-03T02:59:30
2021-08-03T02:59:30
390,617,992
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.4571428596973419, "alphanum_fraction": 0.4628571569919586, "avg_line_length": 17.450000762939453, "blob_id": "b7bb57c7dae2b37b88697ed5a79d733185c28aba", "content_id": "20690651831a1c8a6f661a4b1ed6f5a28f4aa1b2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 368, "license_type": "no_license", "max_line_length": 34, "num_lines": 20, "path": "/calculator.py", "repo_name": "MONO-ON/python_study", "src_encoding": "UTF-8", "text": "\nclass Calculator:\n    \"\"\"\n    A calculator class.\n    \"\"\"\n    result = 0\n    \n    def add(self, a):\n        self.result += a\n    \n    def subtract(self, a):\n        self.result -= a\n    \n    def multiplicated_by(self, a):\n        self.result *= a\n    \n    def divided_by(self, a):\n        self.result /= a\n    \n    def ac(self):\n        self.result = 0\n" }, { "alpha_fraction": 0.6875, "alphanum_fraction": 0.6875, "avg_line_length": 3, "blob_id": "856715ef69eccb6c201366d5c071c010e9ed6049", "content_id": "b1922a59ffcb4d2761bff4bca19d20f88684cef0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 16, "license_type": "no_license", "max_line_length": 7, "num_lines": 4, "path": "/README.md", "repo_name": "MONO-ON/python_study", "src_encoding": "UTF-8", "text": "HELLO\n\n\nGitHub!\n" }, { "alpha_fraction": 0.45348837971687317, "alphanum_fraction": 0.4581395387649536, "avg_line_length": 20.4761905670166, "blob_id": "b4fd1f416bd8eb49086130ada34ff46f6e5e752c", "content_id": "e945d3c4482e6d8c659b49154370d39bf2ec32b3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 544, "license_type": "no_license", "max_line_length": 35, "num_lines": 21, "path": "/python_block-main/klass.py", "repo_name": "MONO-ON/python_study", "src_encoding": "UTF-8", "text": "class Klass :\n    # This one is a special method\n    # Constructor method\n    def __new__(cls, name, age) :\n        print(\"The object has been created\")\n        return object.__new__(cls)\n\n    # Initializer method\n    def __init__(self, name, age) :\n        print(\"Initializing the object\")\n        self.name = name\n        self.age = age\n\n    # Destructor method\n    def __del__(self) :\n        print(\"Deleting the object\")\n        del self\n    \nif __name__ == \"__main__\" :\n    k = Klass(\"준석\", 25)\n    print(k)\n" }, { "alpha_fraction": 0.4157303273677826, "alphanum_fraction": 0.449438214302063, "avg_line_length": 24.285715103149414, "blob_id": "e708ccee2a176b0f35ba139b083ff024dd4f8b91", "content_id": "15d76b17bc98f3e8426b149d3db219de5c22752b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 222, "license_type": "no_license", "max_line_length": 38, "num_lines": 7, "path": "/python_block-main/파이썬 강의/input.py", "repo_name": "MONO-ON/python_study", "src_encoding": "UTF-8", "text": "\nif __name__ == \"__main__\" : \n    \n    i1 = int(input(\" Enter the first number \"))\n    i2 = int(input(\" Enter the second number \"))\n    print(\"-\" * 50)\n    for i in range(i1,i2) :\n        print(i)\n" }, { "alpha_fraction": 0.6393442749977112, "alphanum_fraction": 0.6393442749977112, "avg_line_length": 19.33333396911621, "blob_id": "938b798a641fdcb7ccc467ed4b6af0e7e4a21b33", "content_id": "d9e6893f0cb03ac5754ce006de196e034b228b80", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 61, "license_type": "no_license", "max_line_length": 32, "num_lines": 3, "path": "/python_block-main/파이썬 강의/args.py", "repo_name": "MONO-ON/python_study", "src_encoding": "UTF-8", "text": "import sys\nfor i,e in enumerate(sys.argv) :\n    print(i, e )\n" }, { "alpha_fraction": 0.6534653306007385, 
"alphanum_fraction": 0.6534653306007385, "avg_line_length": 19.200000762939453, "blob_id": "4236d2d20708809a92cfe1fc2ec35f425f7de793", "content_id": "67cf53ce2b1aacf28cc142445a47d54ad6093a7c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 101, "license_type": "no_license", "max_line_length": 27, "num_lines": 5, "path": "/python_block-main/파이썬 강의/sleep.py", "repo_name": "MONO-ON/python_study", "src_encoding": "UTF-8", "text": "def sleep(obj, value) :\n    return obj.sleep(value)\n\ndef eat(obj, value) :\n    return obj.eat(value)\n" }, { "alpha_fraction": 0.5591397881507874, "alphanum_fraction": 0.5913978219032288, "avg_line_length": 9.333333015441895, "blob_id": "85239dfc31272bd8f90e0c335c7a43311f37e817", "content_id": "983e7c3c7984708009e4108524248599cf60de19", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 175, "license_type": "no_license", "max_line_length": 23, "num_lines": 9, "path": "/python_block-main/README.md", "repo_name": "MONO-ON/python_study", "src_encoding": "UTF-8", "text": "# python_block\n\nPython basics, data preprocessing, data analysis\n\n1. Python basics: complete\n\n2. Data preprocessing: draft\n\n3. Data analysis: materials being organized\n" }, { "alpha_fraction": 0.6025640964508057, "alphanum_fraction": 0.6025640964508057, "avg_line_length": 18.25, "blob_id": "48605764f59fbfb60558a854e65c27db78992688", "content_id": "a299276b791b650e1331f4a45750571cdf142dfb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 90, "license_type": "no_license", "max_line_length": 36, "num_lines": 4, "path": "/python_block-main/파이썬 강의/fast.py", "repo_name": "MONO-ON/python_study", "src_encoding": "UTF-8", "text": "\nfrom random import choice\n\ndef pick() :\n    return choice(['서울', '성남','대구'])\n" }, { "alpha_fraction": 0.6458333134651184, "alphanum_fraction": 0.6458333134651184, "avg_line_length": 23, "blob_id": "6379d8655eec6c8c8c7a9f7cd1e53fdc52e84215", "content_id": "15d24172ea2dad356a2baf9d9b1ce78fe9451640", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 48, "license_type": "no_license", "max_line_length": 25, "num_lines": 2, "path": "/python_block-main/파이썬 강의/eat.py", "repo_name": "MONO-ON/python_study", "src_encoding": "UTF-8", "text": "def eat(obj, value) :\n    return obj.eat(value)\n" } ]
9
OldPanda/deep-learning-elu
https://github.com/OldPanda/deep-learning-elu
b5a80fa3af3c2cf6d9377cf727878343965c9c62
190875baf4f3523d8a8ee37a5756cda9cb054a43
a2c97cfe7789926c458f082d54d132ba659803d2
refs/heads/master
2020-03-27T16:42:24.744395
2018-02-18T11:09:33
2018-02-18T11:09:33
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7820343375205994, "alphanum_fraction": 0.7899603843688965, "avg_line_length": 41.02777862548828, "blob_id": "6137b8d26376f8033badd24b3d16821c39a282e5", "content_id": "67d80ca8896d93f30d894e143828d5ee62af96eb", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1520, "license_type": "permissive", "max_line_length": 195, "num_lines": 36, "path": "/README.md", "repo_name": "OldPanda/deep-learning-elu", "src_encoding": "UTF-8", "text": "# Deep Learning with Exponential Linear Units (ELUs)\n\nThe paper ‘Fast and Accurate Deep Network Learning by Exponential Linear Units (ELUs)’ [1] by Djork-Arné\nClevert, Thomas Unterthiner & Sepp Hochreiter introduces an activation function ELU which provides a significant speed up in\nlearning rate over traditionally used activation functions (ReLU, eReLU, lReLU) by alleviating the bias shift effect and pushing\nthe mean of the activation function to zero. In the experiments section the paper proposes, ‘ELUs lead not only to faster learning,\nbut also to significantly better generalization performance than sReLU and lReLUs on networks with more than 5 layers’. In this\nproject we examine the mathematical expressions and properties of different activation functions and we try to reproduce the\nresults achieved in this paper by training a 6-layer feed-forward neural network classifier on the MNIST dataset using different\nactivation functions.\n\n## Pre-requisites\n\nPlease see the requirements.txt file for required packages\n\n\n## How to Run\n\nRun the driver file run.py as\n\n`python run.py`\n\nwhich triggers running the whole project\n\nWe have also built a feed-forward neural network from scratch to gain an understanding of the concepts.\nIt's implemented in the file fee_fwd_NN_from_scratch.py\n\n## Authors\n* Prashant Gonarkar ([email protected]) \n* Sagar Patni ([email protected])\n\n\n## License\nThis project is licensed under the MIT License - see the LICENSE.md file for details\n\n## Acknowledgments \n* [Fast and accurate deep network learning by exponential linear units](https://arxiv.org/abs/1511.07289)\n\n" }, { "alpha_fraction": 0.5500324964523315, "alphanum_fraction": 0.5864197611808777, "avg_line_length": 28.757009506225586, "blob_id": "496af41a82dfec9ebd1b3701478d6fe1d3437af2", "content_id": "4ddc3989f7b8b833b1eb60e30f257efc679c69f3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3078, "license_type": "permissive", "max_line_length": 83, "num_lines": 107, "path": "/sigmoid.py", "repo_name": "OldPanda/deep-learning-elu", "src_encoding": "UTF-8", "text": "import time\n\nfrom tensorflow.examples.tutorials.mnist import input_data\nmnist = input_data.read_data_sets(\"/tmp/data/\", one_hot=True)\n\nimport tensorflow as tf\n\nlearning_rate = 0.01\ntraining_epochs = 300\nbatch_size = 64\ndisplay_step = 1\n\nn_h_1 = 128 \nn_h_2 = 128\nn_h_3 = 128\nn_h_4 = 128\nn_h_5 = 128\nn_h_6 = 128\nn_input = 784 \nn_classes = 10 \n\n\nX = tf.placeholder(\"float\", [None, n_input])\nY = tf.placeholder(\"float\", [None, n_classes])\n\n\nweights = {\n    'h1': tf.Variable(tf.random_normal([n_input, n_h_1])),\n    'h2': tf.Variable(tf.random_normal([n_h_1, n_h_2])),\n    'h3': tf.Variable(tf.random_normal([n_h_2, n_h_3])),\n    'h4': tf.Variable(tf.random_normal([n_h_3, n_h_4])),\n    'h5': tf.Variable(tf.random_normal([n_h_4, n_h_5])),\n    'h6': tf.Variable(tf.random_normal([n_h_5, n_h_6])),\n    'out': tf.Variable(tf.random_normal([n_h_6, 
n_classes]))\n}\nbiases = {\n 'b1': tf.Variable(tf.random_normal([n_h_1])),\n 'b2': tf.Variable(tf.random_normal([n_h_2])),\n 'b3': tf.Variable(tf.random_normal([n_h_3])),\n 'b4': tf.Variable(tf.random_normal([n_h_4])),\n 'b5': tf.Variable(tf.random_normal([n_h_5])),\n 'b6': tf.Variable(tf.random_normal([n_h_6])),\n 'out': tf.Variable(tf.random_normal([n_classes]))\n}\n\n\n\ndef build_nn(x):\n \n l1 = tf.add(tf.matmul(x, weights['h1']), biases['b1'])\n l1 = tf.nn.sigmoid(l1)\n \n l2 = tf.add(tf.matmul(l1, weights['h2']), biases['b2'])\n l2 = tf.nn.sigmoid(l2)\n \n l3 = tf.add(tf.matmul(l2, weights['h3']), biases['b3'])\n l3 = tf.nn.sigmoid(l3)\n \n l4 = tf.add(tf.matmul(l3, weights['h4']), biases['b4'])\n l4 = tf.nn.sigmoid(l4)\n \n l5 = tf.add(tf.matmul(l4, weights['h5']), biases['b5'])\n l5 = tf.nn.sigmoid(l5)\n \n l6 = tf.add(tf.matmul(l5, weights['h6']), biases['b6'])\n l6 = tf.nn.sigmoid(l6)\n \n out_layer = tf.matmul(l6, weights['out']) + biases['out']\n return out_layer\n\n\nlogits = build_nn(X)\n\n\nloss_op = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(\n logits=logits, labels=Y))\noptimizer = tf.train.AdamOptimizer(learning_rate=learning_rate)\ntrain_op = optimizer.minimize(loss_op)\n\ninit = tf.global_variables_initializer()\n\nwith tf.Session() as sess:\n sess.run(init)\n start_time = time.time()\n \n for epoch in range(training_epochs):\n a_c = 0.\n total_batch = int(mnist.train.num_examples/batch_size)\n\n for i in range(total_batch):\n batch_x, batch_y = mnist.train.next_batch(batch_size)\n\n _, c = sess.run([train_op, loss_op], feed_dict={X: batch_x,\n Y: batch_y})\n\n a_c += c / total_batch\n\n if epoch % display_step == 0:\n print(\"Epoch:\", '%04d' % (epoch+1), \"cost={:.9f}\".format(a_c))\n duration = time.time() - start_time\n print(duration)\n \n pred = tf.nn.softmax(logits) \n correct_prediction = tf.equal(tf.argmax(pred, 1), tf.argmax(Y, 1))\n \n accuracy = tf.reduce_mean(tf.cast(correct_prediction, \"float\"))\n print(\"Accuracy:\", accuracy.eval({X: mnist.test.images, Y: mnist.test.labels}))\n\n" }, { "alpha_fraction": 0.6338028311729431, "alphanum_fraction": 0.7183098793029785, "avg_line_length": 13.199999809265137, "blob_id": "8ddc9f8b97d63288fca9daf2072314af707b3c17", "content_id": "18f47ec801b3e89612d2498f95484c7f04efe5d5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 71, "license_type": "permissive", "max_line_length": 18, "num_lines": 5, "path": "/requirements.txt", "repo_name": "OldPanda/deep-learning-elu", "src_encoding": "UTF-8", "text": "Required Packages:\n1. Python 3.x \n2. TensorFlow 1.x\n3. Pandas\n4. 
numpy\n" }, { "alpha_fraction": 0.783098578453064, "alphanum_fraction": 0.783098578453064, "avg_line_length": 26.30769157409668, "blob_id": "1f613324de78167e8c358a3c63ef4afd438e5bb0", "content_id": "fb686d0222aadd9d89732b779e520b1dc832daae", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 355, "license_type": "permissive", "max_line_length": 49, "num_lines": 13, "path": "/run.py", "repo_name": "OldPanda/deep-learning-elu", "src_encoding": "UTF-8", "text": "import os\n\nprint(\"Starting ELU Neural Network on MNIST\")\nos.system(\"python elu.py\")\nprint(\"ELU Neural Network finished\")\n\nprint(\"Starting ReLU Neural Network on MNIST\")\nos.system(\"python relu.py\")\nprint(\"ReLU Neural Network finished\")\n\nprint(\"Starting Sigmoid Neural Network on MNIST\")\nos.system(\"python sigmoid.py\")\nprint(\"Sigmoid Neural Network finished\")\n" } ]
4
xeno14/clean_zsh_history
https://github.com/xeno14/clean_zsh_history
c63d22c1d304eb24c60d6ba8795da29ca8f78957
35fda93ef7c5f8f548d6d9e6443a0c24d3e8a847
15686f08cefb39d680547d96c5dfcbd4bc47db7a
refs/heads/master
2021-05-28T13:13:40.486699
2014-06-28T16:13:41
2014-06-28T16:13:41
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5134431719779968, "alphanum_fraction": 0.5247181057929993, "avg_line_length": 21.153846740722656, "blob_id": "b0bbd747cb935e39e8354b5f423c58f5b14df09d", "content_id": "9e21810f6d12bbf0a02334822601e3587c1ef4f1", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1153, "license_type": "permissive", "max_line_length": 64, "num_lines": 52, "path": "/clean_zsh_history.py", "repo_name": "xeno14/clean_zsh_history", "src_encoding": "UTF-8", "text": "\n\"\"\"\nusage:\n    python clean_zsh_history.py [path to zsh_history]\n\n    default: $HOME/.zsh_history\n\"\"\"\n__author__ = \"xeno1991\"\n\nimport sys\nimport os\n\n\nif __name__ == '__main__':\n    if len(sys.argv) == 1:\n        home = os.environ['HOME']\n        filename = home + \"/.zsh_history\"\n    elif len(sys.argv) == 2:\n        filename = sys.argv[1]\n    else:\n        print(\"usage: python clean_zsh_history.py [.zsh_history]\")\n        sys.exit(1)\n\n    try:\n        f = open(filename,\"r\")\n    except:\n        print('problem with opening %s' % filename)\n        sys.exit(1)\n\n    command_list = list()\n    command_set = set()\n\n    wcl = 0  # count number of lines in .zsh_history\n    for line in f:\n        wcl += 1\n        line = line.strip()\n        if line not in command_set:\n            if len(line) > 0:\n                command_list.append(line)\n                command_set.add(line)\n    f.close()\n\n    try:\n        f = open(filename,\"w\")\n    except:\n        print('problem with opening %s' % filename)\n        sys.exit(1)\n\n    for line in command_list:\n        f.write(line + \"\\n\")\n    f.close()\n\n    print(\"cleaned history: %d -> %d\" % (wcl, len(command_list)))" }, { "alpha_fraction": 0.6614583134651184, "alphanum_fraction": 0.6614583134651184, "avg_line_length": 17.454545974731445, "blob_id": "33a02a459b17d7a5c7206e8cd961ffe760869964", "content_id": "986aea3eb7a81c727875aa887bf0a6389da17edd", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 192, "license_type": "permissive", "max_line_length": 54, "num_lines": 11, "path": "/README.md", "repo_name": "xeno14/clean_zsh_history", "src_encoding": "UTF-8", "text": "clean_zsh_history\n=================\n\nErase duplicate history entries in `.zsh_history`.\n\n\n# Usage\n\n    python ./clean_zsh_history.py [path/to/.zsh_history]\n\nThe default argument is `$HOME/.zsh_history`\n" } ]
2
darcamo/conan-mathgl
https://github.com/darcamo/conan-mathgl
2ee4a2d7627d5d36a8b214e9af6b7ce42952a846
d3fc10d3b5e5d7581e897445b00e13114215eaa8
b9ba405be918e9fef0725193da36f5e9b62842da
refs/heads/master
2021-08-22T21:39:29.927882
2020-03-23T19:46:14
2020-03-23T19:46:14
141,925,050
0
1
null
null
null
null
null
[ { "alpha_fraction": 0.7745097875595093, "alphanum_fraction": 0.7745097875595093, "avg_line_length": 26.81818199157715, "blob_id": "807fd05ecb740ca91fcb4b4d391dd0dfb6699585", "content_id": "11fa21d4d4b689a05786aae039f455713ea4a8b4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 306, "license_type": "no_license", "max_line_length": 102, "num_lines": 11, "path": "/README.md", "repo_name": "darcamo/conan-mathgl", "src_encoding": "UTF-8", "text": "# conan-mathgl\nConan recipe for the mathgl library.\n\nYou can generate a package in your local cache with the command (from the folder with the conanfile.py)\n```\nconan create . youruser/stable\n```\n\n\nNote: Initial conan recipe code from https://github.com/joakimono/conan-libharu\nTry using that recipe first.\n" }, { "alpha_fraction": 0.47494032979011536, "alphanum_fraction": 0.49164676666259766, "avg_line_length": 15.760000228881836, "blob_id": "16460a890bc448de84236dd7e189485a866ba140", "content_id": "409058a575694ea9afe00ba4cd5094c19afef88f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 419, "license_type": "no_license", "max_line_length": 58, "num_lines": 25, "path": "/test_package/example.cpp", "repo_name": "darcamo/conan-mathgl", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <iostream>\n#include \"mgl2/mgl.h\"\n\nint main() {\n\n    mglGraph mg;\n    mg.Alpha(true);\n    return EXIT_SUCCESS;\n}\n\n\n// #include <mgl2/qt.h>\n// int sample(mglGraph *gr)\n// {\n//     gr->Rotate(60,40);\n//     gr->Box();\n//     return 0;\n// }\n// //-----------------------------------------------------\n// int main(int argc,char **argv)\n// {\n//     mglQT gr(sample,\"MathGL examples\");\n//     return gr.Run();\n// }\n" }, { "alpha_fraction": 0.7900763154029846, "alphanum_fraction": 0.8015267252922058, "avg_line_length": 28.11111068725586, "blob_id": "87a938b3458f3c61eb861d88b5296bf0ef40ab1b", "content_id": "365821ce9b1ffc332b381db6df9d70a8bc20ef74", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "CMake", "length_bytes": 262, "license_type": "no_license", "max_line_length": 49, "num_lines": 9, "path": "/test_package/CMakeLists.txt", "repo_name": "darcamo/conan-mathgl", "src_encoding": "UTF-8", "text": "cmake_minimum_required(VERSION 3.1.2)\nproject(MathglTest CXX)\n\ninclude(${CMAKE_BINARY_DIR}/conanbuildinfo.cmake)\nconan_basic_setup(TARGETS)\n\nadd_executable(example example.cpp)\ntarget_link_libraries(example CONAN_PKG::mathgl)\ntarget_link_libraries(example \"dl\")\n" }, { "alpha_fraction": 0.5650285482406616, "alphanum_fraction": 0.5727227330207825, "avg_line_length": 39.290000915527344, "blob_id": "5ff525641387a05bb1c822eaade5f9021170f8a8", "content_id": "eae8e76056368e229e804bc667499ead74a1bd94", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8058, "license_type": "no_license", "max_line_length": 108, "num_lines": 200, "path": "/conanfile.py", "repo_name": "darcamo/conan-mathgl", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nfrom conans import ConanFile, CMake, tools\nfrom conans.tools import os_info, SystemPackageTool\nimport shutil\nimport os\n# import svn.remote\n\n\nclass MathglConan(ConanFile):\n    name = \"mathgl\"\n    version = \"2.4.3\"\n    license = \"LGPL-3.0-only | GPL-3.0-only\"\n    url = \"https://github.com/joakimono/conan-mathgl\"\n    author = \"Joakim Haugen ([email protected])\"\n    homepage = 
\"http://mathgl.sourceforge.net\"\n description = \"MathGL is a library for making high-quality scientific graphics under Linux and Windows.\"\n settings = \"os\", \"compiler\", \"build_type\", \"arch\"\n generators = \"cmake\"\n options = {\n \"lgpl\": [True, False],\n \"double_precision\": [True, False],\n \"rvalue_support\": [True, False],\n \"pthread\": [True, False],\n \"pthr_widget\": [True, False],\n \"openmp\": [True, False],\n \"opengl\": [True, False],\n \"glut\": [True, False],\n \"fltk\": [True, False],\n \"wxWidgets\": [True, False],\n \"qt5\": [True, False],\n \"zlib\": [True, False],\n \"png\": [True, False],\n \"jpeg\": [True, False],\n \"gif\": [True, False],\n \"pdf\": [True, False],\n \"gsl\": [True, False],\n \"hdf5\": [True, False],\n \"mpi\": [True, False],\n \"ltdl\": [True, False],\n \"all_swig\": [True, False]\n }\n default_options = (\"lgpl=False\", \"double_precision=True\",\n \"rvalue_support=False\", \"pthread=False\",\n \"pthr_widget=False\", \"openmp=True\", \"opengl=True\",\n \"glut=False\", \"fltk=False\", \"wxWidgets=False\",\n \"qt5=False\", \"zlib=True\", \"png=True\", \"jpeg=True\",\n \"gif=False\", \"pdf=True\", \"gsl=False\", \"hdf5=False\",\n \"mpi=False\", \"ltdl=False\", \"all_swig=False\")\n cmake_options = {}\n\n def add_cmake_opt(self, val, doAdd):\n if doAdd:\n self.cmake_options[\"enable-{}\".format(val)] = 'ON'\n else:\n self.cmake_options[\"enable-{}\".format(val)] = 'OFF'\n\n def system_requirements(self):\n installer = SystemPackageTool()\n if (self.options.opengl or self.options.glut) and os_info.is_linux:\n if tools.os_info.linux_distro == \"arch\":\n installer.install(\"freeglut\")\n else:\n installer.install(\"freeglut3-dev\") # Name in Ubuntu\n\n if self.options.openmp and os_info.is_linux:\n if tools.os_info.linux_distro == \"arch\":\n installer.install(\"openmp\")\n else:\n installer.install(\"libomp-dev\") # Name in Ubuntu\n\n if self.options.qt5:\n if tools.os_info.linux_distro == \"arch\":\n installer.install(\"qt5-base\")\n\n if self.options.wxWidgets:\n if tools.os_info.linux_distro == \"arch\":\n installer.install(\"wxgtk2\")\n\n if self.options.fltk:\n if tools.os_info.linux_distro == \"arch\":\n installer.install(\"fltk\")\n\n def requirements(self):\n\n self.add_cmake_opt(\"double\", self.options.double_precision)\n self.add_cmake_opt(\"mpi\", self.options.mpi)\n self.add_cmake_opt(\"opengl\", self.options.opengl)\n self.add_cmake_opt(\"rvalue\", self.options.rvalue_support)\n self.add_cmake_opt(\n \"pthread\", self.options.pthread\n ) # Either enable pthread of openmp but not both at the same time\n self.add_cmake_opt(\"openmp\", self.options.openmp)\n self.add_cmake_opt(\"ltdl\", self.options.ltdl)\n\n self.add_cmake_opt(\"lgpl\", self.options.lgpl)\n self.add_cmake_opt(\"pthr-widget\",\n self.options.pthr_widget) # pthread widget\n self.add_cmake_opt(\"glut\", self.options.glut)\n self.add_cmake_opt(\"fltk\", self.options.fltk)\n self.add_cmake_opt(\"wx\", self.options.wxWidgets)\n self.add_cmake_opt(\"qt5\", self.options.qt5)\n self.add_cmake_opt(\"zlib\", self.options.zlib)\n self.add_cmake_opt(\"png\", self.options.png)\n self.add_cmake_opt(\"jpeg\", self.options.jpeg)\n self.add_cmake_opt(\"gif\", self.options.gif)\n self.add_cmake_opt(\"pdf\", self.options.pdf)\n self.add_cmake_opt(\"mpi\", self.options.mpi)\n self.add_cmake_opt(\"ltdl\", self.options.ltdl)\n if not self.options.lgpl:\n self.add_cmake_opt(\"gsl\", self.options.gsl)\n self.add_cmake_opt(\"hdf5\", self.options.hdf5)\n self.add_cmake_opt(\"all-swig\", 
self.options.all_swig)\n\n # expected to be found w/o conan: opengl, glut, fltk, wxwidgets, mpi, ltdl, gsl, qt\n if self.options.zlib:\n self.requires(\"zlib/[>=1.2.11]@conan/stable\")\n # self.options[\"zlib\"].shared = False\n if self.options.png:\n self.requires(\"libpng/[>=1.6.34]@bincrafters/stable\")\n # self.options[\"libpng\"].shared = False\n if self.options.jpeg:\n self.requires(\"libjpeg-turbo/[>=1.5.2]@bincrafters/stable\")\n # self.options[\"libjpeg-turbo\"].shared = False\n # set jpeg version 62\n if self.options.gif:\n self.requires(\"giflib/[>=5.1.3]@bincrafters/stable\")\n # self.options[\"giflib\"].shared = False\n if self.options.pdf:\n self.requires(\"libharu/2.3.0@darcamo/stable\")\n # self.options[\"libharu\"].shared = False\n if self.options.hdf5:\n if not self.options.lgpl:\n self.requires(\"hdf5/[>=1.10.5]\")\n # self.options[\"HDF5\"].shared = False\n\n def source(self):\n tools.get(\n \"http://downloads.sourceforge.net/mathgl/mathgl-2.4.2.1.tar.gz\")\n shutil.move(\"mathgl-2.4.2.1/\", \"sources\")\n\n tools.replace_in_file(\n \"sources/CMakeLists.txt\", \"project( MathGL2 )\",\n '''project( MathGL2 )\ninclude(${CMAKE_BINARY_DIR}/conanbuildinfo.cmake)\nconan_basic_setup()''')\n\n tools.replace_in_file(\n \"sources/CMakeLists.txt\",\n \"set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${MathGL2_SOURCE_DIR}/scripts)\",\n '''set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${MathGL2_SOURCE_DIR}/scripts)'''\n )\n\n tools.replace_in_file(\n \"sources/CMakeLists.txt\", '''\tfind_library(HPDF_LIB hpdf)\n\tif(NOT HPDF_LIB)\n\t\tmessage(SEND_ERROR \"Couldn't find libHaru or libhpdf.\")\n\tendif(NOT HPDF_LIB)\n\tfind_path(HPDF_INCLUDE_DIR hpdf_u3d.h)\n\tif(NOT HPDF_INCLUDE_DIR)\n\t\tmessage(SEND_ERROR \"Couldn't find headers of 3d-enabled version of libhpdf.\")\n\tendif(NOT HPDF_INCLUDE_DIR)''', ''' find_package(Libharu REQUIRED)\n include_directories(${LIBHARU_INCLUDE_DIR})\n set(MGL_DEP_LIBS ${LIBHARU_LIBRARIES} ${MGL_DEP_LIBS})''')\n\n def build(self):\n cmake = CMake(self)\n os.mkdir(\"build\")\n shutil.move(\"conanbuildinfo.cmake\", \"build/\")\n cmake.definitions.update(self.cmake_options)\n if self.settings.os == \"Windows\":\n cmake.definitions[\"enable-dep-dll\"] = \"ON\"\n cmake.configure(source_folder=\"sources\", build_folder=\"build\")\n cmake.build()\n cmake.install()\n\n def package_info(self):\n\n self.cpp_info.libs = [\"mgl\"]\n if self.options.fltk:\n self.cpp_info.libs.append('mgl-fltk')\n if self.options.glut:\n self.cpp_info.libs.append('mgl-glut')\n if self.options.qt5:\n self.cpp_info.libs.append('mgl-qt5')\n self.cpp_info.libs.append('mgl-qt')\n if self.options.fltk:\n self.cpp_info.libs.append('mgl-wnd')\n if self.options.wxWidgets:\n self.cpp_info.libs.append('mgl-wx')\n\n self.cpp_info.libs.append(\"dl\")\n\n # if not self.options.shared and self.settings.compiler == \"Visual Studio\":\n # for lib in range(len(self.cpp_info.libs)):\n # self.cpp_info.libs[lib] += \"-static\"\n # if self.settings.build_type == \"Debug\" and self.settings.compiler == \"Visual Studio\":\n # for lib in range(len(self.cpp_info.libs)):\n # self.cpp_info.libs[lib] += \"d\"\n" } ]
4
BradleyMoore/POD
https://github.com/BradleyMoore/POD
629f14f9a75dbe66beb74de4e505e10dfec04658
915fffb4b14325ff05e252a0593f56af4d4f5a19
dc44dc3a1c1c4d41d6657838c679143b46e531a9
refs/heads/master
2020-05-20T09:37:47.923543
2014-01-23T19:04:31
2014-01-23T19:04:31
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5353711843490601, "alphanum_fraction": 0.5414847135543823, "avg_line_length": 20.22222137451172, "blob_id": "059595470f93d9a2bbb21a2dc2301cf61475cf4c", "content_id": "74b2a510e25eb2ea4a8b6d0e69138f9e3a6b04c7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1145, "license_type": "permissive", "max_line_length": 76, "num_lines": 54, "path": "/definitions.py", "repo_name": "BradleyMoore/POD", "src_encoding": "UTF-8", "text": "defs = []\nterms = []\ndefinitions = []\ncount = 0\ndefinition_file = open('definitions.txt', 'r')\nmain_loop = True\n\nfor line in definition_file:\n    defs.append(line.strip())\n\nfor i in range(len(defs)):\n    if i % 2 == 0:\n        terms.append(defs[i])\n    else:\n        definitions.append(defs[i])\n\ndef term_list():\n    print \"\\n\\n\"\n    for i in range(len(terms)):\n        print \"%s\\t\" % terms[i],\n        if i % 9 == 0:\n            print \"\\n\"\n    print \"\\n\"\n    return \"\"\n\ndef query_search(query):\n    count = 0\n    for term in terms:\n        if query == term:\n            return definitions[count] + \"\\n\"\n        elif query == \"end\":\n            return kill_program()\n        elif query == \"help\":\n            return term_list()\n        count += 1\n    return \"Term not found. Type 'help' to list options.\\n\"\n\ndef kill_program():\n    global main_loop\n    main_loop = False\n    return \"\"\n    \n    \nterm_list()\n\nwhile main_loop == True:\n    print \"What do you want to know? (type 'end' to exit or 'help' to list\",\n    print \"options)\"\n    query = raw_input(\"> \").lower()\n    if query == \"true\" or query == \"false\":\n        query = query.capitalize()\n    \n    print query_search(query)\n\ndefinition_file.close()" }, { "alpha_fraction": 0.7058823704719543, "alphanum_fraction": 0.7058823704719543, "avg_line_length": 7.5, "blob_id": "3723115c8831b6601591a91fb8d63ded36680d79", "content_id": "a6f7ef621000a22f5e67609ab173ed1232ad9ab4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 34, "license_type": "permissive", "max_line_length": 24, "num_lines": 4, "path": "/README.md", "repo_name": "BradleyMoore/POD", "src_encoding": "UTF-8", "text": "POD\n===\n\nDefines Python operands.\n" } ]
2
marksibrahim/tweet
https://github.com/marksibrahim/tweet
e1bc35f67505e3894c7308e463bc18ebcc2379ad
613ec08ef782ef9b3c3b66b6dfc390f963ab72f0
c869af51715e91f2003572cf73ba6ffd512b0e73
refs/heads/master
2021-01-22T04:49:55.975435
2015-01-20T18:40:17
2015-01-20T18:40:17
29,545,152
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5934640765190125, "alphanum_fraction": 0.6026144027709961, "avg_line_length": 29.440000534057617, "blob_id": "8d501350eef46576f0122e2431917c367a3d0a40", "content_id": "89ae51e12d33cce8c56b341e251bcf003283b8de", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 765, "license_type": "no_license", "max_line_length": 75, "num_lines": 25, "path": "/tweet.py", "repo_name": "marksibrahim/tweet", "src_encoding": "UTF-8", "text": "import random\n#need three elements \n # opener: top 10, must have, \n # thing: patio, summer gadgets\n # purpose: for relaxing, for your home\n\nopener = ['top 10 ', \n 'Must have ', \n '10 must have ',\n 'editors select 8 ',\n 'Our favorite ']\n\nthing = ['dazzling patio furnishings ',\n 'elegant entertaining accessories ',\n 'stunning casual dinning pieces ',\n 'chic beach towels ',\n 'striking designer chaise-longues ']\n\npurpose = ['to impress!',\n 'for absolute summer indulgence',\n 'for summer adventures',\n 'for perfect indoor-outdoor living ',\n 'for chic alfresco entertaining ']\n\nprint random.choice(opener) + random.choice(thing) + random.choice(purpose)\n\n\n\n\n" }, { "alpha_fraction": 0.6888889074325562, "alphanum_fraction": 0.6888889074325562, "avg_line_length": 11.571428298950195, "blob_id": "5beceac5efaee0a59d9f37d3f50514e090a341c3", "content_id": "d1abc92b8cc29fbe617e62f561c9382bd5aaab90", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 90, "license_type": "no_license", "max_line_length": 42, "num_lines": 7, "path": "/README.md", "repo_name": "marksibrahim/tweet", "src_encoding": "UTF-8", "text": "\n# Tweet\n\ngenerates a buzzy title \n\n## Usage\n\nrun via \"python tweet.py\" at command line. \n" } ]
2
mofed8461/FDHomeAutomationWithSpeechRecognition
https://github.com/mofed8461/FDHomeAutomationWithSpeechRecognition
93339911939978c7ec3dd4dfcae5197ee9a5ce75
60df23a8d77ab531bff64344da3c07d3fdf9d497
a16f31627487aa938df01c7b9a8ae005a6efcd00
refs/heads/master
2020-05-05T02:45:06.792562
2019-05-04T11:06:04
2019-05-04T11:06:04
179,649,850
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6440678238868713, "alphanum_fraction": 0.6669491529464722, "avg_line_length": 21.851852416992188, "blob_id": "bf1776f385a5b964a87b6a60efe843bf5946a819", "content_id": "3615049664aba92bdda8556c2028c0246e4aee80", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2360, "license_type": "no_license", "max_line_length": 89, "num_lines": 108, "path": "/homeAutomation.py", "repo_name": "mofed8461/FDHomeAutomationWithSpeechRecognition", "src_encoding": "UTF-8", "text": "import speech_recognition as sr\nimport os\nimport RPi.GPIO as GPIO\nimport time\n\nGPIO.setmode(GPIO.BOARD)\n\n## 2.5 = 0\n## 7.5 = 90\n## 12.5 = 180\ndoorOpened = 7.5\ndoorClosed = 2.5\n\nservoPin = 12\nledPin = 33\nbedroomPin = 35\nkitchenPin = 38\nGPIO.setup(servoPin, GPIO.OUT)\npwmServo = GPIO.PWM(servoPin, 50)\npwmServo.start(doorClosed)\n\nGPIO.setup(ledPin, GPIO.OUT)\nGPIO.setup(bedroomPin, GPIO.OUT)\nGPIO.setup(kitchenPin, GPIO.OUT)\n\nGPIO.output(ledPin, True)\nGPIO.output(kitchenPin, False)\nGPIO.output(bedroomPin, False)\n\nr = sr.Recognizer()\n##print(sr.Microphone.list_microphone_names())\n##mic = sr.Microphone(device_index=2)\n\n## r.adjust_for_ambient_noise(source)\nlastPassword = \"\"\ntry:\n\twhile True:\n\t\tos.system('/usr/bin/arecord --duration=5 -r 16000 -f S16_LE /home/pi/Desktop/temp.wav')\n\n\t\t##with mic as source:\n\t\t##\taudio = r.listen(source)\n\n\t\ttemp = sr.AudioFile('/home/pi/Desktop/temp.wav')\n\t\twith temp as source:\n\t\t\taudio = r.record(source)\n\t\t\n\t\ttext = \"\"\n\t\ttry:\n\t\t\ttext = r.recognize_google(audio)\n\t\texcept sr.RequestError:\n\t\t\ttext = \"error\"\n\t\texcept sr.UnknownValueError:\n\t\t\ttext = \"error2\"\n\n\t\ttext = text.lower()\n\n\t\tprint(text)\n\t\tunderstood = False\n\n\t\tif ('door' in text and 'open' in text and lastPassword == \"parachute\"):\n\t\t\tpwmServo.ChangeDutyCycle(doorOpened)\n\t\t\tunderstood = True\n\n\t\tif ('door' in text and 'close' in text):\n\t\t\tpwmServo.ChangeDutyCycle(doorClosed)\n\t\t\tunderstood = True\n\n\t\tif ('kitchen' in text):\n\t\t\tif ('on' in text):\n\t\t\t\tGPIO.output(kitchenPin, True)\n\t\t\t\tunderstood = True\n\t\t\t\tprint('kit onnnnn')\n\t\t\telif ('off' in text):\n\t\t\t\tGPIO.output(kitchenPin, False)\n\t\t\t\tunderstood = True\n\t\t\t\tprint('kit offfff')\n\n\t\tif ('bedroom' in text):\n\t\t\tif ('on' in text):\n\t\t\t\tGPIO.output(bedroomPin, True)\n\t\t\t\tunderstood = True\n\t\t\telif ('off' in text):\n\t\t\t\tGPIO.output(bedroomPin, False)\n\t\t\t\tunderstood = True\n\n\t\tif (understood == False and text != 'error2'):\n\t\t\tlastPassword = text\n\t\t\tGPIO.output(ledPin, False)\n\t\t\ttime.sleep(0.2)\n\t\t\tGPIO.output(ledPin, True)\n\t\t\ttime.sleep(0.2)\n\t\t\tGPIO.output(ledPin, False)\n\t\t\ttime.sleep(0.2)\n\t\t\tGPIO.output(ledPin, True)\n\t\t\ttime.sleep(0.2)\n\t\t\tGPIO.output(ledPin, False)\n\t\t\ttime.sleep(0.2)\n\t\t\tGPIO.output(ledPin, True)\n\t\t\ttime.sleep(0.2)\n\t\t\tGPIO.output(ledPin, False)\n\t\t\ttime.sleep(0.2)\n\t\t\tGPIO.output(ledPin, True)\n\t\t\ttime.sleep(0.2)\n\t\t\n\nexcept KeyboardInterrupt:\n    pwmServo.stop()\n    GPIO.cleanup()\n" } ]
1
Sin-En-Chou/yuanta_python3
https://github.com/Sin-En-Chou/yuanta_python3
e86019b79b49e451271fda651e01bc6a30831172
27d2c47c8cc008569304008a6a802ac331304f80
1de4c78894a6605da0d4d4869301114e09f9b40e
refs/heads/master
2020-06-04T16:28:37.293735
2019-06-14T07:59:32
2019-06-14T07:59:32
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5813953280448914, "alphanum_fraction": 0.6007751822471619, "avg_line_length": 18.769229888916016, "blob_id": "d6653d413641b92707b661179f5de0f7ac722754", "content_id": "1c1891f99b6e53f7c7920d6d7464ad3c79934e99", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 616, "license_type": "no_license", "max_line_length": 64, "num_lines": 26, "path": "/Test.py", "repo_name": "Sin-En-Chou/yuanta_python3", "src_encoding": "UTF-8", "text": "import _thread\nimport time\n\n# _thread.start_new_thread ( function, args[, kwargs] )\n# Parameter description:\n# function - the thread function.\n# args - the arguments passed to the thread function; must be a tuple.\n# kwargs - optional arguments.\n\n# Define a function for the threads\ndef run( threadName, delay):\n    count = 0\n    while 1:\n        #time.sleep(delay)\n        count += 1\n        print (\"%s: %s\" % ( threadName, time.ctime(time.time()) ))\n\n# Create 2 threads\ntry:\n    _thread.start_new_thread( run, (\"Thread-1\", 0.5, ) )\n    _thread.start_new_thread( run, (\"Thread-2\", 1, ) )\nexcept:\n    print (\"Error: \")\n\nwhile 1:\n    pass\n\n\n" }, { "alpha_fraction": 0.7421875, "alphanum_fraction": 0.7421875, "avg_line_length": 17.14285659790039, "blob_id": "8c61cbcbec33e008bd908b95fb3937956c6dc278", "content_id": "59ffe149f8c6890ace2e6a4ff6d76e9130614b49", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 128, "license_type": "no_license", "max_line_length": 71, "num_lines": 7, "path": "/Test2.py", "repo_name": "Sin-En-Chou/yuanta_python3", "src_encoding": "UTF-8", "text": "\nimport requests\n\nurl = 'https://tcgbusfs.blob.core.windows.net/blobyoubike/YouBikeTP.gz'\n\nr = requests.get(url)\n\nprint(r.text)\n" } ]
2
akb9115/python-falcon-api-bot
https://github.com/akb9115/python-falcon-api-bot
bb3ec0c4abda544d1201c489c46b37c370ce3e28
76b1d298db50b65448c00ac17ff68c0506bc2d79
d4cdcc825f9c475352b377ea2f924c930c506f5a
refs/heads/master
2023-01-24T10:35:37.786049
2020-10-21T09:48:05
2020-10-21T09:48:05
235,327,946
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6992481350898743, "alphanum_fraction": 0.6992481350898743, "avg_line_length": 15.5, "blob_id": "2f481331ffd4e163189e62dc94b613f99788dda6", "content_id": "e3ccfd2fe980da9d03e4d41b6aa114ca99dc54e3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 133, "license_type": "no_license", "max_line_length": 33, "num_lines": 8, "path": "/data/routes.py", "repo_name": "akb9115/python-falcon-api-bot", "src_encoding": "UTF-8", "text": "import falcon\nfrom .calls import calls\n\napp=calls()\n\napi=falcon.API()\napi.add_route('/order',app)\napi.add_route('/cancel/{id}',app)\n\n" }, { "alpha_fraction": 0.7023809552192688, "alphanum_fraction": 0.7579365372657776, "avg_line_length": 25.200000762939453, "blob_id": "76b1d298db50b65448c00ac17ff68c0506bc2d79", "content_id": "97de22c34de72b4efe31a45c5e1117dbcf6a5ae8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 252, "license_type": "no_license", "max_line_length": 76, "num_lines": 10, "path": "/README.md", "repo_name": "akb9115/python-falcon-api-bot", "src_encoding": "UTF-8", "text": "# Python-Falcon-Api-Bot\n\nTo start the Mock API server we will use waitress. Use the command:\n\nwaitress-serve --port=8000 data.routes:api\n\nThe server starts at the port provided. With the default port the URL is http://127.0.0.1:8000.\n\nEndpoints Mocked\n/cancel/{order_id}\n" }, { "alpha_fraction": 0.5861423015594482, "alphanum_fraction": 0.6086142063140869, "avg_line_length": 26.842105865478516, "blob_id": "f86fdfe643cf49e8a8118b37a904a452a05f2631", "content_id": "af28670af96cdbb5050bd5a2b3669dbeb347d093", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 534, "license_type": "no_license", "max_line_length": 58, "num_lines": 19, "path": "/data/calls.py", "repo_name": "akb9115/python-falcon-api-bot", "src_encoding": "UTF-8", "text": "import falcon\nimport json\nfrom resources.cancel_order2 import cancel_order2\n\nclass calls(object):\n\n    def on_get(self,req,resp):\n        with open('data/orderstatus.json','r') as f:\n            resp.body=f.read()\n        resp.status=falcon.HTTP_200\n\n    def on_put(self,req,resp,id):\n        # data = req.stream.read()\n        response=cancel_order2.cancelOrderAsPerID(self,id)\n        if response != \"ID Not Found\":\n            resp.body=response\n            resp.status=falcon.HTTP_200\n        else:\n            resp.status=falcon.HTTP_404\n\n\n\n\n\n" }, { "alpha_fraction": 0.5604956150054932, "alphanum_fraction": 0.5612244606018066, "avg_line_length": 43.98360824584961, "blob_id": "a81d5c93c18101f2c02e1f9f805685ae6f1cc30e", "content_id": "08c59677468c8a2c583cd540d1441337fbb50ef0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2744, "license_type": "no_license", "max_line_length": 92, "num_lines": 61, "path": "/resources/cancel_order.py", "repo_name": "akb9115/python-falcon-api-bot", "src_encoding": "UTF-8", "text": "\nimport json\n\nclass cancel_order(object):\n\n    def cancelOrderAsPerID(self,order_id):\n\n        self.response = {}\n        self.order_data={}\n        self.order_status_flag = False\n        self.order_tracking_flag=False\n        self.cancellation_status=False\n        order_status_results = []\n        order_tracking_results=[]\n\n        #updating order status file\n        with open('data/orderstatus.json', 'r') as order_status_reader:\n            order_data = order_status_reader.read()\n            parsed_json = json.loads(order_data)\n            for data in parsed_json:\n                if (order_id == data[\"order_id\"]):\n                    self.order_data=data\n                    self.order_status_flag = True\n                    if 
(data[\"order_status\"] != \"cancelled\"):\n data[\"order_status\"] = \"cancelled\"\n self.cancellation_status=True\n else:\n self.cancellation_status=False\n order_status_results.append(data)\n order_status_reader.close()\n with open('data/orderstatus.json', 'w') as order_status_writer:\n json.dump(order_status_results, order_status_writer, indent=4, sort_keys=False)\n order_status_writer.close()\n\n #updating order tracking file\n with open('data/ordertracking.json', 'r') as order_tracking_reader:\n order_data = order_tracking_reader.read()\n parsed_json = json.loads(order_data)\n for data in parsed_json:\n if (order_id == data[\"order_id\"]):\n self.order_tracking_flag = True\n if(data[\"is_cancelled\"]!=\"true\"):\n data[\"is_cancelled\"]=\"true\"\n order_tracking_results.append(data)\n order_tracking_reader.close()\n with open('data/ordertracking.json', 'w') as order_tracking_writer:\n json.dump(order_tracking_results,order_tracking_writer,indent=4,sort_keys=False)\n order_tracking_writer.close()\n\n #Returning response as per requirement\n if self.order_status_flag == True and self.order_tracking_flag == True:\n if self.cancellation_status==True:\n self.response[\"order_id\"]=self.order_data[\"order_id\"]\n self.response[\"name\"]=self.order_data[\"name\"]\n self.response[\"status\"]=\"Order cancelled\"\n else:\n self.response[\"order_id\"] = self.order_data[\"order_id\"]\n self.response[\"name\"] = self.order_data[\"name\"]\n self.response[\"status\"] = \"Order has already been cancelled\"\n else:\n self.response[\"status\"] = \"order id not found.\"\n return json.dumps(self.response)" }, { "alpha_fraction": 0.577675461769104, "alphanum_fraction": 0.5811277627944946, "avg_line_length": 36.739131927490234, "blob_id": "60e546fa8f3e4dc6cc4a0161538d3d6f2a665ab5", "content_id": "1e71c0e75d123252e5fad6619a3d18f38b788689", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 869, "license_type": "no_license", "max_line_length": 115, "num_lines": 23, "path": "/resources/cancel_order2.py", "repo_name": "akb9115/python-falcon-api-bot", "src_encoding": "UTF-8", "text": "\nimport json\n\nclass cancel_order2(object):\n\n def cancelOrderAsPerID(self,order_id):\n\n self.response = {}\n self.order_data={}\n self.order_status_flag = False\n self.order_tracking_flag=False\n self.cancellation_status=False\n\n with open('data/orderstatus.json', 'r') as order_status_reader:\n complete_data = order_status_reader.read()\n parsed_json = json.loads(complete_data)\n output_dict = [x for x in parsed_json if x['order_id'] == order_id and x['order_status'] !=\"cancelled\"]\n if len(output_dict)==1:\n filtered_data=output_dict[0]\n data_index=parsed_json.index(filtered_data)\n parsed_json[data_index]['order_status']=\"cancelled\"\n return json.dumps((parsed_json))\n else:\n return \"ID Not Found\"\n" } ]
5
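The `calls.py` resource in the record above routes `PUT /cancel/{id}` through `cancel_order2.cancelOrderAsPerID`, which filters the parsed JSON order list for a matching, not-yet-cancelled `order_id` and flips that entry's status in place. A minimal self-contained sketch of the same find-and-update pattern, with a hypothetical inline order list standing in for `data/orderstatus.json`:

```python
import json

# Hypothetical orders standing in for data/orderstatus.json.
orders = [
    {"order_id": "101", "name": "Widget", "order_status": "shipped"},
    {"order_id": "102", "name": "Gadget", "order_status": "processing"},
]

def cancel_order(parsed_json, order_id):
    # Keep only the matching order that has not already been cancelled.
    matches = [x for x in parsed_json
               if x["order_id"] == order_id and x["order_status"] != "cancelled"]
    if len(matches) == 1:
        idx = parsed_json.index(matches[0])
        parsed_json[idx]["order_status"] = "cancelled"
        return json.dumps(parsed_json)
    return None  # a falsy miss lets the caller answer 404

print(cancel_order(orders, "102"))  # order 102 comes back as "cancelled"
```

One deliberate difference from the original: returning `None` on a miss (instead of the truthy string `"ID Not Found"`) is what actually lets the `falcon.HTTP_404` branch in `calls.on_put` fire.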
adamsc64/pdf2txt-python
https://github.com/adamsc64/pdf2txt-python
28c992c790146fde6551198762793505f11322a5
c25e3ae044a50daa02a706ebb8867e4863b8930f
12df91945f87af42adf20c099b6b2a4a7bb781ea
refs/heads/master
2021-01-22T09:18:02.200981
2017-04-13T12:45:43
2017-04-13T12:45:43
81,950,476
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.5456129908561707, "alphanum_fraction": 0.5490993857383728, "avg_line_length": 27.66666603088379, "blob_id": "0fbdbdc273d83291a158613244cfc82f5ff0bb60", "content_id": "b62e79d2724ad8f827d635e23c0959bd1aebfb74", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1721, "license_type": "permissive", "max_line_length": 57, "num_lines": 60, "path": "/pdf2txt/pdf2txt", "repo_name": "adamsc64/pdf2txt-python", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport argparse\nimport io\nimport sys\n\nfrom wand.image import Image\nfrom PIL import Image as PI\nimport pyocr\nimport pyocr.builders\n\n\nENGLISH = 'eng'\n\n\ndef parse_args():\n parser = argparse.ArgumentParser(\n description='Convert a PDF into text using OCR.')\n parser.add_argument('-i', '--infile',\n nargs='?',\n type=argparse.FileType('r'),\n required=True,\n default=sys.stdin)\n parser.add_argument('-o', '--outfile',\n nargs='?',\n type=argparse.FileType('w'),\n required=True,\n default=sys.stdout)\n return parser.parse_args()\n\n\ndef main():\n args = parse_args()\n tool = pyocr.get_available_tools()[0]\n if ENGLISH not in tool.get_available_languages():\n raise ValueError(\"English not installed\")\n final_text = []\n print(\"Loading file %s...\" % args.infile)\n image_pdf = Image(file=args.infile, resolution=300)\n image_jpeg = image_pdf.convert('jpeg')\n print(\"There are %s images in total.\" % (\n len(image_jpeg.sequence)))\n for i, img in enumerate(image_jpeg.sequence):\n img_page = Image(image=img)\n print(\"Getting image %s...\" % (i + 1))\n img = img_page.make_blob('jpeg')\n print(\"Converting to string...\")\n line = tool.image_to_string(\n PI.open(io.BytesIO(img)),\n lang=ENGLISH,\n builder=pyocr.builders.TextBuilder(),\n )\n line = line.encode(\"utf-8\")\n print(\"Writing to file...\")\n args.outfile.write(line)\n args.outfile.close()\n\n\n\nif __name__ == \"__main__\":\n main()\n\n" }, { "alpha_fraction": 0.6052631735801697, "alphanum_fraction": 0.6052631735801697, "avg_line_length": 13.25, "blob_id": "9155302929efb92112393e2ec205a0e0ef1cf78a", "content_id": "1eadeed3aaaf0a8a9a5315369af329294077df24", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 114, "license_type": "permissive", "max_line_length": 24, "num_lines": 8, "path": "/requirements.txt", "repo_name": "adamsc64/pdf2txt-python", "src_encoding": "UTF-8", "text": "# System-level packages:\n########################\n# imagemagick\n# tesseract-eng\n# tesseract-ocr\nwand\npyocr\npillow\n" } ]
2
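The `pdf2txt` script above rasterises each PDF page with Wand at 300 dpi, converts it to JPEG, and hands the page image to a pyocr/tesseract tool. A condensed sketch of that loop, assuming ImageMagick and a tesseract backend with English data are installed, and that `input.pdf` is a hypothetical local file:

```python
import io

from wand.image import Image
from PIL import Image as PI
import pyocr
import pyocr.builders

tool = pyocr.get_available_tools()[0]  # first installed OCR backend
with open("input.pdf", "rb") as f:
    pages = Image(file=f, resolution=300).convert("jpeg")

text = []
for page in pages.sequence:
    blob = Image(image=page).make_blob("jpeg")  # one page as JPEG bytes
    text.append(tool.image_to_string(
        PI.open(io.BytesIO(blob)),              # hand the page to tesseract
        lang="eng",
        builder=pyocr.builders.TextBuilder(),
    ))
print("\n".join(text))
```

Every library call here appears in the record's own source; the sketch only folds the per-page plumbing into one loop.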
yanzhenguo/NNGN_model
https://github.com/yanzhenguo/NNGN_model
40e2525d6ae642be4ed47027c23a7448fd29cd5f
23e459c75f6e70d8d8e9d3c636e3a054a1c926fe
801fd534695a78de45c444fefb40a01326ddf08b
refs/heads/master
2021-01-22T04:54:10.295169
2017-09-05T11:11:37
2017-09-05T11:11:37
102,272,117
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.6091670989990234, "alphanum_fraction": 0.6209955811500549, "avg_line_length": 32.66666793823242, "blob_id": "cbd07084f981d57a1f2c45f4d13d2b8071aeb51b", "content_id": "f90341e716b63e2e0e31cf120963085b801d7cf1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2029, "license_type": "no_license", "max_line_length": 121, "num_lines": 60, "path": "/20NewsGroup/data_process.py", "repo_name": "yanzhenguo/NNGN_model", "src_encoding": "UTF-8", "text": "# coding=utf-8\nimport os\nimport os.path\nimport numpy as np\nimport codecs\nimport pickle\nimport nltk\nfrom nltk.tokenize import WordPunctTokenizer\nfrom sklearn.datasets import fetch_20newsgroups\n\n# extract trainint and test data form sklearn\ndef extract_data():\n if os.path.exists('./data/Ytrain.npy') and os.path.exists('./data/Ytest.npy') and os.path.exists('./data/texts.pkl'):\n f = codecs.open('./data/texts.pkl', 'rb')\n texts = pickle.load(f)\n f.close()\n Ytrain = np.load('./data/Ytrain.npy')\n Ytest = np.load('./data/Ytest.npy')\n return texts,Ytrain,Ytest\n newsgroups_train = fetch_20newsgroups(subset='train')\n # print(len(newsgroups_train.data))\n trainlabel = np.zeros((len(newsgroups_train.data)), dtype=np.float32)\n cates = newsgroups_train.target_names\n train = []\n count = 0\n index = 0\n for cat in cates:\n\n newsgroups = fetch_20newsgroups(subset='train', categories=[cat])\n datas = newsgroups.data\n for doc in datas:\n trainlabel[index] = count\n doc = ' '.join(WordPunctTokenizer().tokenize(doc.lower()))\n train.append(doc)\n index += 1\n count += 1\n np.save('./data/Ytrain.npy', trainlabel)\n\n newsgroups_train = fetch_20newsgroups(subset='test')\n # print(len(newsgroups_train.data))\n testlabel = np.zeros((len(newsgroups_train.data)), dtype=np.float32)\n cates = newsgroups_train.target_names\n test = []\n count = 0\n index = 0\n for cat in cates:\n newsgroups = fetch_20newsgroups(subset='test', categories=[cat])\n datas = newsgroups.data\n for doc in datas:\n testlabel[index] = count\n doc = ' '.join(WordPunctTokenizer().tokenize(doc.lower()))\n test.append(doc)\n index += 1\n count += 1\n np.save('./data/Ytest.npy', testlabel)\n\n out = codecs.open('./data/texts.pkl', 'wb')\n pickle.dump(train + test, out, 1)\n out.close()\n return train+test, trainlabel, testlabel\n\n\n\n\n\n\n\n\n\n" }, { "alpha_fraction": 0.5381779670715332, "alphanum_fraction": 0.5803053975105286, "avg_line_length": 31.75, "blob_id": "bf4205aca8bcf60638307c92ce3df9d5957d84ad", "content_id": "ff602c50912ceb2161fe192d009a9433558b94f8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3798, "license_type": "no_license", "max_line_length": 73, "num_lines": 116, "path": "/Elec/data_process.py", "repo_name": "yanzhenguo/NNGN_model", "src_encoding": "UTF-8", "text": "# coding=utf-8\nimport codecs\nimport os\nimport pickle\nimport numpy as np\nimport numpy as np\nfrom nltk.tokenize import WordPunctTokenizer\nfrom keras import backend as K\nfrom keras.engine.topology import Layer\nfrom keras.preprocessing.text import Tokenizer\nfrom keras.preprocessing.sequence import pad_sequences\ndef extract_data():\n if os.path.exists('./data/text.pkl'):\n f = codecs.open('./data/text.pkl', 'rb')\n texts = pickle.load(f)\n f.close()\n return texts\n texts = []\n num_train = 25000\n num_test = 25000\n\n f = codecs.open('./data/elec-25k-train.txt.tok', 'r', 'utf-8')\n for line in f:\n texts.append(line.lower())\n f.close()\n\n f = 
codecs.open('./data/elec-test.txt.tok', 'r', 'utf-8')\n for line in f:\n texts.append(line.lower())\n f.close()\n\n Ytrain = np.zeros((num_train,), dtype=np.int8)\n f = codecs.open('./data/elec-25k-train.cat', 'r')\n index = 0\n for line in f:\n Ytrain[index] = int(line[:-1])\n index += 1\n f.close()\n\n Ytest = np.zeros((num_test,), dtype=np.int8)\n f = codecs.open('./data/elec-test.cat', 'r')\n index = 0\n for line in f:\n Ytest[index] = int(line[:-1])\n index += 1\n f.close()\n\n newText = []\n for i in range(num_train):\n if Ytrain[i] == 1:\n newText.append(texts[i])\n for i in range(num_train):\n if Ytrain[i] == 2:\n newText.append(texts[i])\n for i in range(num_train, num_train + num_test):\n if Ytest[i - num_train] == 1:\n newText.append(texts[i])\n for i in range(num_train, num_train + num_test):\n if Ytest[i - num_train] == 2:\n newText.append(texts[i])\n f = codecs.open('./data/text.pkl', 'wb')\n pickle.dump(newText, f, 1)\n f.close()\n return newText\n\nclass ElecCorpus():\n def __init__(self, num_words, max_len, filters=''):\n self.num_words = num_words\n self.max_len = max_len\n self.texts = extract_data()\n tokenizer = Tokenizer(num_words=num_words)\n if filters is not None:\n tokenizer.filters = filters\n tokenizer.fit_on_texts(self.texts[:25000])\n self.tokenizer = tokenizer\n\n def get_sequence(self):\n return self.tokenizer.texts_to_sequences(self.texts)\n\n def get_matrix(self):\n return self.tokenizer.texts_to_matrix(self.texts)\n\n def get_input_bow(self):\n text = self.texts\n xtrain = self.tokenizer.texts_to_matrix(text[:25000])\n xtest = self.tokenizer.texts_to_matrix(text[25000:])\n ytrain = np.zeros((25000,), dtype=np.int8)\n ytest = np.zeros((25000,), dtype=np.int8)\n ytrain[12500:25000] = np.ones((12500,), dtype=np.int8)\n ytest[12500:25000] = np.ones((12500,), dtype=np.int8)\n return [xtrain, ytrain, xtest, ytest]\n\n def get_sequence_pad(self):\n word_index = self.tokenizer.word_index\n sequences = []\n for i in range(50000):\n t = []\n tokens = self.texts[i].lower().split(' ')\n for j in range(len(tokens)):\n index = word_index.get(tokens[j], 0)\n if index < self.num_words:\n t.append(index)\n else:\n t.append(0)\n sequences.append(t)\n return sequences\n\n def get_input(self):\n sequence = self.get_sequence()\n xtrain = pad_sequences(sequence[0:25000], maxlen=self.max_len)\n xtest = pad_sequences(sequence[25000:50000], maxlen=self.max_len)\n ytrain = np.zeros((25000,), dtype=np.float32)\n ytest = np.zeros((25000,), dtype=np.float32)\n ytrain[12500:25000] = np.ones((12500,), dtype=np.float32)\n ytest[12500:25000] = np.ones((12500,), dtype=np.float32)\n return xtrain, ytrain, xtest, ytest" }, { "alpha_fraction": 0.7355931997299194, "alphanum_fraction": 0.7525423765182495, "avg_line_length": 15.44444465637207, "blob_id": "7a77426ea5ac30a6eb058529c8c377ef0b810c3c", "content_id": "ecbd5e1777bfa84f305910793c357b7456f5d3ae", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 295, "license_type": "no_license", "max_line_length": 73, "num_lines": 18, "path": "/README.md", "repo_name": "yanzhenguo/NNGN_model", "src_encoding": "UTF-8", "text": "Implementation of paper \"A Neural N-gram Network for Text Classification\"\n\nprerequirement:\n* python3.5\n* keras\n* nltk\n* sklearn\n* numpy\n\nTo run the experiments in this paper,run:\n```\n#for IMDB\ncd IMDB\npython bow-NNGN.py\npython seq-NNGN.py\npython seq-NNGN-2.py\n```\nand so for Elec and 20NewsGroup" }, { "alpha_fraction": 0.5523498058319092, 
"alphanum_fraction": 0.5922070145606995, "avg_line_length": 32.630001068115234, "blob_id": "e811073f4a18df5632e58403c6d45dbdde6d0a93", "content_id": "505f5e25e748f769b3485793ce48b6b208fce23f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3416, "license_type": "no_license", "max_line_length": 73, "num_lines": 100, "path": "/IMDB/data_process.py", "repo_name": "yanzhenguo/NNGN_model", "src_encoding": "UTF-8", "text": "# coding=utf-8\nimport os\nimport os.path\nimport codecs\nimport nltk\nimport pickle\nimport logging\nimport numpy as np\nfrom nltk.tokenize import WordPunctTokenizer\nfrom keras import backend as K\nfrom keras.engine.topology import Layer\nfrom keras.preprocessing.text import Tokenizer\nfrom keras.preprocessing.sequence import pad_sequences\n\n# def adddocument(path):\n# '''读取文件中的内容,分词后返回内容'''\n# f = codecs.open(path, 'r', 'utf-8')\n# paragraph = f.read().lower()\n# f.close()\n# words = WordPunctTokenizer().tokenize(paragraph)\n# return ' '.join(words)\n\ndef extract_data():\n '''提取训练文档和测试文档'''\n if os.path.exists('./data/text.pkl'):\n f = codecs.open('./data/text.pkl', 'rb')\n texts = pickle.load(f)\n f.close()\n return texts\n else:\n raise \"not find the ./data/text.pkl file.\"\n return None\n # rootdir = './data/'\n # texts = []\n # subdirs = ['train/neg', 'train/pos', 'test/neg', 'test/pos']\n # for subdir in subdirs:\n # for parent, dirnames, filenames in os.walk(rootdir + subdir):\n # index = 0\n # for filename in filenames:\n # content = adddocument(parent + '/' + filename)\n # texts.append(content)\n # out = codecs.open('./data/text.pkl', 'wb')\n # pickle.dump(texts, out, 1)\n # out.close()\n # return texts\n\n\nclass ImdbCorpus():\n def __init__(self, num_words, max_len, filters=''):\n self.num_words = num_words\n self.max_len = max_len\n self.texts = extract_data()\n tokenizer = Tokenizer(num_words=num_words)\n if filters is not None:\n tokenizer.filters = filters\n tokenizer.fit_on_texts(self.texts[:25000])\n self.tokenizer = tokenizer\n\n def get_sequence(self):\n return self.tokenizer.texts_to_sequences(self.texts)\n\n def get_matrix(self):\n return self.tokenizer.texts_to_matrix(self.texts)\n\n def get_input_bow(self):\n text = self.texts\n xtrain = self.tokenizer.texts_to_matrix(text[:25000])\n xtest = self.tokenizer.texts_to_matrix(text[25000:])\n ytrain = np.zeros((25000,), dtype=np.int8)\n ytest = np.zeros((25000,), dtype=np.int8)\n ytrain[12500:25000] = np.ones((12500,), dtype=np.int8)\n ytest[12500:25000] = np.ones((12500,), dtype=np.int8)\n return [xtrain, ytrain, xtest, ytest]\n\n def get_sequence_pad(self):\n word_index = self.tokenizer.word_index\n sequences = []\n for i in range(50000):\n t = []\n tokens = self.texts[i].lower().split(' ')\n for j in range(len(tokens)):\n index = word_index.get(tokens[j], 0)\n if index < self.num_words:\n t.append(index)\n else:\n t.append(0)\n sequences.append(t)\n return sequences\n\n def get_input(self):\n sequence = self.get_sequence()\n xtrain = pad_sequences(sequence[0:25000], maxlen=self.max_len)\n xtest = pad_sequences(sequence[25000:50000], maxlen=self.max_len)\n ytrain = np.zeros((25000,), dtype=np.float32)\n ytest = np.zeros((25000,), dtype=np.float32)\n ytrain[12500:25000] = np.ones((12500,), dtype=np.float32)\n ytest[12500:25000] = np.ones((12500,), dtype=np.float32)\n return xtrain, ytrain, xtest, ytest\n\nextract_data()" }, { "alpha_fraction": 0.6299559473991394, "alphanum_fraction": 0.6773127913475037, "avg_line_length": 39.35555648803711, 
"blob_id": "8d2dff5e29571115c2672d49e9be82f68c0255b4", "content_id": "31c86a76b799f5adf1953052ef786a22595d0910", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1816, "license_type": "no_license", "max_line_length": 115, "num_lines": 45, "path": "/IMDB/seq-NNGN.py", "repo_name": "yanzhenguo/NNGN_model", "src_encoding": "UTF-8", "text": "# coding=utf-8\nimport numpy as np\nimport keras\nfrom keras.layers import Dense, GlobalMaxPooling1D, Input, Embedding, \\\n AveragePooling1D, GlobalAveragePooling1D, Activation, Conv1D, Dropout, MaxPooling1D, LSTM, Flatten, Concatenate\nfrom keras.models import Model, load_model, Sequential\nimport data_process\n\"implementation of seq-NNGN with region size to be 3\"\n\nNUM_WORDS = 30000\nMAX_LEN = 300\n\nimdb = data_process.ImdbCorpus(num_words=NUM_WORDS, max_len=MAX_LEN)\ndata1, Ytrain, data2, Ytest = imdb.get_input()\nXtrain = np.zeros((25000, (MAX_LEN - 2) * 3), dtype=np.int)\nXtest = np.zeros((25000, (MAX_LEN - 2) * 3), dtype=np.int)\nfor i in range(25000):\n for j in range(MAX_LEN - 2):\n Xtrain[i, j * 3] = data1[i, j]\n Xtrain[i, j * 3 + 1] = data1[i][j + 1] + NUM_WORDS\n Xtrain[i, j * 3 + 2] = data1[i][j + 2] + NUM_WORDS * 2\nfor i in range(25000):\n for j in range(MAX_LEN - 2):\n Xtest[i, j * 3] = data2[i, j]\n Xtest[i, j * 3 + 1] = data2[i][j + 1] + NUM_WORDS\n Xtest[i, j * 3 + 2] = data2[i][j + 2] + NUM_WORDS * 2\n# shuffle the data\nindice = np.arange(len(Xtrain))\nnp.random.shuffle(indice)\nXtrain = Xtrain[indice]\nXtest = Xtest[indice]\nYtrain = Ytrain[indice]\nYtest = Ytest[indice]\n# build model\nmain_input = Input(shape=((MAX_LEN - 2) * 3,))\nembedding1 = Embedding(NUM_WORDS * 3, 500, embeddings_initializer=keras.initializers.Orthogonal())(main_input)\n# embedding1 = Embedding(num_words * 3, 500)(main_input)\nx = AveragePooling1D(pool_size=3)(embedding1)\nx = GlobalMaxPooling1D()(x)\noutput = Dense(1, activation='sigmoid')(x)\n\nmodel = Model(inputs=main_input, outputs=output)\nmodel.compile(loss='binary_crossentropy', optimizer='nadam', metrics=['accuracy'])\n# train on the data\nmodel.fit(Xtrain, Ytrain, batch_size=32, epochs=20, validation_data=(Xtest, Ytest))\n" }, { "alpha_fraction": 0.6646754145622253, "alphanum_fraction": 0.6965352296829224, "avg_line_length": 32.93243408203125, "blob_id": "65122eb3bd7f21a1f0ede028fc3766b2a46a411e", "content_id": "f8b69339afbb2c19618a6bd1833d4237a68239c1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2511, "license_type": "no_license", "max_line_length": 108, "num_lines": 74, "path": "/20NewsGroup/seq-NNGN.py", "repo_name": "yanzhenguo/NNGN_model", "src_encoding": "UTF-8", "text": "import codecs\nimport pickle\nimport numpy as np\nimport keras\nfrom keras.preprocessing.sequence import pad_sequences\nfrom keras.utils.np_utils import to_categorical\nfrom keras.preprocessing.text import Tokenizer\nfrom keras.models import Sequential,Model\nfrom keras.layers import Dense, Dropout, Activation, Conv1D, GlobalMaxPooling1D, Input, Embedding, \\\n GlobalAveragePooling1D,MaxPooling2D,AveragePooling1D\nimport data_process\n\nnum_words = 30000\nmax_len = 1000\nnum_train = 11314\nnum_test = 7532\n\n# prepare the data\ntexts, Ytrain, Ytest = data_process.extract_data()\nYtrain=to_categorical(Ytrain)\nYtest=to_categorical(Ytest)\n\ntokenizer = Tokenizer(num_words=num_words)\ntokenizer.fit_on_texts(texts[:num_train])\nsequences = tokenizer.texts_to_sequences(texts)\nword_index = 
tokenizer.word_index\n# sequences=[]\n# for i in range(num_train+num_test):\n# t=[]\n# tokens=texts[i].lower().split(' ')\n# for j in range(len(tokens)):\n# index=word_index.get(tokens[j],0)\n# if index<num_words:\n# t.append(index)\n# else:\n# t.append(0)\n# sequences.append(t)\n\ndata1 = pad_sequences(sequences[:num_train], maxlen=max_len)\ndata2 = pad_sequences(sequences[num_train:], maxlen=max_len)\n\nXtrain = np.zeros((num_train, (max_len-2)*3), dtype=np.int)\nXtest = np.zeros((num_test, (max_len-2)*3), dtype=np.int)\nfor i in range(num_train):\n for j in range(max_len-2):\n Xtrain[i, j*3] = data1[i, j]\n Xtrain[i, j*3+1] = data1[i][j+1]+num_words\n Xtrain[i, j*3+2] = data1[i][j+2]+num_words*2\nfor i in range(num_test):\n for j in range(max_len-2):\n Xtest[i, j*3] = data2[i, j]\n Xtest[i, j*3+1] = data2[i][j+1]+num_words\n Xtest[i, j*3+2] = data2[i][j+2]+num_words*2\n\n# shuffle the data\nindice1 = np.arange(num_train)\nnp.random.shuffle(indice1)\nXtrain = Xtrain[indice1]\nYtrain = Ytrain[indice1]\nindice2 = np.arange(num_test)\nnp.random.shuffle(indice2)\nXtest = Xtest[indice2]\nYtest = Ytest[indice2]\n\n# build the model\nmain_input = Input(shape=((max_len - 2)*3, ))\nembedding1 = Embedding(num_words*3, 800, embeddings_initializer=keras.initializers.Orthogonal())(main_input)\nx = AveragePooling1D(pool_size=3)(embedding1)\nx = GlobalMaxPooling1D()(x)\noutput = Dense(20, activation='softmax')(x)\nmodel = Model(inputs=main_input, outputs=output)\nmodel.compile(loss='categorical_crossentropy', optimizer='nadam', metrics=['accuracy'])\n# train on the data\nmodel.fit([Xtrain], Ytrain, batch_size=32, epochs=50, validation_data=([Xtest], Ytest))\n" }, { "alpha_fraction": 0.6235294342041016, "alphanum_fraction": 0.6650000214576721, "avg_line_length": 35.559139251708984, "blob_id": "45966f28f1ef9fab38d7c16f78a5ea61eee1115b", "content_id": "e05666f3e9cf3a90846d6453d8d9281ae2723351", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3400, "license_type": "no_license", "max_line_length": 110, "num_lines": 93, "path": "/20NewsGroup/seq-NNGN-2.py", "repo_name": "yanzhenguo/NNGN_model", "src_encoding": "UTF-8", "text": "import codecs\nimport pickle\nimport numpy as np\nimport keras\nfrom keras.preprocessing.sequence import pad_sequences\nfrom keras.utils.np_utils import to_categorical\nfrom keras.preprocessing.text import Tokenizer\nfrom keras.models import Sequential, Model\nfrom keras.layers import Dense, Dropout, Activation, Conv1D, GlobalMaxPooling1D, Input, Embedding, \\\n GlobalAveragePooling1D, MaxPooling2D, AveragePooling1D, Concatenate\nimport data_process\n\nnum_words = 20000\nmax_len = 800\nnum_train = 11314\nnum_test = 7532\n# prepare the data\ntexts, Ytrain, Ytest = data_process.extract_data()\nYtrain = to_categorical(Ytrain)\nYtest = to_categorical(Ytest)\ntokenizer = Tokenizer(num_words=num_words)\n# tokenizer.filters = ''\ntokenizer.fit_on_texts(texts[:num_train])\nsequences = tokenizer.texts_to_sequences(texts)\nword_index = tokenizer.word_index\n# sequences=[]\n# for i in range(num_train+num_test):\n# t=[]\n# tokens=texts[i].lower().split(' ')\n# for j in range(len(tokens)):\n# index=word_index.get(tokens[j],0)\n# if index<num_words:\n# t.append(index)\n# else:\n# t.append(0)\n# sequences.append(t)\n\ndata1 = pad_sequences(sequences[:num_train], maxlen=max_len)\ndata2 = pad_sequences(sequences[num_train:], maxlen=max_len)\n\nXtrain1 = np.zeros((num_train, (max_len - 2) * 3), dtype=np.int)\nXtest1 = np.zeros((num_test, 
(max_len - 2) * 3), dtype=np.int)\nfor i in range(num_train):\n for j in range(max_len - 2):\n Xtrain1[i, j * 3] = data1[i, j]\n Xtrain1[i, j * 3 + 1] = data1[i][j + 1] + num_words\n Xtrain1[i, j * 3 + 2] = data1[i][j + 2] + num_words * 2\nfor i in range(num_test):\n for j in range(max_len - 2):\n Xtest1[i, j * 3] = data2[i, j]\n Xtest1[i, j * 3 + 1] = data2[i][j + 1] + num_words\n Xtest1[i, j * 3 + 2] = data2[i][j + 2] + num_words * 2\n\nXtrain2 = np.zeros((num_train, (max_len - 1) * 2), dtype=np.int)\nXtest2 = np.zeros((num_test, (max_len - 1) * 2), dtype=np.int)\nfor i in range(num_train):\n for j in range(max_len - 1):\n Xtrain2[i, j * 2] = data1[i, j]\n Xtrain2[i, j * 2 + 1] = data1[i][j + 1] + num_words\nfor i in range(num_test):\n for j in range(max_len - 1):\n Xtest2[i, j * 2] = data2[i, j]\n Xtest2[i, j * 2 + 1] = data2[i][j + 1] + num_words\n\nindice1 = np.arange(num_train)\nnp.random.shuffle(indice1)\nXtrain1 = Xtrain1[indice1]\nXtrain2 = Xtrain2[indice1]\nYtrain = Ytrain[indice1]\n\nindice2 = np.arange(num_test)\nnp.random.shuffle(indice2)\nXtest1 = Xtest1[indice2]\nXtest2 = Xtest2[indice2]\nYtest = Ytest[indice2]\n\nprint('begin to build model ...')\nmain_input = Input(shape=((max_len - 2) * 3,))\nembedding1 = Embedding(num_words * 3, 800, embeddings_initializer=keras.initializers.Orthogonal())(main_input)\nx = AveragePooling1D(pool_size=3, strides=3)(embedding1)\nx = GlobalMaxPooling1D()(x)\n\ninput2 = Input(shape=((max_len - 1) * 2,))\nembedding2 = Embedding(num_words * 2, 500, embeddings_initializer=keras.initializers.Orthogonal())(input2)\ny = AveragePooling1D(pool_size=2, strides=2)(embedding2)\ny = GlobalMaxPooling1D()(y)\n\nz = Concatenate()([x, y])\noutput = Dense(20, activation='softmax')(z)\n\nmodel = Model(inputs=[main_input, input2], outputs=output)\nmodel.compile(loss='categorical_crossentropy', optimizer='nadam', metrics=['accuracy'])\nmodel.fit([Xtrain1, Xtrain2], Ytrain, batch_size=32, epochs=50, validation_data=([Xtest1, Xtest2], Ytest))\n" }, { "alpha_fraction": 0.7371090650558472, "alphanum_fraction": 0.7599323987960815, "avg_line_length": 36, "blob_id": "bbf619eb261890987ffbed068a6e8575703aa6d5", "content_id": "a99209e2cddcbf23ed37f63b79c4c2325da61c4f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1183, "license_type": "no_license", "max_line_length": 115, "num_lines": 32, "path": "/Elec/bow-NNGN.py", "repo_name": "yanzhenguo/NNGN_model", "src_encoding": "UTF-8", "text": "# coding=utf-8\nimport numpy as np\nimport keras\nfrom keras.layers import Dense, GlobalMaxPooling1D, Input, Embedding, \\\n AveragePooling1D, GlobalAveragePooling1D, Activation, Conv1D, Dropout, MaxPooling1D, LSTM, Flatten, Concatenate\nfrom keras.models import Model, load_model, Sequential\nimport data_process\n\"implementation of bow-NNGN\"\n\nNUM_WORDS = 30000\nMAX_LEN = 300\n\nimdb = data_process.ElecCorpus(num_words=NUM_WORDS,max_len=MAX_LEN)\nXtrain, Ytrain, Xtest, Ytest = imdb.get_input()\n# shuffle the data\nindice = np.arange(len(Xtrain))\nnp.random.shuffle(indice)\nXtrain = Xtrain[indice]\nXtest = Xtest[indice]\nYtrain = Ytrain[indice]\nYtest = Ytest[indice]\n# build model\nmain_input = Input(shape=(MAX_LEN,))\ninit_method = keras.initializers.Orthogonal()\nx = Embedding(NUM_WORDS, 1000)(main_input)\nx = AveragePooling1D(pool_size=3, strides=1)(x)\nx = GlobalMaxPooling1D()(x)\noutput = Dense(1, activation='sigmoid', trainable=True, use_bias=True)(x)\nmodel = Model(inputs=main_input, 
outputs=output)\nmodel.compile(loss='binary_crossentropy', optimizer='nadam', metrics=['accuracy'])\n# train on the data\nmodel.fit(Xtrain, Ytrain, batch_size=32, epochs=20, validation_data=(Xtest, Ytest))" }, { "alpha_fraction": 0.7114328742027283, "alphanum_fraction": 0.7423864006996155, "avg_line_length": 32.38333511352539, "blob_id": "59c64a745760478134c1999fa9a21590e82b195a", "content_id": "a6c703a26c143cec8a57e743d6d785c6a73b219a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2003, "license_type": "no_license", "max_line_length": 102, "num_lines": 60, "path": "/20NewsGroup/bow-NNGN.py", "repo_name": "yanzhenguo/NNGN_model", "src_encoding": "UTF-8", "text": "import codecs\nimport pickle\nimport numpy as np\nimport keras\nfrom keras.utils.np_utils import to_categorical\nfrom keras.preprocessing.sequence import pad_sequences\nfrom keras.preprocessing.text import Tokenizer\nfrom keras.models import Sequential,Model\nfrom keras.layers import Dense, Dropout, Activation, Conv1D, GlobalMaxPooling1D, Input, Embedding, \\\n GlobalAveragePooling1D,MaxPooling2D,AveragePooling1D\nimport data_process\n\nnum_words=30000\nmax_len=1000\nnum_train=11314\nnum_test=7532\n\n#prepare the data\ntexts, Ytrain, Ytest = data_process.extract_data()\nYtrain=to_categorical(Ytrain)\nYtest=to_categorical(Ytest)\ntokenizer = Tokenizer(num_words=num_words)\ntokenizer.fit_on_texts(texts[:num_train])\n\nsequence1 = tokenizer.texts_to_sequences(texts[:num_train])\nsequence2 = tokenizer.texts_to_sequences(texts[num_train:])\nword_index = tokenizer.word_index\n\n# sequences=[]\n# for i in range(50000):\n# t=[]\n# tokens=texts[i].lower().split(' ')\n# for j in range(len(tokens)):\n# index=word_index.get(tokens[j],0)\n# if index<num_words:\n# t.append(index)\n# else:\n# t.append(0)\n# sequences.append(t)\ndata1 = pad_sequences(sequence1, maxlen=max_len)\ndata2 = pad_sequences(sequence2, maxlen=max_len)\n# shuffle the data\nindice1 = np.arange(num_train)\nnp.random.shuffle(indice1)\nXtrain = data1[indice1]\nYtrain = Ytrain[indice1]\nindice2 = np.arange(num_test)\nnp.random.shuffle(indice2)\nXtest = data2[indice2]\nYtest = Ytest[indice2]\n# build the model\ninput = Input(shape=(max_len,))\nembedding1 = Embedding(num_words, 1000, embeddings_initializer=keras.initializers.Orthogonal())(input)\nx = AveragePooling1D(pool_size=3, strides=1)(embedding1)\nx = GlobalMaxPooling1D()(x)\noutput = Dense(20, activation='softmax')(x)\nmodel = Model(inputs=input, outputs=output)\nmodel.compile(loss='categorical_crossentropy', optimizer='nadam', metrics=['accuracy'])\n# train on the data\nmodel.fit(Xtrain, Ytrain, batch_size=32, epochs=50, validation_data=(Xtest, Ytest))\n" } ]
9
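The seq-NNGN scripts above implement their neural n-gram by index arithmetic rather than convolution: over a vocabulary of size `V`, each trigram `(w[j], w[j+1], w[j+2])` is rewritten as `(w[j], w[j+1]+V, w[j+2]+2V)`, so a single `Embedding(3*V, d)` learns position-specific word vectors and `AveragePooling1D(pool_size=3)` then averages each triple into one n-gram embedding before the global max pool. A vectorised NumPy sketch of that expansion, with a toy sequence for illustration:

```python
import numpy as np

V = 30000                                # vocabulary size, as in the scripts
seq = np.array([[5, 17, 42, 8, 99]])     # one padded document of length 5
L = seq.shape[1]

expanded = np.empty((seq.shape[0], (L - 2) * 3), dtype=int)
expanded[:, 0::3] = seq[:, :-2]          # first word of each trigram
expanded[:, 1::3] = seq[:, 1:-1] + V     # second word, shifted into its own id range
expanded[:, 2::3] = seq[:, 2:] + 2 * V   # third word, shifted again
print(expanded)  # [[    5 30017 60042    17 30042 60008    42 30008 60099]]
```

This reproduces the nested per-position loops in `seq-NNGN.py` without the double `for`.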
XBachirX/picusAlarm
https://github.com/XBachirX/picusAlarm
fad82c89831041e2ccc899eeb74d4a091f12141e
4109f57edef54b5eb6116605ae647fef40ab8dd8
0ee1b3d56880ceb4d82c23fe84d72dff45e1d6ba
refs/heads/master
2021-01-12T07:01:44.809321
2011-09-25T22:37:03
2011-09-25T22:37:03
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5126050710678101, "alphanum_fraction": 0.5910364389419556, "avg_line_length": 14.904762268066406, "blob_id": "40358d8031b3317d5453c7110948c9c0350bf364", "content_id": "226fb33ed4b8987af05ec5e32550957cac0d53a7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 357, "license_type": "no_license", "max_line_length": 34, "num_lines": 21, "path": "/Libs/ExternalLib/LCDutils.c", "repo_name": "XBachirX/picusAlarm", "src_encoding": "UTF-8", "text": "#include \"LCDutils.h\"\r\n#include \"HWlib.h\"\r\n\r\nvoid ClearScreen(void){\r\n\tchar mess[] = {0x7C, 0x00, 0x00};\r\n\tUARTWrite(2, mess);\r\n}\r\n\r\nvoid Demo(void) {\r\n\tchar mess[] = {0x7C, 0x04, 0x00};\r\n\tUARTWrite(2, mess);\r\n}\r\n\r\nvoid Reverse(void) {\r\n\tchar mess[] = {0x7C, 0x12, 0x00};\r\n\tUARTWrite(2, mess);\r\n}\r\n\r\nvoid LCDWrite(char mess[]) {\r\n\tUARTWrite(2, mess);\r\n}\r\n\r\n" }, { "alpha_fraction": 0.6477272510528564, "alphanum_fraction": 0.6477272510528564, "avg_line_length": 9, "blob_id": "e5d3fae85c6a2e3cfa6289ecbd1cc934f57af134", "content_id": "6944c5dc7ad9427fe5577d8b4cd0418dd2f5c623", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 88, "license_type": "no_license", "max_line_length": 17, "num_lines": 8, "path": "/Libs/ExternalLib/Include/Config.h", "repo_name": "XBachirX/picusAlarm", "src_encoding": "UTF-8", "text": "#ifndef CONFIG_H\r\n#define CONFIG_H\r\n\r\nint saveConfig();\r\n\r\nint loadConfig();\r\n\r\n#endif\r\n" }, { "alpha_fraction": 0.4939231872558594, "alphanum_fraction": 0.5824015736579895, "avg_line_length": 18.989795684814453, "blob_id": "6e2648b0765f79dadcd250a48ddc2dcbab251298", "content_id": "f7c7b4760c9ed0c0dbbd6d6169a780528122911a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 2057, "license_type": "no_license", "max_line_length": 64, "num_lines": 98, "path": "/HTTPPrint.h", "repo_name": "XBachirX/picusAlarm", "src_encoding": "UTF-8", "text": "/**************************************************************\r\n * HTTPPrint.h\r\n * Provides callback headers and resolution for user's custom\r\n * HTTP Application.\r\n * \r\n * This file is automatically generated by the MPFS Utility\r\n * ALL MODIFICATIONS WILL BE OVERWRITTEN BY THE MPFS GENERATOR\r\n **************************************************************/\r\n\r\n#ifndef __HTTPPRINT_H\r\n#define __HTTPPRINT_H\r\n\r\n#include \"TCPIP Stack/TCPIP.h\"\r\n\r\n#if defined(STACK_USE_HTTP2_SERVER)\r\n\r\nextern HTTP_CONN curHTTP;\r\nextern HTTP_STUB httpStubs[MAX_HTTP_CONNECTIONS];\r\nextern BYTE curHTTPID;\r\n\r\nvoid HTTPPrint(DWORD callbackID);\r\nvoid HTTPPrint_sensor(WORD);\r\n\r\nvoid HTTPPrint(DWORD callbackID)\r\n{\r\n\tswitch(callbackID)\r\n\t{\r\n case 0x0000004d:\r\n\t\t\tHTTPPrint_sensor(0);\r\n\t\t\tbreak;\r\n case 0x0000004e:\r\n\t\t\tHTTPPrint_sensor(1);\r\n\t\t\tbreak;\r\n case 0x0000004f:\r\n\t\t\tHTTPPrint_sensor(2);\r\n\t\t\tbreak;\r\n case 0x00000050:\r\n\t\t\tHTTPPrint_sensor(3);\r\n\t\t\tbreak;\r\n case 0x00000051:\r\n\t\t\tHTTPPrint_sensor(4);\r\n\t\t\tbreak;\r\n case 0x00000052:\r\n\t\t\tHTTPPrint_sensor(5);\r\n\t\t\tbreak;\r\n case 0x00000053:\r\n\t\t\tHTTPPrint_sensor(6);\r\n\t\t\tbreak;\r\n case 0x00000054:\r\n\t\t\tHTTPPrint_sensor(7);\r\n\t\t\tbreak;\r\n case 0x00000055:\r\n\t\t\tHTTPPrint_sensor(8);\r\n\t\t\tbreak;\r\n case 0x00000056:\r\n\t\t\tHTTPPrint_sensor(9);\r\n\t\t\tbreak;\r\n case 
0x00000057:\r\n\t\t\tHTTPPrint_sensor(10);\r\n\t\t\tbreak;\r\n case 0x00000058:\r\n\t\t\tHTTPPrint_sensor(11);\r\n\t\t\tbreak;\r\n case 0x00000059:\r\n\t\t\tHTTPPrint_sensor(12);\r\n\t\t\tbreak;\r\n case 0x0000005a:\r\n\t\t\tHTTPPrint_sensor(13);\r\n\t\t\tbreak;\r\n case 0x0000005b:\r\n\t\t\tHTTPPrint_sensor(14);\r\n\t\t\tbreak;\r\n case 0x0000005c:\r\n\t\t\tHTTPPrint_sensor(15);\r\n\t\t\tbreak;\r\n case 0x0000005d:\r\n\t\t\tHTTPPrint_sensor(16);\r\n\t\t\tbreak;\r\n case 0x0000005e:\r\n\t\t\tHTTPPrint_sensor(17);\r\n\t\t\tbreak;\r\n\t\tdefault:\r\n\t\t\t// Output notification for undefined values\r\n\t\t\tTCPPutROMArray(sktHTTP, (ROM BYTE*)\"!DEF\", 4);\r\n\t}\r\n\r\n\treturn;\r\n}\r\n\r\nvoid HTTPPrint_(void)\r\n{\r\n\tTCPPut(sktHTTP, '~');\r\n\treturn;\r\n}\r\n\r\n#endif\r\n\r\n#endif\r\n" }, { "alpha_fraction": 0.528553307056427, "alphanum_fraction": 0.595812201499939, "avg_line_length": 21.522388458251953, "blob_id": "bc9d26895e0dff15135d2668ed24d447be8b0ff3", "content_id": "b98e22cd9bebb0a5cac7de0bf005a737379c6cb6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 1576, "license_type": "no_license", "max_line_length": 82, "num_lines": 67, "path": "/Libs/ExternalLib/Config.c", "repo_name": "XBachirX/picusAlarm", "src_encoding": "UTF-8", "text": "#include \"WFlib.h\"\r\n\r\n#include \"libpic30.h\"\r\n\r\n\r\n#include \"Sensor.h\"\r\n\r\n#include \"Config.h\"\r\n\r\nextern Sensor sensors[NUM_SENSORS];\r\n\r\n\r\nint loadConfig() {\r\n\tint in1 = 0;\r\n\tint *rmem = (int*) sensors;\r\n\tint rdoffset;\r\n\tint vRead,vRead1;\r\n\tlong int addr1 = 0x2A002+1024;\r\n\r\n\twhile (in1<sizeof (sensors))\r\n\t{\r\n\t\tTBLPAG = (((addr1+in1) & 0x7F0000)>>16);\r\n\t\trdoffset = ((addr1+in1) & 0x00FFFF);\r\n\t\tasm(\"tblrdh.w [%1], %0\" : \"=r\"(vRead1) : \"r\"(rdoffset));\r\n\t\tasm(\"tblrdl.w [%1], %0\" : \"=r\"(vRead) : \"r\"(rdoffset));\t\r\n\r\n\t\t*rmem = vRead;\r\n\t\tin1=in1+2;\r\n\t\trmem = rmem + 1;\r\n\t}\r\n\treturn 0;\r\n}\r\n\r\n\r\nint saveConfig() {\r\n\t_erase_flash(0x2A000+1024);\r\n\tint in1=0;\r\n\tint *wmem = (int*)sensors;\r\n\tunsigned int offset;\r\n\r\n\tNVMCON = 0x4003; // Initialize NVMCON\r\n\tTBLPAG = (0x2A000+1024)>>16; \t\t\t\t\t\t// Initialize PM Page Boundary SFR\r\n\toffset = (0x2A000+1024) & 0xFFFF; \t\t\t\t// Initialize lower word of address\r\n\t__builtin_tblwtl(offset, 0x0A0B); \t// Write to address low word\r\n\t\r\n\tasm(\"DISI #5\"); \t\r\n\t__builtin_write_NVM(); \t\r\n\r\n\twhile (in1<sizeof(sensors))\r\n\t{\r\n\r\n\t\tunsigned long progAddr = 0x2A002+1024+in1; \t\t\t// Address of word to program\r\n\t\tunsigned int progDataL = (unsigned int) *(wmem);\t\t\t// Data to program lower word\r\n\r\n\t\tNVMCON = 0x4003; // Initialize NVMCON\r\n\t\tTBLPAG = progAddr>>16; \t\t\t\t\t\t// Initialize PM Page Boundary SFR\r\n\t\toffset = progAddr & 0xFFFF; \t\t\t\t// Initialize lower word of address\r\n\t\t__builtin_tblwtl(offset, progDataL); \t// Write to address low word\r\n\t\t\r\n\t\tasm(\"DISI #5\"); \t\r\n\t\t__builtin_write_NVM(); \t\r\n\r\n\t\tin1=in1+2;\r\n\t\twmem++;\r\n\t}\r\n\treturn 0;\r\n}\r\n" }, { "alpha_fraction": 0.5147286653518677, "alphanum_fraction": 0.5209302306175232, "avg_line_length": 18.545454025268555, "blob_id": "9114f4880581008c58459a302e1470f7576af5ea", "content_id": "df9a2c401c9d8ca92ab4e492f545ce6e3000f4f8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 645, "license_type": "no_license", "max_line_length": 54, "num_lines": 
33, "path": "/Web pages/static/javascripts/picusAlarm.logger.js", "repo_name": "XBachirX/picusAlarm", "src_encoding": "UTF-8", "text": "var Logger = (function() {\n return {\n _$data: null,\n\n onReady: function() {\n Logger._$data = $('#screen');\n },\n\n _log: function(msg) {\n Logger._$data.text(msg);\n setTimeout(Logger.timeoutCb, 3000);\n },\n\n timeoutCb: function() {\n Logger._$data.empty();\n },\n\n info: function(msg) {\n Logger._$data.css('background-color', 'grey');\n Logger._log(msg);\n },\n\n warning: function(msg) {\n Logger._$data.css('background-color', 'orange');\n Logger._log(msg);\n },\n\n error: function(msg) {\n Logger._$data.css('background-color', 'red');\n Logger._log(msg);\n }\n }\n})();\n" }, { "alpha_fraction": 0.5181784629821777, "alphanum_fraction": 0.5271759033203125, "avg_line_length": 32.01874923706055, "blob_id": "fb5f8c93f29e67e88f306b15eaffd5388b8aa184", "content_id": "e9893a45091e0a4e6cdd508dd1fbe2a577b517e9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 5446, "license_type": "no_license", "max_line_length": 87, "num_lines": 160, "path": "/HTTPApp.c", "repo_name": "XBachirX/picusAlarm", "src_encoding": "UTF-8", "text": " \r\n/****************************************************************************\r\n SECTION \tInclude\r\n****************************************************************************/\r\n\r\n#include \"TCPIP Stack/TCPIP.h\"\r\n#if defined(STACK_USE_HTTP2_SERVER)\r\n\r\n#include \"stdlib.h\"\r\n#include \"string.h\"\r\n#include \"Sensor.h\"\r\n#include \"Config.h\"\r\n\r\n/****************************************************************************\r\n SECTION\tDefine\r\n****************************************************************************/\r\n#define __HTTPAPP_C\r\n\r\n\r\nextern struct Sensor_t sensors[];\r\n\r\n/****************************************************************************\r\n SECTION \tAuthorization Handlers\r\n****************************************************************************/\r\n \r\n\r\n/*****************************************************************************\r\n FUNCTION\tBYTE HTTPNeedsAuth(BYTE* cFile)\r\n\r\n This function is used by the stack to decide if a page is access protected.\r\n If the function returns 0x00, the page is protected, if returns 0x80, no \r\n authentication is required\r\n*****************************************************************************/\r\n#if defined(HTTP_USE_AUTHENTICATION)\r\nBYTE HTTPNeedsAuth(BYTE* cFile)\r\n{\r\n\t//\tIf you want to restrict the access to some page, include it in the folder \"protect\"\r\n\t//\there you can change the folder, or add others\r\n\tif(memcmppgm2ram(cFile, (ROM void*)\"protect\", 7) == 0)\r\n\t\treturn 0x00;\t\t// Authentication will be needed later\r\n\r\n\t// You can match additional strings here to password protect other files.\r\n\t// You could switch this and exclude files from authentication.\r\n\t// You could also always return 0x00 to require auth for all files.\r\n\t// You can return different values (0x00 to 0x79) to track \"realms\" for below.\r\n\r\n\treturn 0x80;\t\t\t// No authentication required\r\n}\r\n#endif\r\n\r\n/*****************************************************************************\r\n FUNCTION\tBYTE HTTPCheckAuth(BYTE* cUser, BYTE* cPass)\r\n\t\r\n This function checks if username and password inserted are acceptable\r\n\r\n ***************************************************************************/\r\n#if 
defined(HTTP_USE_AUTHENTICATION)\r\nBYTE HTTPCheckAuth(BYTE* cUser, BYTE* cPass)\r\n{\r\n\tif(strcmppgm2ram((char *)cUser,(ROM char *)\"admin\") == 0\r\n\t\t&& strcmppgm2ram((char *)cPass, (ROM char *)\"flyport\") == 0)\r\n\t\treturn 0x80;\t\t// We accept this combination\r\n\t\r\n\t// You can add additional user/pass combos here.\r\n\t// If you return specific \"realm\" values above, you can base this \r\n\t// decision on what specific file or folder is being accessed.\r\n\t// You could return different values (0x80 to 0xff) to indicate \r\n\t// various users or groups, and base future processing decisions\r\n\t// in HTTPExecuteGet/Post or HTTPPrint callbacks on this value.\r\n\t\r\n\treturn 0x00;\t\t\t// Provided user/pass is invalid\r\n}\r\n#endif\r\n\r\n/****************************************************************************\r\n SECTION\tGET/POST Form Handlers\r\n****************************************************************************/\r\n \r\n#define API_SET_SENSOR \"set_sensor.cgi\"\r\n\r\n\r\n#define getHttpParam(KEY) HTTPGetROMArg(curHTTP.data, (ROM BYTE *)KEY)\r\n\r\n\r\n/****************************************************************************\r\n FUNCTION\tHTTP_IO_RESULT HTTPExecuteGet(void)\r\n\t\r\n This function processes every GET request from the pages.\r\n*****************************************************************************/\r\nHTTP_IO_RESULT HTTPExecuteGet(void)\r\n{\t\r\n\tBYTE filename[20];\r\n\r\n\t// Load the file name\r\n\t// Make sure BYTE filename[] above is large enough for your longest name\r\n\tMPFSGetFilename(curHTTP.file, filename, 20);\r\n\t//LCDWrite(filename);\r\n\t// If it's the LED updater file\r\n\tif(!memcmppgm2ram(filename, API_SET_SENSOR, strlen(API_SET_SENSOR)+1)) {\r\n\t\tint id = atoi(getHttpParam(\"address\"));\r\n\t\tsensors[id].type = atoi(getHttpParam(\"type\"));\r\n\t\tsensors[id].enabled = atoi(getHttpParam(\"enabled\"));\r\n\t\tsensors[id].address = id; /* CUIDADO: address is assumed equal to id */\r\n\t\tsensors[id].threshold = atoi(getHttpParam(\"threshold\"));\r\n\t\tsensors[id].triggerType = atoi(getHttpParam(\"triggerType\"));\r\n\t\tstrcpy(sensors[id].name, getHttpParam(\"name\"));\r\n\r\n\t\tsaveConfig();\r\n\t}\r\n\r\n\treturn HTTP_IO_DONE;\r\n}\r\n\r\n\r\n\r\n#ifdef HTTP_USE_POST\r\n/****************************************************************************\r\n FUNCTION\tHTTP_IO_RESULT HTTPExecutePost(void)\r\n\t\r\n This function processes every POST request from the pages. 
\r\n*****************************************************************************/\r\nHTTP_IO_RESULT HTTPExecutePost(void)\r\n{\r\n\r\n\t\r\n\treturn HTTP_IO_DONE;\r\n}\r\n#endif\r\n\r\n\r\nstatic void sendRespStr(const char* key, const char* value) {\r\n\tBYTE resp[30];\r\n\tsprintf(resp, \"%s=%s;\", key, value);\r\n\tTCPPutString(sktHTTP, resp);\t\r\n}\r\n\r\nstatic void sendRespInt(const char* key, int value) {\r\n\tBYTE resp[30];\r\n\tsprintf(resp, \"%s=%d;\", key, value);\r\n\tTCPPutString(sktHTTP, resp);\t\r\n}\r\n\r\nvoid HTTPPrint_sensor(WORD id) {\r\n\r\n\tSensor* s = &sensors[id];\r\n\t\r\n\tsendRespInt(\"type\", s->type);\r\n\tsendRespStr(\"name\", s->name);\r\n\tsendRespInt(\"enabled\", s->enabled);\r\n\tsendRespInt(\"address\", s->address);\r\n\tsendRespInt(\"threshold\", s->threshold);\r\n\tsendRespInt(\"triggerType\", s->triggerType);\r\n\tsendRespInt(\"triggered\", s->triggered);\r\n\tsendRespInt(\"timeTriggered\", s->timeTriggered);\r\n\tsendRespInt(\"value\", s->value);\r\n\t//LCDWrite(\"Read\\n\");\r\n}\r\n\r\n\r\n\r\n#endif\r\n" }, { "alpha_fraction": 0.6947368383407593, "alphanum_fraction": 0.6947368383407593, "avg_line_length": 21.25, "blob_id": "91ffbb2894c1c635d26fdfd9d836131c3f0806de", "content_id": "56e922fc8c79d9812745fcd4b0fd912671af8b21", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 95, "license_type": "no_license", "max_line_length": 27, "num_lines": 4, "path": "/Libs/ExternalLib/Include/LCDutils.h", "repo_name": "XBachirX/picusAlarm", "src_encoding": "UTF-8", "text": "void ClearScreen(void);\r\nvoid Demo(void);\r\nvoid Reverse(void);\r\nvoid LCDWrite(char mess[]);\r\n\r\n" }, { "alpha_fraction": 0.4986225962638855, "alphanum_fraction": 0.4986225962638855, "avg_line_length": 32, "blob_id": "7b8b7b8fbff5a52d22cfeaec9fc7d649ae3aa7a9", "content_id": "d0352b446d53fb42c5223d6588e38476adcd3e16", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 363, "license_type": "no_license", "max_line_length": 80, "num_lines": 11, "path": "/Web pages/static/javascripts/picusAlarm.owner.js", "repo_name": "XBachirX/picusAlarm", "src_encoding": "UTF-8", "text": "var Owner = (function() {\n return {\n onReady: function() {\n $('<form>'\n + '<label>Email: <input type=\"text\" name=\"email\" id=\"email\" /></label>'\n + '<label>Phone: +<input type=\"text\" name=\"phone\" id=\"phone\" /></label>'\n + '<input type=\"submit\" id=\"save_owner\" value=\"Save\" />'\n + '</form>').appendTo('#owner');\n }\n }\n})();\n" }, { "alpha_fraction": 0.5189481973648071, "alphanum_fraction": 0.5239752531051636, "avg_line_length": 26.733333587646484, "blob_id": "1d7dff3fb80e9b336b6ed230722e2ecf33f5d746", "content_id": "5bbe878714a9a5a976d0357a5110afdf61a0a043", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2586, "license_type": "no_license", "max_line_length": 125, "num_lines": 90, "path": "/Web pages/main.py", "repo_name": "XBachirX/picusAlarm", "src_encoding": "UTF-8", "text": "import os\r\nimport json\r\n\r\nimport web\r\n\r\n\r\n\r\nurls = (\r\n '/', 'MainHandler',\r\n '/get_sensors.cgi', 'GetSensorsHandler',\r\n '/set_sensor.cgi', 'SetSensorHandler',\r\n '/init_sensors.cgi', 'InitSensorsHandler',\r\n)\r\n\r\n\r\napplication = web.application(urls, globals())\r\nworking_dir = os.path.dirname(__file__)\r\nrender = web.template.render(os.path.join(working_dir, '.'))\r\ndb = web.database(dbn='sqlite', db=os.path.join(working_dir, 
'testdb'))\r\n\r\n\r\n\r\nclass MainHandler(object):\r\n def GET(self):\r\n return render.index()\r\n\r\n\r\nclass GetSensorsHandler(object):\r\n def GET(self):\r\n template = 'type=%d;name=%s;enabled=%d;address=%d;threshold=%d;triggerType=%d;triggered=%d;timeTriggered=%d;value=%d'\r\n entries = []\r\n for entry in db.select('sensors'):\r\n entries.append(template % (\r\n entry.type,\r\n entry.name,\r\n entry.enabled,\r\n entry.address,\r\n entry.threshold,\r\n entry.triggerType,\r\n entry.triggered,\r\n entry.timeTriggered,\r\n entry.value\r\n ))\r\n return '<div>' + '</div><div>'.join(entries) + '</div>'\r\n\r\n\r\nclass SetSensorHandler(object):\r\n def GET(self):\r\n data = web.input(triggered=0, timeTriggered=0, value=0)\r\n print data\r\n kw = {\r\n 'where': \"address={0}\".format(data.address),\r\n 'type': int(data.type),\r\n 'name': data.name,\r\n 'enabled': int(data.enabled),\r\n 'address': int(data.address),\r\n 'threshold': int(data.threshold),\r\n 'triggerType': int(data.triggerType),\r\n 'triggered': int(data.triggered),\r\n 'timeTriggered': int(data.timeTriggered),\r\n 'value': int(data.value)\r\n }\r\n\r\n db.update('sensors', **kw)\r\n\r\n\r\nclass InitSensorsHandler(object):\r\n def GET(self):\r\n db.query(\"\"\"\r\n CREATE TABLE sensors (\r\n type integer default 0,\r\n name text default \"\",\r\n enabled integer default 0,\r\n address integer primary key,\r\n threshold integer default 0,\r\n triggerType integer default 0,\r\n triggered integer default 0,\r\n timeTriggered integer default 0,\r\n value integer default 0\r\n )\"\"\")\r\n\r\n for i in xrange(20):\r\n db.insert('sensors', address=i)\r\n\r\n\r\n\r\n\r\nif __name__ == \"__main__\":\r\n application = web.application(urls, globals())\r\n application.run()\r\n" }, { "alpha_fraction": 0.5170677304267883, "alphanum_fraction": 0.5377727746963501, "avg_line_length": 17.010639190673828, "blob_id": "82a59b902c5e89d949a379c25a500d062a276692", "content_id": "d2c3d4f1eed5c7c3595d885e3625aeabe2f58c6a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 1787, "license_type": "no_license", "max_line_length": 75, "num_lines": 94, "path": "/taskFlyport.c", "repo_name": "XBachirX/picusAlarm", "src_encoding": "UTF-8", "text": "#include \"taskFlyport.h\"\r\n#include \"Sensor.h\"\r\n#include \"Config.h\"\r\n#include \"LCDutils.h\"\r\n#include <time.h>\r\n#include <string.h>\r\n\r\nSensor sensors[NUM_SENSORS];\r\n\r\nint alarmGreen = 17;\r\nint alarmRed = 6;\r\nstatic int n = 0;\r\nint triggered(int value, SensorType st, TriggerType tt, int th)\r\n{\r\n\tif (st == DIGITAL) {\r\n\t\tif (tt == ACTIVE_LOW) {\r\n\t\t\treturn value;\r\n\t\t} else {\r\n\t\t\treturn !value;\r\n\t\t}\r\n\t} else { //ANALOG\r\n\t\tif (tt == RISING){\r\n\t\t\treturn value > th ? 1 : 0;\r\n\t\t} else {\r\n\t\t\treturn value <= th ? 
1 : 0;\r\n\t\t}\r\n\t}\r\n}\r\n\r\nvoid alarm()\r\n{\r\n\tif (n == 0) {\r\n\t\tIOPut(alarmGreen, 1);\r\n\t\tIOPut(alarmRed, 0);\r\n\t\tLCDWrite(\"Intrusion Detected!!\");\r\n\t} else {\r\n\t\tif ((n % 30) == 0) {\r\n\t\t\tIOPut(alarmGreen, toggle);\r\n\t\t\tIOPut(alarmRed, toggle);\r\n\t\t\tn = 1;\r\n\t\t}\r\n\t}\r\n\tn++;\r\n}\r\n\r\nvoid FlyportTask()\r\n{\r\n\tchar buf[10];\r\n\tint i;\r\n\tint intrusion = 0;\r\n\t\r\n\tIOInit(p7, inup);\r\n\t\t\r\n\tIOInit(p9, inup);\r\n\t//loadConfig();\r\n\t\r\n\tWFConnect(WF_DEFAULT);\r\n\tUARTInit(2,115200);\r\n\tUARTOn(2);\r\n\tvTaskDelay(50);\r\n\tUARTWrite(1,\"Started!!!\\r\\n\");\r\n\t//memset(sensors, 0, (sizeof (Sensor))*NUM_SENSORS);\r\n\tsaveConfig();\r\n\tClearScreen();\r\n\tLCDWrite(\"Puppaaaa!!!!\\n\");\r\n\twhile(1) {\r\n\t\t// read sensors status and update sensors structure.\r\n\t\tfor (i = 0 ; i < NUM_SENSORS ; i++) {\r\n\t\t\tSensor *s = &sensors[i];\r\n\t\t\tint value;\r\n\t\t\tint r;\r\n\t\t\t//r = ADCVal(1);\r\n\t\t\t//sprintf(buf, \"%d\\r\\n\", r);\r\n\t\t\t//UARTWrite(1,buf);\r\n\t\t\tif (s->enabled) { \r\n\t\t\t\t// XXX Analog I/O\r\n\t\t\t\tif (s->type == DIGITAL)\r\n\t\t\t\t\tvalue = IOGet(s->address);\r\n\t\t\t\telse\r\n\t\t\t\t\tvalue = ADCVal(s->address);\r\n\t\t\t\t//sprintf(buf, \"%d\\r\\n\", )\r\n\t\t\t\ts->triggered = triggered(value, s->type, s->triggerType, s->threshold);\r\n\t\t\t\ts->value = value;\r\n\t\t\t\tif (s->triggered) {\r\n\t\t\t\t\tintrusion = 1;\r\n\t\t\t\t}\r\n\t\t\t\t\r\n\t\t\t\tif (intrusion) {\r\n\t\t\t\t\talarm();\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n}\r\n" }, { "alpha_fraction": 0.4947563111782074, "alphanum_fraction": 0.5077112913131714, "avg_line_length": 17.297618865966797, "blob_id": "1bb0060d24a9d590fc687b434669fd7d84f62652", "content_id": "4a024bd96f29ab2446553dd31c995568cba91722", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 1621, "license_type": "no_license", "max_line_length": 77, "num_lines": 84, "path": "/Libs/Flyport libs/ISRs.c", "repo_name": "XBachirX/picusAlarm", "src_encoding": "UTF-8", "text": "#include \"HWlib.h\"\r\n\r\n\r\n/****************************************************************************\r\n SECTION \tISR (Interrupt Service routines)\r\n****************************************************************************/\r\nvoid __attribute__((interrupt, no_auto_psv)) _U1RXInterrupt(void)\r\n{\r\n\tUARTRxInt(1);\r\n}\r\n\r\nvoid __attribute__((interrupt, no_auto_psv)) _U2RXInterrupt(void)\r\n{\r\n#if UART_PORTS >= 2\r\n\tUARTRxInt(2);\r\n#endif\r\n}\r\n\r\nvoid __attribute__((interrupt, no_auto_psv)) _U3RXInterrupt(void)\r\n{\r\n#if UART_PORTS >= 3\r\n\tUARTRxInt(3);\r\n#endif\r\n}\r\n\r\nvoid __attribute__((interrupt, no_auto_psv)) _U4RXInterrupt(void)\r\n{\r\n#if UART_PORTS == 4\r\n\tUARTRxInt(4);\r\n#endif\r\n}\r\n\r\n\r\nvoid __attribute__((interrupt, auto_psv)) _DefaultInterrupt(void)\r\n{\r\n UARTWrite(1,\"!!! Default interrupt handler !!!\\r\\n\" );\r\n while (1)\r\n {\r\n\t Nop();\r\n\t Nop();\r\n\t Nop();\r\n }\r\n}\r\n\r\nvoid __attribute__((interrupt, auto_psv)) _OscillatorFail(void)\r\n{\r\n UARTWrite(1, \"!!! Oscillator Fail interrupt handler !!!\\r\\n\" );\r\n while (1)\r\n {\r\n\t Nop();\r\n\t Nop();\r\n\t Nop();\r\n }\r\n}\r\nvoid __attribute__((interrupt, auto_psv)) _AddressError(void)\r\n{\r\n UARTWrite(1,\"!!! 
Address Error interrupt handler !!!\\r\\n\" );\r\n while (1)\r\n {\r\n\t Nop();\r\n\t Nop();\r\n\t Nop();\r\n }\r\n}\r\nvoid __attribute__((interrupt, auto_psv)) _StackError(void)\r\n{\r\n UARTWrite(1,\"!!! Stack Error interrupt handler !!!\\r\\n\" );\r\n while (1)\r\n {\r\n\t Nop();\r\n\t Nop();\r\n\t Nop();\r\n }\r\n}\r\nvoid __attribute__((interrupt, auto_psv)) _MathError(void)\r\n{\r\n UARTWrite(1,\"!!! Math Error interrupt handler !!!\\r\\n\" );\r\n while (1)\r\n {\r\n\t Nop(); \r\n\t Nop();\r\n\t Nop();\r\n }\r\n}\r\n" }, { "alpha_fraction": 0.6420824527740479, "alphanum_fraction": 0.6594359874725342, "avg_line_length": 12.28125, "blob_id": "a142b2a91b8ceb7a025c1d19de615e46f7882b1c", "content_id": "d2251dc6923b567aecf4f8da2fc55e825cd151b8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 461, "license_type": "no_license", "max_line_length": 28, "num_lines": 32, "path": "/Libs/ExternalLib/Include/Sensor.h", "repo_name": "XBachirX/picusAlarm", "src_encoding": "UTF-8", "text": "\r\n#ifndef SENSOR_H\r\n#define SENSOR_H\r\n\r\n\r\n#define NUM_SENSORS 18\r\n\r\n\r\ntypedef enum SensorType_t {\r\n\tDIGITAL,\r\n\tANALOG\r\n} SensorType;\r\n\r\ntypedef enum TriggerType_t {\r\n\tRISING = 0,\r\n\tFALLING = 1,\r\n\tACTIVE_LOW = 0, \r\n\tACTIVE_HIGH = 1\r\n} TriggerType;\r\n\r\ntypedef struct Sensor_t {\r\n\tSensorType type;\r\n\tchar name[16];\r\n\tint enabled;\r\n\tint address;\r\n\tint threshold;\r\n\tTriggerType triggerType;\r\n\tint triggered;\r\n\tint timeTriggered;\r\n\tint value;\r\n} Sensor;\r\n\r\n#endif\r\n\r\n" } ]
12
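`taskFlyport.c` above polls every enabled sensor once per loop and derives an intrusion flag from `triggered()`: digital inputs are read as pin levels and interpreted by polarity, analog inputs are ADC readings compared against a threshold in the configured direction. A Python sketch of that decision, with the enum values taken from `Sensor.h` (where `RISING`/`ACTIVE_LOW` and `FALLING`/`ACTIVE_HIGH` deliberately share values):

```python
DIGITAL, ANALOG = 0, 1
RISING, FALLING = 0, 1          # analog trigger directions
ACTIVE_LOW, ACTIVE_HIGH = 0, 1  # digital polarities (same underlying values)

def triggered(value, sensor_type, trigger_type, threshold):
    if sensor_type == DIGITAL:
        # Mirrors the C: ACTIVE_LOW returns the raw level, ACTIVE_HIGH inverts it.
        return bool(value) if trigger_type == ACTIVE_LOW else not value
    if trigger_type == RISING:
        return value > threshold
    return value <= threshold

assert triggered(512, ANALOG, RISING, 400)       # ADC reading above threshold
assert not triggered(0, DIGITAL, ACTIVE_LOW, 0)  # pin low on an active-low input
```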
mdenson-dayspring/praxis-bridge
https://github.com/mdenson-dayspring/praxis-bridge
ca5464f8c214c22931af6ddc20210b25f0d67bb4
cc36908e68c42bb72bb10f85c182b811cb3fb361
16ba1393e63e3f15a06427299e5c5e59fb617204
refs/heads/master
2023-01-11T21:18:54.313786
2015-08-26T20:11:33
2015-08-26T20:11:33
312,111,322
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.45996859669685364, "alphanum_fraction": 0.4769230782985687, "avg_line_length": 24.0787410736084, "blob_id": "228fb73cfe43abe5279051fb60d1c85b3703f994", "content_id": "ce24060ba4c737a67f13704b3677e701296b98c1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3193, "license_type": "no_license", "max_line_length": 74, "num_lines": 127, "path": "/bridge/carddeck.py", "repo_name": "mdenson-dayspring/praxis-bridge", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Thu Aug 13 16:14:51 2015\n\n@author: mdenson\n\"\"\"\nimport random\nimport functools\n\nsuits = ['♣', '♢', '♡', '♠']\nranks = ['2', '3', '4', '5', '6', '7', '8', '9', '10', 'J', 'Q', 'K', 'A']\n\nclass Deck(object):\n def __init__(self):\n self.cards = []\n self.cards = [i for i in range(54)]\n \n def __len__(self):\n return len(self.cards)\n \n def removeJokers(self):\n self.cards.remove(53)\n self.cards.remove(52)\n \n def shuffle(self):\n random.shuffle(self.cards) \n \n def cut(self):\n cutpt = random.randint(2, len(self.cards)-1)\n cards = self.cards[cutpt:]+self.cards[:cutpt]\n self.cards = cards\n \n def dealcard(self):\n return self.cards.pop()\n \n def twoChar(self, card):\n if card > 51:\n return 'JK'\n return ranks[card >> 2] + suits[card & 0x3]\n\nclass Hand(object):\n def __init__(self):\n self.cards = []\n \n def add(self, card):\n self.cards.append(card)\n \n def sort(self):\n self.cards.sort(reverse=True)\n\ndef bridgeCompare(x,y):\n cmp = (y & 0x3) - (x & 0x3) \n if cmp == 0:\n cmp = (y>>2) - (x>>2)\n return cmp\n \nclass BridgeHand(Hand):\n def sort(self):\n self.cards.sort(key=functools.cmp_to_key(bridgeCompare))\n \n def toString(self, leftStrings, hand):\n retString = []\n for n, l in enumerate(leftStrings):\n if n == 0:\n retString.append(l + hand.upper())\n else:\n suit = suits[4-n]\n cards = [c for c in self.cards if (c & 0x3) == n-1]\n retString.append(l + suit)\n for c in cards:\n retString[n] += ' ' + ranks[c >> 2]\n return retString\n \nhandnames = ['East', 'South', 'West', 'North']\n\nclass Bridge(object):\n def __init__(self, deck):\n deck.removeJokers()\n deck.shuffle()\n deck.shuffle()\n deck.shuffle()\n deck.cut()\n deck.shuffle()\n self.hands = {}\n for hn in handnames:\n self.hands[hn] = BridgeHand()\n \n self.deal(deck)\n \n for n, h in self.hands.items():\n h.sort()\n \n def deal(self, deck):\n while len(deck)>0:\n for hn in handnames:\n self.hands[hn].add(deck.dealcard())\n\n def printGame(self):\n middle = [' '] * 5\n left = [''] * 5\n\n hand = 'North'\n outStrings = b.hands[hand].toString(middle, hand)\n for s in outStrings:\n print(s)\n\n hand = 'West'\n west = []\n for w in b.hands[hand].toString(left, hand):\n west.append((w + middle[0] + middle[0])[:-len(w)])\n hand = 'East'\n outStrings = b.hands[hand].toString(west, hand)\n for s in outStrings:\n print(s)\n \n hand = 'South'\n outStrings = b.hands[hand].toString(middle, hand)\n for s in outStrings:\n print(s)\n\n \n\nif __name__ == \"__main__\":\n import sys\n d = Deck()\n b = Bridge(d)\n b.printGame()\n" }, { "alpha_fraction": 0.2544303834438324, "alphanum_fraction": 0.30506327748298645, "avg_line_length": 34.95454406738281, "blob_id": "c1a9ac5d5697ebd8e6cee3055b21b87f67a902d6", "content_id": "6795f2b6ef303adea32fa62e5ba6cce47e5a76a0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 822, "license_type": "no_license", "max_line_length": 64, "num_lines": 22, "path": 
"/README.md", "repo_name": "mdenson-dayspring/praxis-bridge", "src_encoding": "UTF-8", "text": "This is a test repo that I am going to use to try pull requests \nfunctionality with.\n\nHere is an example bridge game displayed by the application.\n\n NORTH\n ♣ K J 7 6\n ♢ A 10 9 3\n ♡ 9\n ♠ K J 6 4\n WEST EAST\n ♣ 8 5 ♣ Q 9\n ♢ K Q 4 ♢ J 8 2\n ♡ K 10 5 2 ♡ A J 8 6 4\n ♠ A 10 9 2 ♠ Q 5 3\n SOUTH\n ♣ A 10 4 3 2\n ♢ 7 6 5\n ♡ Q 7 3\n ♠ 8 7\n\nThe idea for this application comes from Programming Praxis." } ]
2
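`carddeck.py` above packs each of the 52 cards into one integer: the rank index lives in the high bits (`card >> 2`) and the suit index in the low two bits (`card & 0x3`), with 52 and 53 reserved for jokers; `bridgeCompare` then sorts suit-major (spades first) and rank-descending within each suit. A standalone sketch of the encoding and that ordering:

```python
import functools

suits = ['♣', '♢', '♡', '♠']
ranks = ['2', '3', '4', '5', '6', '7', '8', '9', '10', 'J', 'Q', 'K', 'A']

def two_char(card):
    # 52 and 53 are the jokers; everything else decodes as rank plus suit.
    return 'JK' if card > 51 else ranks[card >> 2] + suits[card & 0x3]

# Same comparison as bridgeCompare: suit descending, then rank descending.
bridge_key = functools.cmp_to_key(
    lambda x, y: ((y & 0x3) - (x & 0x3)) or ((y >> 2) - (x >> 2)))

hand = sorted([0, 51, 7, 30], key=bridge_key)
print([two_char(c) for c in hand])  # ['A♠', '3♠', '9♡', '2♣']
```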
shikhardhing/local-movie-viewer
https://github.com/shikhardhing/local-movie-viewer
e9a28eb63425c1c9c9cf49a2bbd28b181aaf8b13
3fea246f7f3b21c6e0d9ce16be20486ede215075
d22242f2ad4659b3dbe7726587b0a3e7ca8056b0
refs/heads/master
2021-03-27T13:37:45.100081
2018-05-15T06:17:59
2018-05-15T06:17:59
89,730,353
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7874564528465271, "alphanum_fraction": 0.7874564528465271, "avg_line_length": 142.5, "blob_id": "6a1077cb40e920c92a487948acd135b678dfa39f", "content_id": "94fe6e4a7e8314e95d409ffacf9458626926f815", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 287, "license_type": "permissive", "max_line_length": 243, "num_lines": 2, "path": "/README.md", "repo_name": "shikhardhing/local-movie-viewer", "src_encoding": "UTF-8", "text": "## Watch offline movies in Netflix style. \nThe application fetches metadata of all the movies in the local machine, from IMDB, with an option to rename/delete movies using python script. Then it creates a web view, similar to Netflix, from all the data, to view those movies in browser.\n" }, { "alpha_fraction": 0.6727421879768372, "alphanum_fraction": 0.6819578409194946, "avg_line_length": 26.75, "blob_id": "760578bf682428eec1626df07e20414eae9e2253", "content_id": "ddae22f03d62a436677fd9f94491d89af22a71c9", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4883, "license_type": "permissive", "max_line_length": 124, "num_lines": 176, "path": "/movie_info_generator.py", "repo_name": "shikhardhing/local-movie-viewer", "src_encoding": "UTF-8", "text": "import glob\nimport os\nimport re\nimport sys\nimport timeit\nimport json\nimport requests\nimport requests_cache\nimport urllib.request\nfrom datetime import datetime\n#from MediaInfo import MediaInfo\n\ndef num(s):\n\ttry:\n\t\treturn int(s)\n\texcept ValueError:\n\t\ttry:\n\t \t\treturn float(s)\n\t\texcept ValueError:\n\t \t\treturn 0\n\ndef imdbRating(moviesInfoArray):\n\ttry:\n\t\treturn num(moviesInfoArray['imdbRating'])\n\texcept KeyError:\n\t\treturn 0\n\ndef releaseDate(moviesInfoArray):\n\ttry:\n\t\treturn num(moviesInfoArray['Year'])\n\texcept KeyError:\n\t\treturn 0\n\ndef renam(name):\t\t\t\t\t\t\t\t\t\t\t\t\t#converts naame to searchable format\n\tstring=name.replace(\"'\",\"\")\n\tstring=string.replace(\"-\",\"_\")\n\tstring = re.sub(r'[^A-Za-z0-9]', '_',string)\t\t\t\t\t\t#replace special characters with _\n\tstring = re.sub('(^_)([A-Z]+)', r'\\1_\\2', string).lower()\t\t#convert CamelCase to underscore\n\t#print(string)\n\treturn string\n\ndef request(string):\n\toriginal=string\n\twhile True:\n\t\t#print(string)\n\t\turl='https://v2.sg.media-imdb.com/suggests/'+string[0]+'/'+string+'.json'\n\t\tpage = requests.get(url)\n\t\ttrimmed=page.text[6+len(string):-1]\n\t\tjso=json.loads(trimmed)\n\t\tif('d' in jso ):\n\t\t\t#if(str(jso['d'][0]['q'])==\"feature\"):\t\t\t\t\t\t\t\t#only if a feature film\n\t\t\tif('id' in jso['d'][0]):\n\t\t\t\tmovieID=jso['d'][0]['id']\n\t\t\t\t#print(movieID)\n\t\t\t\treturn movieID\n\t\telse:\n\t\t\tprint(jso)\n\t\tlast_=string.rfind('_')\n\t\tstring=string[:last_]\t\n\t\tprint(string)\t\t\t\t\t\t\t\t\t\t\t\t#search for last _ and remove last word\n\t\tif(len(string)<=1):\n\t\t\tprint(original+'movie not found')\n\t\t\traise Exception(original+'movie not found')\n\ndef download_posters(moviesInfoArray):\n\tfor movie in moviesInfoArray:\n\t\ttry:\n\t\t\tprint(\"Downloading poster for \"+movie[\"Title\"])\n\t\t\turllib.request.urlretrieve(movie[\"Poster\"],'Movies/MoviesInfo/Movies_posters/'+(movie[\"Title\"]+'_'+movie[\"Year\"])+\".jpg\")\n\t\texcept Exception as e:\n\t\t\tprint(e)\n\t\t\tprint(movie[\"Title\"])\n\ndef download_subtitles(moviesInfoArray):\n\tfor movie in moviesInfoArray:\n\t\ttry:\n\t\t\tprint(\"Downloading Subtitle 
for \"+movie)\t\n\t\texcept Exception as e:\n\t\t\tprint(e)\n\t\t\tprint(movie[\"Title\"])\n\ndef write_to_file(moviesInfoArray,file):\n\toutfile=open('Movies/MoviesInfo/'+file+'.jsonp', 'w')\n\toutfile.write(file+\"=\")\n\tjson_string=json.dumps(moviesInfoArray)\n\toutfile.write(json_string)\n\toutfile.close()\n\ndef main():\n\tstart = timeit.default_timer()\t\t\t\t\t\t\t#code starts running here, calculting run time\n\trequests_cache.install_cache(cache_name='imdb_cache', backend='sqlite', expire_after=60000)\n\t\n\tif(len(sys.argv)>1):\n\t\tfolder=sys.argv[1]\n\telse:\n\t\tprint('using default folder:\"Movies\"')\n\t\tfolder='Movies'\n\tdire=folder+'/**/*.'\t\t\t\t\t\t\t\t\t\t\t\t\t\t\n\ttypes = (dire+'mp4', dire+'mkv',dire+'avi',dire+'wmv',dire+'flv',dire+'webm') # the tuple of file types\n\tnames = []\n\tmoviesInfoArray=[]\n\tfor files in types:\n\t names.extend(glob.glob(files,recursive=True))\n\tif not os.path.exists('Movies'):\n\t\tos.makedirs('Movies')\n\tif not os.path.exists('Movies/MoviesInfo'):\n\t\tos.makedirs('Movies/MoviesInfo')\n\tif not os.path.exists('Movies-noDetailFound'):\n\t\tos.makedirs('Movies-noDetailFound')\n\tfor i in names:\n\t\ttry:\n\t\t\tsize=os.path.getsize(i)/(1024*1024)\n\t\t\tif(size<200):\n\t\t\t\tcontinue\n\t\t\tfileName=os.path.basename(i)\n\t\t\textension=fileName[-3:]\n\t\t\tstring=renam(fileName[:-4])\n\t\t\tmovieID=request(string)\n\n\t\t\tmovieInfo = requests.get('http://www.omdbapi.com/?i='+movieID+'&plot=short').json()\n\t\t\tif(movieInfo[\"Rated\"]==\"R\"):\n\t\t\t\tmovieInfo[\"Rated\"]=\"18+\"\n\t\t\telif(movieInfo[\"Rated\"]=='PG-13'):\n\t\t\t\tmovieInfo[\"Rated\"]=\"13+\"\n\t\t\telse:\n\t\t\t\tmovieInfo[\"Rated\"]=\"\"\n\t\t\t\n\t\t\t#newName=movieInfo[\"Title\"]+'_'+movieInfo[\"Year\"]\n\t\t\t#location='Movies/'+newName+'.'+extension\n\t\t\t#os.rename(i,location)\n\t\t\t#movieInfo[\"Location\"]=newName+'.'+extension\n\t\t\tmovieInfo[\"Size\"]=str(int(size))+'MB'\n\t\t\tmovieInfo[\"Location\"]=i\n\t\t\tmovieInfo[\"Filename\"]=fileName[:-4]\n\t\t\tmovieInfo[\"Extension\"]=extension\n\t\t\tmoviesInfoArray.append(movieInfo)\n\t\texcept Exception as e:\n\t\t\tprint(e)\n\t\t\tlocation=os.path.basename(i)\n\t\t\tos.rename(i,'Movies-noDetailFound/'+location)\n\n\tmoviesInfoArray.sort(key=imdbRating, reverse=True)\n\n\tcounter=0\n\tfor movie in moviesInfoArray:\n\t\tmovie[\"ID\"]=counter\n\t\tcounter+=1\n\n\twrite_to_file(moviesInfoArray,'metadataByRating')\n\n\tgenreDict={}\n\tfor movie in moviesInfoArray:\n\t\tfor genre in movie[\"Genre\"].split(', '):\n\t\t\tif genre not in genreDict:\n\t\t\t\tgenreDict[genre]=[]\n\t\t\tgenreDict[genre].append(movie[\"ID\"])\n\tprint(genreDict)\n\twrite_to_file(genreDict,'genre')\n\n\n\tmoviesInfoArray.sort(key=releaseDate, reverse=True)\n\t#sorted_date = sorted(moviesInfoArray, key=lambda x: datetime.strptime(x['Date'], '%d %b %Y'))\n\twrite_to_file(moviesInfoArray,'metadataByDate')\n\n\t#download_posters(moviesInfoArray)\n\t#download_subtitles(moviesInfoArray)\n\t\n\tif not os.listdir('Movies-noDetailFound'):\n\t os.rmdir('Movies-noDetailFound')\n\n\tprint (str(len(names))+\"movies in\"+str(timeit.default_timer() - start)+\"seconds\")\n\t#write HTML file and open it \n\n\nif __name__ == '__main__':\n\tmain()" } ]
2
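The movie_info_generator.py file in the record above normalizes a raw filename and, when IMDB's suggestion endpoint returns nothing, retries with the last underscore-separated word dropped. A minimal sketch of that normalize-then-shorten loop with the network call stubbed out (the `lookup` callable here is a hypothetical stand-in for the real HTTP request, not part of the repo):

```python
import re

def normalize(name):
    # Collapse punctuation to underscores, split CamelCase, then lowercase
    name = re.sub(r'[^A-Za-z0-9]', '_', name.replace("'", ""))
    name = re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', name)
    return name.lower()

def search_with_fallback(name, lookup):
    """Retry lookup(query) with the trailing word dropped until it succeeds."""
    query = normalize(name)
    while len(query) > 1:
        result = lookup(query)        # an HTTP call in the real script
        if result is not None:
            return result
        cut = query.rfind('_')
        if cut <= 0:
            break
        query = query[:cut]           # drop the last word and retry
    raise LookupError(name + ': movie not found')

# Toy backend that only recognises queries starting with "the_matrix"
print(search_with_fallback("The.Matrix.1999.BluRay",
                           lambda q: "tt0133093" if q.startswith("the_matrix") else None))
```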
aatishb/fluent
https://github.com/aatishb/fluent
c34a39b484837265a0dc056032d9094f40b44573
9d64a929df1b9b37c02c1be1e4323114483adb0d
0b54fe39df677cfb4a08fb9d40c79e4a9af740f7
refs/heads/master
2016-09-05T14:40:43.099330
2012-08-30T20:40:12
2012-08-30T20:40:12
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.8144475817680359, "alphanum_fraction": 0.8144475817680359, "avg_line_length": 46, "blob_id": "2b228131f192476dcc8745eb29ebb7f7c8e4dd7a", "content_id": "e6974aa16e3dd7d4dac7715751cc724d1cebd52e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 706, "license_type": "no_license", "max_line_length": 124, "num_lines": 15, "path": "/README.txt", "repo_name": "aatishb/fluent", "src_encoding": "UTF-8", "text": "FLU.py\n\nreads in Fasta sequences of flu amino acid sequences and organizes them into a data structure called seqlist\nreads in Q matrix of amino acid substitution rates and implements a function to exponentiate it i.e. exp(Qt)\n\nMAKEPLOTS.py\n\nimports phylogenetic tree and calculates mean evolutionary distance between timepoints (years)\ncreates an array of frequencies at each site\nAt each site, calculates relative entropy and probability of data fitting the neutral model for each year to year transition\nMakes a bunch of plots:\n\trelative entropy vs. position\n\tsites under selection\n\tentropy of sequences chunks (windows) over time\n\tnumber of variant sites in chunks (windows) of the sequences over time\n\n" }, { "alpha_fraction": 0.6192339062690735, "alphanum_fraction": 0.6738602519035339, "avg_line_length": 26.652984619140625, "blob_id": "f7363780a3282a89ab03770ad5780972929a1345", "content_id": "73a172ae8e0c52406f0e78a3d93ba9163db71a33", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7414, "license_type": "no_license", "max_line_length": 147, "num_lines": 268, "path": "/makeplots.py", "repo_name": "aatishb/fluent", "src_encoding": "UTF-8", "text": "import numpy as np\nimport flu\nimport matplotlib.pyplot as plt\n\nAA = ['A', 'R', 'N', 'D', 'C', 'Q', 'E', 'G', 'H', 'I', 'L', 'K', 'M', 'F', 'P', 'S', 'T', 'W', 'Y', 'V']\n\ndef count_AA(seq):\n\n\tdict = {}\n\n\tfor word in AA:\n\t\tdict[word] = 0\n\n\tcount = 0.0\t\n\tfor word in seq:\n\t\tif word in AA:\t\t\n\t\t\tdict[word] += 1\n\t\t\tcount += 1\n\n\tif not (count == 0):\n\t\treturn [dict[word]/count for word in AA]\n\n\telse:\n\t\treturn [dict[word] for word in AA]\n\n\ndef deltat(virarray):\n\n\tmeandist = []\n\n\tprint \"Calculating mean deltaT\"\n\tfor year1 in years:\n\t\tfor year2 in years:\n\t\t\tif years.index(year2) == years.index(year1) + 1:\n\n\t\t\t\ti = years.index(year1)\n\t\t\t\n\t\t\t\tdist = []\n\n\t\t\t\tfor word1 in virarray[i]:\n\t\t\t\t\tfor word2 in virarray[i+1]:\n\t\t\t\t\t\tlabel1 = str(word1.place)+'/'+str(word1.year)+'/'+str(word1.id)\n\t\t\t\t\t\tlabel2 = str(word2.place)+'/'+str(word2.year)+'/'+str(word2.id)\n\t\t\t\t\t\tdist.append(tree.distance(label1,label2))\n\n\t\t\t\tmeandist.append(np.mean(dist))\n\tprint \"done\"\n\n\treturn meandist\n\n\ndef corr(data):\n\tdata_tr = np.transpose(data)\n\t#print data_tr[10]\n\tdata_r2 = np.corrcoef(data_tr)\n\treturn data_r2\n\n\ndef plotstuff(virarray, time, relent, prob, sel, entropy):\n\n\t\t\t\n\tprint \"\\n\"\n\t\n\tprint \"Generating figures\"\n\n\t#Plots all sites under selection\n\tplt.clf()\n\tplt.subplots_adjust(bottom=0)\n\tplt.imshow(sel,interpolation='nearest', cmap=plt.cm.Reds,aspect='auto')\n\tplt.yticks(range(len(years[0:-1])), years[0:-1])\n\tplt.savefig(\"heatmap.png\",bbox_inches='tight',dpi=100)\n\n\n\tsites = range(588)\n\t#Plots Probability of Neutral Model vs Position\n\tplt.clf()\n\tplt.subplots_adjust(bottom=-0.5)\n\tfor i in range(len(years)-1):\n\t\tax = 
plt.subplot(str(511+i))\n\t\tax.set_yscale('log')\n\t\tax.plot(sites, prob[i], label=years[i])\n\tplt.savefig(\"prob.png\",bbox_inches='tight',dpi=100)\n\n\n\t#Plots Relative Entropy vs Position\n\tplt.clf()\n\tplt.subplots_adjust(bottom=-0.5)\n\tfor i in range(len(years)-1):\n\t\tax = plt.subplot(str(511+i))\n\t\tax.set_yscale('linear')\n\t\tax.plot(sites, relent[i], label=years[i])\n\t\tax.plot(sites, (-np.log(0.01)/len(virarray[i+1]))*np.ones(588))\n\t\tax.plot(sites, (-np.log(0.001)/len(virarray[i+1]))*np.ones(588))\n\tplt.savefig(\"relent.png\",bbox_inches='tight',dpi=100)\n\n\n\t#Plots Average Relative Entropy vs Position\n\tminN = min(Nlist)\n\tmean_relent = [np.mean(word) for word in zip(*relent)]\n\tplt.clf()\n\tplt.subplots_adjust(bottom=0)\n\tax = plt.subplot(111)\n\tax.set_yscale('linear')\n\tax.plot(sites, mean_relent, label=years[i])\n\tax.plot(sites, (-np.log(0.01)/minN)*np.ones(588))\n\tax.plot(sites, (-np.log(0.001)/minN)*np.ones(588))\n\tplt.savefig(\"mean_relent.png\",bbox_inches='tight',dpi=100)\n\n\n\t#Plots Average Probability of Neutral Model vs Position\n\tmean_prob = [np.mean(word) for word in zip(*prob)]\n\tplt.clf()\n\tplt.subplots_adjust(bottom=0)\n\tax = plt.subplot(111)\n\tax.set_yscale('log')\n\tax.plot(sites, np.array(mean_prob,dtype=float), label=years[i])\n\tplt.savefig(\"mean_prob.png\",bbox_inches='tight',dpi=100)\n\n\n\t#Build a correlation matrix\t\n\tcorrmat = [[np.abs(word) for word in row] for row in corr(sel)]\n\n\t#Plots correlation among all sites\n\tplt.clf()\n\tplt.subplots_adjust(bottom=0)\n\tplt.imshow(corrmat, interpolation='nearest', cmap=plt.cm.Reds, aspect='auto')\n\t#plt.yticks(sites, sites)\n\tplt.savefig(\"corr_heatmap.png\",bbox_inches='tight',dpi=200)\n\n\n\t#Make a list of sites that are selected at least four out of five seasons\n\tsel_list = []\n\tsite = 0\n\tfor sel_site in zip(*sel):\n\t\tcount = 0\n\t\tfor word in sel_site:\n\t\t\tif word == True:\n\t\t\t\tcount += 1\n\t\tif count >= 4:\n\t\t\tsel_list.append(site)\n\t\tsite += 1\n\n\tprint sel_list\n\n\t#Plot Entropy over time for these sites\n\tplt.clf()\n\tplt.figure()\n\n\tfor site in sel_list:\n\t\tplt.plot(range(len(years)), entropy[site], label=str(site))\n\tplt.legend(loc='best')\n\tplt.xlabel('years')\n\tplt.savefig(\"entropy for selected sites.png\")\n\n\n\t#Divides sequence into chunks and makes a plot of entropy vs time\n\tplt.clf()\n\tplt.figure()\n\n\tstep_size = 50\n\tsite_chunks = [sites[i:i+step_size] for i in range(0, len(sites), step_size)]\n\n\tfor site_chunk in site_chunks:\n\n\t\tentropy_mean = []\n\t\tentropy_std = []\n\t\tfor year in range(len(years)):\n\t\t\ttemplist = []\n\t\t\tfor site in site_chunk:\n\t\t\t\t#if not entropy[site][year] == 0:\n\t\t\t\ttemplist.append(entropy[site][year])\n\n\t\t\tentropy_mean.append(flu.np.mean(templist))\n\t\t\tentropy_std.append(flu.np.std(templist))\n\n\t\tplt.errorbar(range(len(years)), entropy_mean, yerr = entropy_std/flu.np.sqrt(len(site_chunk)) , label=str(site_chunk[0])+'-'+str(site_chunk[-1]))\n\tplt.legend(loc='best')\n\tplt.xlabel('years')\n\tplt.savefig(\"entropy versus time.png\")\n\n\n\t#Split sequences into chunks of 100 amino acids and plot number of variant sites over time\n\tplt.clf()\n\tplt.figure()\n\n\tstep_size = 100\n\tsite_chunks = [sites[i:i+step_size] for i in range(0, len(sites), step_size)]\n\n\tfor site_chunk in site_chunks:\n\n\t\tentropy_mean = []\n\n\t\tfor year in range(len(years)):\n\t\t\ttemplist = []\n\n\t\t\tcount = 0\n\t\t\tfor site in site_chunk:\n\t\t\t\tif not 
entropy[site][year] == 0:\n\t\t\t\t\tcount += 1\n\t\t\tentropy_mean.append(count)\n\n\t\tplt.errorbar(range(len(years)), entropy_mean , label=str(site_chunk[0])+'-'+str(site_chunk[-1]))\n\tplt.legend(loc='best')\n\tplt.savefig(\"num variants sites.png\")\n\n\n\tprint \"Done\"\n\t\n\nfrom Bio import Phylo\ntree = Phylo.read('egyptH5NHA.phy_phyml_tree.txt', 'newick')\nnodes = tree.get_terminals()\n\nclade1 = Phylo.read('clade1.txt', 'newick')\nc1nodes = [word.name for word in clade1.get_terminals()]\nc1ids = [word.name.split('/')[2] for word in clade1.get_terminals()]\n\nclade2 = Phylo.read('clade2.txt', 'newick')\nc2nodes = [word.name for word in clade2.get_terminals()]\nc2ids = [word.name.split('/')[2] for word in clade2.get_terminals()]\n\n\nyears = ['2006','2007','2008','2009','2010','2011']\nvirarray = [[word for word in flu.seqlist if (word.place == \"Egypt\" and word.year == year)] for year in years]\n\n\ntime = [0.0143905347475, 0.039710268946, 0.0457343136228, 0.0451385507375, 0.0425961707733]\ntimec1 = [0.013992550923,0.040050920329,0.0492249534222,0.0624247846509,0.0557367576217]\ntimec2 = [0.0144531601812,0.0208246072785,0.0251183491289,0.0228981542124,0.0199896902198]\n\n\n# Create array of frequencies at each site. freqarray : years, sites, freq of amino acids\nfreqarray = np.array([[count_AA(word) for word in zip(*[vir.seq for vir in virpool])] for virpool in virarray], dtype=float)\n\n# Next, create and populate 3 arrays, for relative entropy, probability of fitting neutral model, and selection\nprob = [[] for year in years[0:-1]]\nrelent = [[] for year in years[0:-1]]\nsel = [[] for year in years[0:-1]]\nentropy = [[flu.entropy(season) for season in freq_position] for freq_position in zip(*freqarray)]\n\nNlist = []\n\nfor year1 in years:\n\tfor year2 in years:\n\t\tif years.index(year2) == years.index(year1) + 1:\n\t\t\ti = years.index(year1)\n\n\t\t\t#calculate relative entropy pairs of sites across years\n\t\t\txent = []\n\t\t\tfor (site1,site2) in zip(freqarray[i+1],freqarray[i]):\n\t\t\t\tif not (np.sum(site1)*np.sum(site2) == 0):\n\t\t\t\t\txent.append(flu.KLdivergence(site1,flu.expQ(site2,time[i])))\n\t\t\t\telse:\n\t\t\t\t\txent.append(0.0)\n\n\t\t\tN = len(virarray[i+1])\n\t\t\tNlist.append(N)\n\t\t\t#print N\n\t\t\t#create arrays of relative entropy, probability, and binary variable indicating selection. \n\t\t\t#arrays iterate over amino acid position. 
i = year in which samples collected\n\t\t\trelent[i] = np.array(xent,dtype=float)\n\t\t\tprob[i] = np.exp(-1.0*N*np.array(xent,dtype=float))\n\t\t\tsel[i] = np.less_equal(prob[i],0.001)\n\n\n\n#print deltat(virarray)\nplotstuff(virarray, time, relent, prob, sel, entropy)\n\n\n\n" }, { "alpha_fraction": 0.4359102249145508, "alphanum_fraction": 0.5900249481201172, "avg_line_length": 19.4489803314209, "blob_id": "fb41bb5f5ff71b4afa0c924612086254ddec41da", "content_id": "c5b02da8b95e3cb364bec9078805969e50492f64", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2005, "license_type": "no_license", "max_line_length": 247, "num_lines": 98, "path": "/flu.py", "repo_name": "aatishb/fluent", "src_encoding": "UTF-8", "text": "from Bio import SeqIO\nimport numpy as np\n\nclass sequence(object):\n def __init__(self, name, place, year, source, seq):\n self.id = name\n self.place = place\n self.year = year\n self.source = source\n\tself.seq = seq\n\nseqlist = []\n\nfor rec in SeqIO.parse('H5N HA.fst', 'fasta'):\n\n\tinfo = rec.description.split('/')\n\n\tif '' in info:\n\t\tinfo.remove('')\n\n\tif len(info) > 4:\n\t\tanimal = info[1]\t\n\t\tplace = info[2]\n\t\t\n\t\t\n\t\tif ' ' in info[4]:\n\t\t\tyear = info[4].split(' ')[1]\n\t\telif ' ' in info[5]:\n\t\t\tyear = info[5].split(' ')[1]\n\t\telse:\n\t\t\tyear = '?'\t\t\n\n\t\tseqlist.append(sequence(str(rec.id), place, year, animal, str(rec.seq)))\t\t\n\n\n\ndef KLdivergence(vec1,vec2):\n\n\txent = 0.0\n\tfor p,q in zip(vec1,vec2):\n\t\tif not (p == 0):\n\t\t\txent = xent + p*np.log(p/q)\n\treturn xent\n\n\n\ndef entropy(vec):\n\n\tent = 0.0\n\tfor p in vec:\n\t\tif not (p == 0):\n\t\t\tent = ent - p*np.log(p)\n\treturn ent\n\n\n\ndef initM(freq):\n\n\tf = open('flumatrix.txt','r')\n\trows = [line.split() for line in f]\n\tf.close()\n\n\tQ = np.array([[0.0 for i in range(20)] for j in range(20)])\n\n\tfor row in rows:\n\t\tif not (row == []):\n\t\t\ti = rows.index(row)\n\t\t\tj = 0\n\t\t\tfor word in row:\n\t\t\t\tQ[i][j] = float(word)*freq[j]\n\t\t\t\tQ[j][i] = float(word)*freq[i]\n\t\t\t\tj = j + 1\n\n\tfor i in range(20):\n\t\tsum = np.sum(Q[i])\n\t\tQ[i][i] = -sum\n\n\tevals, U = np.linalg.eig(Q)\n\tUt = np.linalg.inv(U)\n\tL = np.diag(evals)\n\t\n\treturn U,evals,Ut\n\n\n\ndef expQ(freq,time):\n\n\texpLt = np.diag(np.exp(evals*time))\n\tM = np.dot(np.dot(U,expLt),Ut)\n\treturn np.dot(M,freq)\n\n\n\ng = np.array([float(num) for num in '0.0470718\t0.0509102\t0.0742143\t0.0478596\t0.0250216\t0.0333036\t0.0545874\t0.0763734\t0.0199642\t0.0671336\t0.0714981\t0.0567845\t0.0181507\t0.0304961\t0.0506561\t0.0884091\t0.0743386\t0.0185237\t0.0314741\t0.0632292'.split()])\nf=g/np.sum(g)\n#f = np.array([ 0.05105 , 0.04219 , 0.08457 , 0.04646 , 0.02647 , 0.03385 , 0.07029 , 0.06871 , 0.02502 , 0.07154 , 0.08383 , 0.06506 , 0.02428 , 0.0329 , 0.03358 , 0.07621 , 0.04835 , 0.01764 , 0.04183 , 0.05618])\n\n[U,evals,Ut] = initM(f)\n\n" } ]
3
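The flu.py file above exponentiates the amino-acid substitution matrix by eigendecomposition, exp(Qt) = U diag(exp(lambda*t)) U^-1, and makeplots.py scores each site with a relative entropy against that neutral prediction. A compact NumPy sketch of both pieces; the 2x2 rate matrix below is invented purely for illustration:

```python
import numpy as np

def exp_Qt(Q, t):
    # exp(Qt) = U * diag(exp(lambda * t)) * U^-1 via eigendecomposition of Q
    evals, U = np.linalg.eig(Q)
    return (U @ np.diag(np.exp(evals * t)) @ np.linalg.inv(U)).real

def kl_divergence(p, q):
    # Relative entropy sum p*log(p/q), skipping zero-probability entries
    p, q = np.asarray(p, dtype=float), np.asarray(q, dtype=float)
    mask = p > 0
    return float(np.sum(p[mask] * np.log(p[mask] / q[mask])))

# Toy 2-state rate matrix: rows sum to zero, as for any CTMC generator
Q = np.array([[-1.0, 1.0],
              [ 2.0, -2.0]])
P = exp_Qt(Q, 0.5)                 # transition probabilities after t = 0.5
freq_then = np.array([0.7, 0.3])
freq_predicted = freq_then @ P     # neutral-model prediction
freq_now = np.array([0.5, 0.5])
print(kl_divergence(freq_now, freq_predicted))
```

In the record itself, the per-site probability of the neutral model is then exp(-N * KL), and sites with probability at or below 0.001 are flagged as under selection.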
Nuccy90/Advanced-Programming
https://github.com/Nuccy90/Advanced-Programming
b44f0f899d0fa06e3adfcebf6dc483921dbaaa4e
cd0960aefab3a383f5240f83091246370581b11b
ab1563381db4f6a9c610a29a5ad55177375ac433
refs/heads/master
2021-05-09T05:11:19.313822
2019-03-06T14:35:02
2019-03-06T14:35:02
119,301,859
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.4809088706970215, "alphanum_fraction": 0.48472708463668823, "avg_line_length": 34.58129119873047, "blob_id": "920525af80070e2a8f4e0f2cf2525ed5cad38e5c", "content_id": "4b6a2d6dd0af5580c1480ea022176a061d31651a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 15976, "license_type": "no_license", "max_line_length": 620, "num_lines": 449, "path": "/house_game.py", "repo_name": "Nuccy90/Advanced-Programming", "src_encoding": "UTF-8", "text": "import sys\n\nclass Room:\n\n def __init__(self, name):\n \n self.name = name\n\nclass Door:\n\n def __init__(self, directions, status, rooms):\n \n self.direct = directions\n self.status = status\n self.rooms = rooms\n\n def unlock(self, backpack):\n\n if self.status == \"locked\":\n possible = False\n for thing in backpack.check_contents():\n if type(thing) == Key:\n if thing.door == self.rooms:\n possible = True\n if possible:\n self.status = \"closed\"\n print (\"You unlocked the door\")\n else:\n print (\"You don't have the key for this door\")\n elif self.status == \"sealed\":\n print (\"This is the front door. It is sealed and can never be opened. You need to find another way out.\")\n else:\n print (\"This door is unlocked\")\n return self\n\n def open_door(self, backpack):\n\n if self.status == \"open\":\n print (\"This door is already open\")\n elif self.status == \"locked\":\n print(\"You need a key to open this door\")\n elif self.status == \"sealed\":\n print (\"This is the front door. It is sealed and can never be opened. You need to find another way out.\")\n else:\n self.status = \"open\"\n print (\"You opened the door\")\n return self\n\n def close_door(self, backpack):\n\n if self.status == \"closed\":\n print (\"This door is already closed\")\n elif self.status == \"locked\":\n print(\"This door is already closed and locked\")\n elif self.status == \"sealed\":\n print (\"This is the front door. It is sealed and can never be opened. You need to find another way out.\")\n else:\n self.status = \"closed\"\n print (\"You closed the door\")\n return self\n\nclass Item:\n\n def __init__(self, name, position, kind, key):\n\n self.name = name\n self.position = position\n self.kind = kind\n self.key = key\n\n def get_name(self):\n return self.name\n\n def get_type(self):\n return self.kind\n\n def check_ifkey(self, backpack):\n if self.key:\n for it in backpack.contents:\n if self.key == it.name:\n print(\"You've already found a key here\")\n return None\n print(\"You have found a key! You can use it to unlock one of the doors. It will be in your backpack when you need it.\")\n else:\n print (\"There's nothing here\")\n return self.key\n\n\nclass Movable_item(Item):\n\n def __init__(self, name, position, kind, key):\n\n Item.__init__(self, name, position, kind, key)\n\n\nclass Usable_item(Item):\n\n def __init__(self, name, position, kind, key, action):\n\n Item.__init__(self, name, position, kind, key)\n self.action = action \n\n\n def actions(self, house):\n print (\"Well, this didn't help anything. 
Carry on!\")\n\n\nclass Key:\n\n def __init__(self, name, door):\n \n self.name = name\n self.door = door\n\n def __str__(self):\n\n return self.name\n\nclass Backpack:\n\n def __init__(self):\n self.contents = []\n\n def check_contents(self):\n return self.contents\n\n def update_backpack(self, thing):\n self.contents.append(thing)\n\n\nclass Keypad:\n\n def __init__(self, text, hint, solution, letter, position):\n\n self.text = text\n self.hint = hint\n self.solution = solution\n self.letter = letter\n self.position = position\n\n def interact(self):\n\n print(self.text)\n print(\"Type 'hint' if you would like to get a hint.\")\n t = input()\n c = True\n while c:\n if t == 'hint':\n print(self.hint)\n t = input()\n else:\n if self.solution in t.lower():\n if self.letter != 'e':\n print(\"You are correct! You get a letter added to your backpack\")\n return self.letter\n elif self.solution not in t.lower():\n print(\"Sorry, that's not the correct answer. Type 'keypad' if you want to try again.\")\n return None\n\nclass House:\n\n def __init__(self, rooms, doors, items, keys, riddles, location):\n\n self.rooms = rooms\n self.doors = doors\n self.items = items\n self.keys = keys\n self.riddles = riddles\n self.location = location\n\n\n def update_location(self, new_room):\n self.location = new_room\n print (\"You are now in the\", new_room)\n\n\n def room_info(self):\n\n li = []\n print(\"You are in the\", self.location)\n for door in self.doors:\n if door.rooms[0] == self.location:\n li.append(door.direct[0])\n elif door.rooms[1] == self.location:\n li.append(door.direct[1])\n print (\"There are doors towards\", \", \".join(door for door in li))\n li = []\n for item in self.items:\n if item.position == self.location:\n li.append(item.name)\n print (\"There are the following items:\", \", \".join(item for item in li))\n print(\"There is also a keypad on the wall. 
You can interact with it by typing 'keypad'.\")\n \nclass HouseReader:\n\n def __init__(self, config):\n\n self.config = config\n\n def build_house(self):\n \n rooms = []\n doors = []\n items = []\n keys = []\n riddles = []\n \n with open(self.config, 'r') as fi:\n\n lines = fi.read().splitlines()\n \n for line in lines:\n if line.startswith('room'):\n li = line.split()\n rooms.append(Room(li[1]))\n elif line.startswith('door'):\n line = line.strip('door ')\n li = line.split()\n li[0] = (li[0].split('-'))\n doors.append(Door(li[0], li[1], (li[2],li[3])))\n elif line.startswith('item'):\n line = line.strip('item ')\n li = line.split()\n items.append(li)\n elif line.startswith('key'):\n li = line.split()\n keys.append(Key(li[1],(li[2], li[3])))\n elif line.startswith('riddle'):\n li = line.split('*')\n riddles.append(Keypad(li[1], li[2], li[3], li[4], li[5]))\n elif line.startswith('start'):\n line = line.strip('start ')\n start = line\n \n for i in range(len(items)):\n if \"STATIONARY\" in items[i]:\n items[i] = Item(items[i][0], items[i][1], items[i][2], items[i][3])\n elif \"MOVE\" in items[i]:\n items[i] = Movable_item(items[i][0], items[i][1], items[i][2], items[i][3])\n elif \"USE\" in items[i]:\n items[i] = Usable_item(items[i][0], items[i][1], items[i][2], items[i][3], items[i][4])\n\n \n this_house = House(rooms, doors, items, keys, riddles, start)\n return this_house\n\nclass Game_Engine:\n\n def build_commands(self):\n\n comm_dict = {'open': (\"open_door\",\"Door\"),\n 'show': (\"room_info\",\"House\"),\n 'search': (\"check_ifkey\",\"Key\"),\n 'pick_up': (\"\", \"Item\"),\n 'close' : (\"close_door\", \"Door\"),\n 'go': (\"update_location\", \"House\"),\n 'release': (\"update_position\", \"Release\"),\n 'inventory': (\"check_contents\", \"Backpack\"),\n 'eat': (\"actions\", \"Item\"),\n 'read': (\"actions\", \"Item\"),\n 'wear': (\"actions\", \"Item\"),\n 'keypad':(\"interact\", \"Keypad\"),\n 'unlock': (\"unlock\", \"Door\")}\n return comm_dict\n\n def play(self):\n \n reader = HouseReader(sys.argv[1])\n myHouse = reader.build_house()\n game_end = False\n d = self.build_commands()\n\n inventory = Backpack()\n holding = []\n print(\"Welcome to the House Game! You've just woken up and you don't know how you got here. You are in the master bedroom of an unfamiliar house. Type 'commands' to see what you can do, or type 'show' for information on the room you are in. You can also type 'inventory' if you want to see the items you have found; they are going to be in a backpack that you found and decided to take with you. If you pick up an object you will be holding it until you decide to release it. If you want to see which items you are currently holding, type 'holding'. You can type 'quit' to exit the game at any time. Good luck!\")\n text = ''\n\n while text != \"quit\" and game_end == False:\n\n text = input()\n \n if text == 'quit':\n print (\"Thank you for playing! Goodbye!\")\n sys.exit(0)\n \n li = text.split()\n try:\n command = li[0]\n except:\n pass\n if command not in d:\n if command == 'commands':\n print (\"Valid commands are composed of an action and possibly a thing. If you want to open a door, you should type 'open' and then the direction that door leads to, for example 'open E' to open the door towards the east. If you want to pick up an item, you should type 'pick_up' followed by the item you want to pick up, for example 'pick_up lamp'. 
You can use the following commands:\", ', '.join(k for k in d))\n elif command == 'holding':\n a = []\n for it in holding:\n if isinstance(it, Movable_item):\n a.append(it.name)\n if a:\n print(\"You are holding the following items:\", *a)\n else:\n print(\"You are not holding any items\")\n else:\n print(\"Please type a valid command\")\n else:\n tup = d[command]\n if tup[1] == \"Door\":\n self.door_commands(tup[0],li[1], inventory, myHouse)\n elif tup[1] == \"Item\":\n r = self.item_commands(tup[0],li[1], myHouse)\n holding.append(r)\n elif tup[1] == \"House\":\n try:\n self.house_commands(tup[0], myHouse, holding, li[1])\n except:\n self.house_commands(tup[0], myHouse, holding)\n elif tup[1] == \"Release\":\n holding = self.release(myHouse, holding, li[1])\n elif tup[1] == \"Keypad\":\n letter = self.keypad_commands(myHouse)\n if letter =='e':\n game_end = True\n if letter and letter not in inventory.contents:\n inventory.update_backpack(letter)\n elif tup[1] == \"Backpack\":\n content = inventory.check_contents()\n print(\"You have the following items in your backpack:\", *content)\n elif tup[1] == \"Key\":\n f = self.key_commands(tup[0],li[1], myHouse, inventory)\n if f != None:\n for k in myHouse.keys:\n if k.name == f:\n k_object = k\n inventory.update_backpack(k_object)\n print(\"Congratulations! You made it out on the balcony. The fire escape is to your right and you are free to go. Thank you for playing!\")\n\n def door_commands(self, function, variable, backpack, house):\n\n if variable not in \"WSNE\":\n print (\"You can only open doors. Doors are named E, W, S, N.\")\n else:\n found = False\n for door in house.doors:\n if (door.direct[0] == variable and door.rooms[0] == house.location) \\\n or (door.direct[1] == variable and door.rooms[1] == house.location):\n var = door\n method_to_use = getattr(var, function)\n new_door = method_to_use(backpack)\n for door in house.doors:\n if door.rooms == new_door.rooms:\n door = new_door\n found = True\n \n if found == False:\n print(\"There is no door in that direction\")\n\n def item_commands(self, function, item, house):\n\n var = None\n for it in house.items:\n if item == it.name:\n if it.position == house.location:\n var = it\n if function == \"\":\n print (\"You picked up the\", item)\n return var\n else:\n method_to_use = getattr(var, function)\n method_to_use(house)\n return []\n else:\n var = 1\n print (\"There is no such thing in this room\")\n return []\n if var == None:\n print(\"There is no such thing in this room\")\n return []\n\n def house_commands(self, function, house, holding, variable = None):\n \n if function == \"update_location\":\n is_door = False\n for door in house.doors:\n if (door.direct[0] == variable and door.rooms[0] == house.location) \\\n or (door.direct[1] == variable and door.rooms[1] == house.location):\n is_door = True\n if door.status == 'open':\n tup = door.rooms\n for r in tup:\n if r != house.location:\n room = r\n house.update_location(room)\n break\n else:\n if door.status == \"sealed\":\n print (\"This is the front door. It is sealed and can never be opened. 
You need to find another way out.\")\n else:\n print (\"This door is\", door.status)\n if is_door == False:\n print (\"There is no door in this direction\")\n else:\n var = house\n method_to_use = getattr(var, function)\n method_to_use()\n\n def release(self, house, holding, variable):\n\n found = False\n for it in house.items:\n if variable == it.name:\n found = True\n if it in holding:\n it.position = house.location\n print(\"You released the\", variable)\n holding.remove(it)\n else:\n print(\"You are not holding this item\")\n if found == False:\n print(\"You are not holding this item\")\n return holding\n\n def keypad_commands(self, house):\n\n for k in house.riddles:\n if house.location == k.position:\n return k.interact()\n\n\n def key_commands(self, function, variable, house, backpack): \n\n var = None\n for it in house.items:\n if variable == it.name:\n var = it\n method_to_use = getattr(var, function)\n return method_to_use(backpack)\n if not var:\n print (\"There is no such item in the room\")\n return None\n \n \ndef __main__():\n\n game = Game_Engine()\n game.play()\n\n__main__()\n\n\"\"\" Things to fix: if you apply a function to the wrong object, keypads, end of game \"\"\"\n" } ]
1
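The house_game.py file above avoids a long if/elif chain by mapping command words to method names and resolving them with getattr at runtime. A stripped-down sketch of that dispatch pattern (this Door class and command table are simplified stand-ins, not the game's full API):

```python
class Door:
    def __init__(self):
        self.status = "closed"

    def open_door(self):
        self.status = "open"
        return "You opened the door"

    def close_door(self):
        self.status = "closed"
        return "You closed the door"

COMMANDS = {"open": "open_door", "close": "close_door"}

def dispatch(obj, command):
    # Look up the bound method by name at runtime instead of branching
    method_name = COMMANDS.get(command)
    if method_name is None:
        return "Please type a valid command"
    return getattr(obj, method_name)()

door = Door()
print(dispatch(door, "open"))    # You opened the door
print(dispatch(door, "jump"))    # Please type a valid command
```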
jimmyeatsquaids/Pun-Intended
https://github.com/jimmyeatsquaids/Pun-Intended
53cfd78f9eb0826bb40485a3b28e46e9c72503de
ce3e2c27c1c8d5683db1fc4428447ae41eacd717
308c157e31f0c6f2779a3cefa07b26d5d0e6f371
refs/heads/master
2021-01-23T11:32:58.038851
2017-06-01T17:54:26
2017-06-01T17:54:26
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5639651417732239, "alphanum_fraction": 0.5753516554832458, "avg_line_length": 35.43902587890625, "blob_id": "81b19c39f1d3c84b4a0b023292482e2e4555bb1c", "content_id": "6a967a51f94f056b25e5cf1a7ae4f1c048da22b7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1493, "license_type": "no_license", "max_line_length": 176, "num_lines": 41, "path": "/main.py", "repo_name": "jimmyeatsquaids/Pun-Intended", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n\nimport argparse\nfrom SearchEngine import SearchEngine\n\nif __name__ == \"__main__\":\n\n parser = argparse.ArgumentParser(\n description='Helps you build puns and lyrics')\n parser.add_argument('--vecs', type=str, default=\"glove.6B.100d.txt\", help=\"Path to Glove/word2vec file (default: %(default)s)\")\n parser.add_argument('--rhyme', action=\"store_true\", help=\"Restrict phonological matches to rhymes\")\n parser.add_argument('--ortho', action=\"store_true\", help=\"Use orthographic matches instead of phonological ones\")\n cmd_args = parser.parse_args()\n\n print(\"Hello and welcome to the pun aid!\")\n se = SearchEngine(1000, cmd_args.vecs, combine='s')\n\n while True:\n\n query = input(\"Start search: \\\"Sounds like x\\\" \\\"Has to do with y\\\" -OR- Change combination method: -s (Summation) or -p (Multiplication) or -i (List intersection): \")\n query = query.split()\n\n if not query:\n break\n\n elif len(query) > 2:\n print(\"Sorry, you need to provide two arguments!\")\n continue\n elif len(query) == 1:\n if query[0] == '-s':\n se.combine = 's'\n elif query[0] == '-p':\n se.combine = 'p'\n elif query[0] == '-i':\n se.combine = ''\n else:\n print('To change combination method please use: -i -p -s ')\n continue\n else:\n\n print(se.execute_query(query[0], query[1]))" }, { "alpha_fraction": 0.5656642317771912, "alphanum_fraction": 0.5763227939605713, "avg_line_length": 38.780303955078125, "blob_id": "12004bc7d98a04507cd921803c133ea6625fbc55", "content_id": "744e604e5eaec32156eacde136be5d1d6ee2fce3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5254, "license_type": "no_license", "max_line_length": 149, "num_lines": 132, "path": "/SearchEngine.py", "repo_name": "jimmyeatsquaids/Pun-Intended", "src_encoding": "UTF-8", "text": "import gensim.scripts.glove2word2vec\nfrom levenshtein import levenshtein\nfrom gensim.models.keyedvectors import KeyedVectors as kv\nimport sys\nimport os\n\n\nclass SearchEngine():\n \"\"\"This is the class that will handle all of the operations and queries and such\"\"\"\n\n def __init__(self, d_of_comparisons, vectorfile, n_of_results=5, combine='s'):\n\n self.d_of_comparisons = d_of_comparisons # max dimensionality of the two lists\n self.n_of_results = n_of_results # how many resutlts the search engine is supposed to output\n self.combine = combine # later to be implemented as choice between combination operations\n\n #If vector file in gloVe format, tranfsorm it into word2vec and provides option to store it as binary\n\n create_bin = 'n'\n\n try:\n if sys.argv[1][-4:] == \".bin\":\n binary = True\n else:\n for filename in os.listdir('data'):\n if filename == 'word2vec.' + vectorfile[:-4] + '.bin':\n print(\"Found binary file with the same name as the specified one. 
Will be loading the binary.\")\n vectorfile = filename\n binary = True\n break\n binary = False\n\n self.word_vectors = kv.load_word2vec_format(os.path.join('data', vectorfile), binary=binary) # retrieve word vectors from file\n\n if not binary:\n create_bin = input(\"Would you like to create a binary file for your vector file, so that future loading times may be shortened? y/n\")\n\n except ValueError as v:\n print(v)\n print(\"Converting gloVe to word2vec format.-------------\")\n\n create_bin = input(\"Would you like to create a binary file for your vector file, so that future loading times may be shortened? y/n\")\n\n gensim.scripts.glove2word2vec.glove2word2vec(os.path.join('data', vectorfile), os.path.join('data', 'word2vec.' + vectorfile))\n self.word_vectors = kv.load_word2vec_format(os.path.join('data', 'word2vec.' + vectorfile))\n\n if create_bin == 'y':\n self.word_vectors.save_word2vec_format(os.path.join('data', 'word2vec.' + vectorfile[:-4] + '.bin'), binary=True)\n\n self.phondict = dict()\n\n # Fill the phonetic dictionary\n with open(os.path.join('data', 'cmudict-0.7b.utf8')) as phondict:\n for line in phondict:\n if line[:3] == \";;;\": #skip the first couple of lines\n continue\n\n line = line.split(maxsplit=1)\n #print(line[0], line[1])\n self.phondict[line[0].lower()] = line[1]\n\n def get_phon_list(self, word, max_dist):\n \"\"\"Returns a list of phonetically similar words to word with max levenshtein distance of max_dist\"\"\"\n try:\n phon_rep = self.phondict[word]\n\n except KeyError:\n print(\"Word not found in data bank!\")\n\n results = []\n for x in self.phondict:\n lvdist = levenshtein(self.phondict[x].split(), phon_rep.split())\n\n if lvdist <= max_dist:\n results.append((x, lvdist))\n\n results.sort(key=lambda x: x[1])\n return [x[0] for x in results[:self.d_of_comparisons]]\n\n def combines(self, ass_list, phonlist):\n\n \"\"\"Combines the associative list with the phonetic list. 
To be implemented: fn_combo which steers the \n combination operation\"\"\"\n\n # if len(ass_list) != len(phonlist): # The way this is implemented, this is not...\n\n # raise ValueError(\"listA must have the same length as listB!\") # ...required\n\n fn_combo = None\n if self.combine == 's':\n fn_combo = lambda m, n: m+n\n\n elif self.combine == 'p':\n fn_combo = lambda m, n: (m+1)*(n+1) # If either number is 0, the calculation is senseless\n\n if fn_combo: #If either summation or multiplication has been chosen as combination method\n\n resdict = dict()\n for x in range(len(ass_list)):\n if ass_list[x] in phonlist:\n resdict[ass_list[x]] = x\n else:\n resdict[ass_list[x]] = 100000000 + x\n for y in range(len(phonlist)):\n try:\n resdict[phonlist[y]] = fn_combo(resdict[phonlist[y]], y)\n except KeyError:\n resdict[phonlist[y]] = 100000000 + y\n\n else: #If nothing has been chosen -> default to intersection\n\n return [x for x in ass_list if x in phonlist]\n\n reslist = []\n\n for word in resdict:\n reslist.append((word, resdict[word]))\n\n reslist.sort(key=lambda x: x[1])\n\n print([x for x in ass_list if x in phonlist])\n\n return reslist[:self.n_of_results]\n # return [x[0] for x in reslist[:self.n_of_results]]\n\n def execute_query(self, soundslike, association):\n\n ass_list = [x[0] for x in\n self.word_vectors.most_similar(positive=[association], topn=self.d_of_comparisons)]\n phon_list = self.get_phon_list(soundslike, 2)\n\n return self.combines(ass_list, phon_list)\n\n\n\n" }, { "alpha_fraction": 0.7727272510528564, "alphanum_fraction": 0.7820512652397156, "avg_line_length": 44.157894134521484, "blob_id": "7e72351269c559ade965077bb6c8b5961c2dd5bf", "content_id": "1666f3bc27e7d8245ae1a551159dc6ca8a7b519a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 858, "license_type": "no_license", "max_line_length": 159, "num_lines": 19, "path": "/README.md", "repo_name": "jimmyeatsquaids/Pun-Intended", "src_encoding": "UTF-8", "text": "# Pun-Intended\nAn application designed to help users find puns.\n\nNotes on how to get the program to work:\n\n1.\nGet gensim running which requires some workarounds on windows as you'll be needing numpy+mkl and scipy which can be found\non http://www.lfd.uci.edu/~gohlke/pythonlibs/\n\n2.\nNow, what I did was download the gloVe 6B Vectors based on the Wikipedia2014 and Gigaword5 corpus. https://nlp.stanford.edu/projects/glove/\nAdd them to the data folder.\n\nWhen starting the program you may choose between the different vector models which are in text format out of the box.\nAs the loading of binary files is a lot faster, the program will prefer them over .txt files in case of there being a file with the same name, but .bin ending.\nYou will be offered to create a binary file corresponding to the .txt file you put in, unless there already is one.\n\n\neazteregg\n" } ]
3
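The SearchEngine.py file above imports a `levenshtein` helper that is not included in this record, and applies it to CMUdict phoneme lists rather than raw characters. A minimal dynamic-programming sketch of edit distance over token sequences, written on the assumption that the missing module implements the standard algorithm:

```python
def levenshtein(a, b):
    """Edit distance between two token sequences (e.g. phoneme lists)."""
    prev = list(range(len(b) + 1))
    for i, tok_a in enumerate(a, start=1):
        curr = [i]
        for j, tok_b in enumerate(b, start=1):
            cost = 0 if tok_a == tok_b else 1
            curr.append(min(prev[j] + 1,          # deletion
                            curr[j - 1] + 1,      # insertion
                            prev[j - 1] + cost))  # substitution
        prev = curr
    return prev[-1]

# "cat" K AE T vs "bat" B AE T differ by one phoneme substitution
print(levenshtein("K AE T".split(), "B AE T".split()))  # 1
```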
hacertilbec/Assignment4
https://github.com/hacertilbec/Assignment4
870520e6986da9a8e114370538b90cad794c0732
1cca207e05aba729399b4c26742a4f7158e7a65d
291a7291c71e2bd5358445c6f7e32d4ca32811dd
refs/heads/master
2021-01-10T11:05:03.392573
2016-01-06T18:35:08
2016-01-06T18:35:08
49,015,081
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7284345030784607, "alphanum_fraction": 0.7550585865974426, "avg_line_length": 38.16666793823242, "blob_id": "e6f68fb1761e2b07eb35b49bf34f72c076dcec87", "content_id": "b61a79100e0616a6f2793ecca39f2d3bc1e09116", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 939, "license_type": "no_license", "max_line_length": 64, "num_lines": 24, "path": "/project/app/models.py", "repo_name": "hacertilbec/Assignment4", "src_encoding": "UTF-8", "text": "from __future__ import unicode_literals\n\nfrom django.db import models\n\nclass Teacher(models.Model):\n\tfirstName = models.CharField(max_length=30, blank = True)\n\tlastName = models.CharField(max_length=30, blank = True)\n\toffice_details = models.CharField(max_length=200, blank = True)\n\tphone_number = models.CharField(max_length=11)\n\temail = models.EmailField(max_length=70,blank=True)\n\nclass Student(models.Model):\n\tfirstName = models.CharField(max_length=30, blank = True)\n\tlastName = models.CharField(max_length=30, blank = True)\n\temail = models.EmailField(max_length=70,blank=True)\n\n\t\nclass Course(models.Model):\n\tname = models.CharField(max_length=70, blank = True)\n\tcode = models.CharField(max_length=20, blank = True)\n\tclassroom = models.CharField(max_length=20, blank = True)\n\ttimes = models.CharField(max_length=70, blank = True)\n\tteacher = models.ForeignKey(Teacher, null=True)\n\tstudents = models.ManyToManyField(Student, null=True)" }, { "alpha_fraction": 0.7172578573226929, "alphanum_fraction": 0.7172578573226929, "avg_line_length": 34.74390411376953, "blob_id": "5432f26a52a4bc52649c3879e0ae630a5be773ad", "content_id": "e7c629a9c4bcc1966c25a0ed36c8fa29793ef097", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2932, "license_type": "no_license", "max_line_length": 129, "num_lines": 82, "path": "/project/app/views.py", "repo_name": "hacertilbec/Assignment4", "src_encoding": "UTF-8", "text": "\nfrom django.shortcuts import render\nfrom django.http import HttpResponseRedirect, HttpResponse\nfrom django.shortcuts import render_to_response, RequestContext\nfrom .models import *\nfrom .forms import *\n\n\ndef teacher_form(request):\n\tif request.method == 'POST':\n\t\tform = teacherForm(request.POST)\n\t\tif form.is_valid():\n\t\t\tfirstName = form.cleaned_data[\"firstName\"]\n\t\t\tlastName = form.cleaned_data[\"lastName\"]\n\t\t\toffice_details = form.cleaned_data[\"office_details\"]\n\t\t\tphone_number = form.cleaned_data[\"phone_number\"]\n\t\t\temail = form.cleaned_data[\"email\"]\n\t\t\ta = Teacher(firstName = firstName, lastName = lastName, office_details=office_details, phone_number=phone_number, email=email)\n\t\t\ta.save()\n\t\t\treturn HttpResponseRedirect(\"/allTeachers/\")\n\telse:\n\t\tform = teacherForm()\n\treturn render_to_response('Form.html', {'form':form}, RequestContext(request) )\n\ndef student_form(request):\n\tif request.method == 'POST':\n\t\tform = studentForm(request.POST)\n\t\tif form.is_valid():\n\t\t\tfirstName = form.cleaned_data[\"firstName\"]\n\t\t\tlastName = form.cleaned_data[\"lastName\"]\n\t\t\temail = form.cleaned_data[\"email\"]\n\t\t\ta = Student(firstName = firstName, lastName = lastName, email=email)\n\t\t\ta.save()\n\t\t\treturn HttpResponseRedirect(\"/allStudents/\")\n\telse:\n\t\tform = studentForm()\n\treturn render_to_response('Form.html',{\"form\":form}, RequestContext(request))\n\ndef course_form(request):\n\tif request.method == 
'POST':\n\t\tform = courseForm(request.POST)\n\t\tif form.is_valid():\n\t\t\tname = form.cleaned_data[\"name\"]\n\t\t\tcode = form.cleaned_data[\"code\"]\n\t\t\tclassroom = form.cleaned_data[\"classroom\"]\n\t\t\ttimes = form.cleaned_data[\"times\"]\n\t\t\ta = Course(name=name, code=code, classroom = classroom, times=times)\n\t\t\ta.save()\n\t\t\treturn HttpResponseRedirect(\"/allCourses/\")\n\telse:\n\t\tform = courseForm()\n\treturn render_to_response('Form.html',{\"form\":form}, RequestContext(request))\n\ndef allTeachers(request):\n\ta = Teacher.objects.all()\n\treturn render_to_response('successT.html', {'list': a}, RequestContext(request))\n\ndef allStudents(request):\n\ta = Student.objects.all()\n\treturn render_to_response('successS.html', {'list': a}, RequestContext(request))\n\ndef allCourses(request):\n\ta = Course.objects.all()\n\treturn render_to_response('successC.html', {'list': a}, RequestContext(request))\n\ndef enroll_students(request):\n\tif request.method == 'POST':\n\t\tform = EnrollStudents(request.POST)\n\t\tif form.is_valid():\n\t\t\tstudent = form.cleaned_data['student']\t\t\t\n\t\t\tcourse = form.cleaned_data['course']\n\t\t\tstd = Student.objects.get(firstName = student)\n\t\t\tc = Course.objects.get(name = course)\n\t\t\tc.students.add(std)\n\n\t\t\treturn HttpResponseRedirect(\"/course/\"+course)\n\telse:\n\t\tform = EnrollStudents()\n\treturn render_to_response('Form.html',{\"form\":form}, RequestContext(request))\n\ndef enrolled_students(request,course):\n\tstds = Course.objects.get(name=course).students.all()\n\treturn render_to_response('courses.html', {'list': stds, 'name': course}, RequestContext(request))\n" }, { "alpha_fraction": 0.7255734801292419, "alphanum_fraction": 0.7510620355606079, "avg_line_length": 30.756755828857422, "blob_id": "7e5f19afb740320d9ed1756a12b6c1d70689af27", "content_id": "62de194b440e97c3be4e88f9a4b0ad8b8b82b935", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1177, "license_type": "no_license", "max_line_length": 76, "num_lines": 37, "path": "/project/app/forms.py", "repo_name": "hacertilbec/Assignment4", "src_encoding": "UTF-8", "text": "from django import forms\nfrom .models import *\n\nclass ContactForm(forms.Form):\n\tsubject = forms.CharField(max_length = 30)\n\temail = forms.EmailField()\n\tmessage = forms.CharField(max_length =200)\n\n\tdef clean_message(self):\n\t\tmessage = self.cleaned_data['message']\n\n\nclass teacherForm(forms.Form):\n\tfirstName = forms.CharField(max_length=30)\n\tlastName = forms.CharField(max_length=30)\n\toffice_details = forms.CharField(max_length=200)\n\tphone_number = forms.CharField(max_length=11)\n\temail = forms.EmailField(max_length=70)\n\nclass studentForm(forms.Form):\n\tfirstName = forms.CharField(max_length=30)\n\tlastName = forms.CharField(max_length=30)\n\temail = forms.EmailField(max_length=70)\n\nclass courseForm(forms.Form):\n\tname = forms.CharField(max_length=70)\n\tcode = forms.CharField(max_length=20)\n\tclassroom = forms.CharField(max_length=20)\n\ttimes = forms.CharField(max_length=70)\n\n\nstudentss = [(std.firstName,std.firstName) for std in Student.objects.all()]\ncoursess = [(c.name,c.name) for c in Course.objects.all()]\n\nclass EnrollStudents(forms.Form):\n\tstudent = forms.ChoiceField(studentss, widget = forms.Select())\n\tcourse = forms.ChoiceField(coursess, widget = forms.Select())\n\n\n" } ]
3
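The views.py file above enrolls a student by looking both objects up by name and appending to the course's many-to-many set, which implicitly assumes first names are unique. A framework-free sketch of the same enroll-and-list flow, with plain Python objects standing in for the Django models:

```python
class Student:
    def __init__(self, name):
        self.name = name

class Course:
    def __init__(self, name):
        self.name = name
        self.students = []

    def enroll(self, student):
        # Mirror of course.students.add(...): enrolling twice is a no-op
        if student not in self.students:
            self.students.append(student)

courses = {"Databases": Course("Databases")}
students = {"Ada": Student("Ada")}

courses["Databases"].enroll(students["Ada"])
print([s.name for s in courses["Databases"].students])  # ['Ada']
```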
ogparry1/COP4501Gandolf
https://github.com/ogparry1/COP4501Gandolf
ec41c4f3e5a60a984156f95e2ba6a9bef30daf49
6c6c1328689c88716bb881fd6da3edb16ae1e00a
5ba69e6d58fbb6639e92fbbdca386c688464fdde
refs/heads/master
2021-03-19T09:01:57.627640
2017-04-26T15:26:13
2017-04-26T15:26:13
89,286,414
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6553054451942444, "alphanum_fraction": 0.6617363095283508, "avg_line_length": 18.185184478759766, "blob_id": "a6045586353ffd227c734b4d57684d76b8ef1708", "content_id": "b986eca05e3098be16f47888ee10d36eac4e9bf2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1555, "license_type": "no_license", "max_line_length": 118, "num_lines": 81, "path": "/Code/C++/Classifier.cpp", "repo_name": "ogparry1/COP4501Gandolf", "src_encoding": "UTF-8", "text": "#include <iostream>\n#include <fstream>\n#include <cstdio>\n#include <string>\n\nusing namespace std;\n\nclass LearningMachine\n{\n\tpublic:\n\t\tLearningMachine(string);\n\t\t~LearningMachine();\n\t\tvoid setClasses();\n\t\tvoid separateSets(string);\n\t\tvoid trans(int*, int, int);\n\tprivate:\n\t\tstring Labels;\n\t\tint *TrainingClasses, *TrainingData;\n\t\tint *TestingClasses, *TestingData;\n\t\tint *Y, *Weight;\n\t\tint Lambda;\n};\n\n// constructors/destructors\nLearningMachine::LearningMachine(string fileName)\n{\n\t// append the .txt extension if it is missing, then read the header line of labels\n\tif (fileName.length() < 4 || fileName.substr(fileName.length()-4) != \".txt\") fileName += \".txt\";\n\tifstream data(fileName.c_str());\n\tgetline(data, Labels);\n\tTrainingClasses = TrainingData = NULL;\n\tTestingClasses = TestingData = NULL;\n\tY = Weight = NULL;\n\tLambda = 0;\n}\n\nLearningMachine::~LearningMachine()\n{\n\t// every member is a flat array, so delete[] each one\n\tdelete[] TrainingClasses;\n\tdelete[] TrainingData;\n\tdelete[] TestingClasses;\n\tdelete[] TestingData;\n\tdelete[] Y;\n\tdelete[] Weight;\n}\n\n// essential (in order of operation) functions\nvoid LearningMachine::setClasses()\n{\n\t// TODO: parse Labels into the set of class names\n}\n\nvoid LearningMachine::separateSets(string)\n{\n\t// TODO: split the file contents into training and testing sets\n}\n\n// functions\nvoid LearningMachine::trans(int* matrix, int rows, int cols)\n{\n\t// transpose a rows x cols matrix stored in row-major order\n\tint* transMat = new int[rows*cols];\n\tfor (int ii = 0; ii < rows; ii++)\n\t{\n\t\tfor (int jj = 0; jj < cols; jj++)\n\t\t{\n\t\t\ttransMat[jj*rows + ii] = matrix[ii*cols + jj];\n\t\t}\n\t}\n\tfor (int kk = 0; kk < rows*cols; kk++) { matrix[kk] = transMat[kk]; }\n\tdelete[] transMat;\n}\n\nint main()\n{\n\tstring fileName = \"\";\n\tprintf(\"What is the name of the data file? 
>> \");\n\tcin >> fileName;\n\tLearningMachine Hal9000(fileName);\n\treturn 0;\n}\n\n" }, { "alpha_fraction": 0.7004127502441406, "alphanum_fraction": 0.7125787734985352, "avg_line_length": 43.1274528503418, "blob_id": "c0f6b5aa7633d843c975f200baa92d8e59bf3078", "content_id": "d51252c8314dcbac8bc21794a6c6c92839e143c6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4603, "license_type": "no_license", "max_line_length": 149, "num_lines": 102, "path": "/Code/MachineLearning/Original.py", "repo_name": "ogparry1/COP4501Gandolf", "src_encoding": "UTF-8", "text": "import pandas as pd\r\nimport numpy as np\r\nimport random\r\nfrom sklearn.metrics import confusion_matrix\r\nfrom sklearn.metrics import precision_score\r\n\r\ndef createTrainingTestSets(fileName, numberOfEntriesForTrainingSet,seed):\r\n np.random.seed(seed)\r\n dataSet = pd.read_csv(fileName)\r\n train_ix = np.random.choice(dataSet.index, numberOfEntriesForTrainingSet, replace=False)\r\n dataSet_training = dataSet.ix[train_ix]\r\n dataSet_test = dataSet.drop(train_ix)\r\n return dataSet_training,dataSet_test\r\n\r\ndef createTrainingValidationSet(dataSet,classIdentifiers):\r\n trainingValidationSet=pd.DataFrame(np.zeros((len(dataSet),len(classIdentifiers))))\r\n i=0\r\n for index, row in dataSet.iterrows():\r\n for j in range(0,len(classIdentifiers)):\r\n if(row['class']==classIdentifiers[j]):\r\n trainingValidationSet.xs(i)[j]=1\r\n i=i+1\r\n return trainingValidationSet\r\n\r\ndef createTestValidationSet(dataSet,classIdentifiers):\r\n testValidationSet = pd.DataFrame(np.zeros((len(dataSet), len(classIdentifiers))))\r\n i = 0\r\n for index, row in dataSet.iterrows():\r\n for j in range(0, len(classIdentifiers)):\r\n if (row['class'] == classIdentifiers[j]):\r\n testValidationSet.xs(i)[j] = 1\r\n i=i+1\r\n return testValidationSet\r\n\r\ndef train(trainingSet,trainingValidationSet,error):\r\n trainingSet=trainingSet.drop('class',1)\r\n weightMatrix=np.linalg.inv(np.transpose(trainingSet).dot(trainingSet)-error).dot(np.transpose(trainingSet)).dot(trainingValidationSet)\r\n return weightMatrix\r\n\r\ndef test(weightMatrix,testingSet,testingValidationSet):\r\n testingSet=testingSet.drop('class',1)\r\n predictionSet=testingSet.dot(weightMatrix)\r\n predictionOutputSet=np.zeros((1,len(testingSet)))\r\n trueOutputSet=np.zeros((1,len(testingSet)))\r\n i=0\r\n for index, row in predictionSet.iterrows():\r\n predictionOutputSet[0][i]=row.idxmax(1)\r\n i=i+1\r\n i=0\r\n for index, row in testingValidationSet.iterrows():\r\n j=0\r\n for columns in row:\r\n if(columns==1):\r\n trueOutputSet[0][i]=j\r\n j=j+1\r\n i=i+1\r\n return trueOutputSet,predictionOutputSet\r\ndef findError(trueOutputSet,predictedOutputSet):\r\n error=0\r\n for i in range(0,len(trueOutputSet[0])):\r\n if(trueOutputSet[0][i]!=predictedOutputSet[0][i]):\r\n error=error+1\r\n return error/len(trueOutputSet[0])\r\ndef confuseMatrix(true, predict):\r\n confused = confusion_matrix(true[0, :], predict[0, :])\r\n precision = precision_score(true[0, :], predict[0, :], average = None)\r\n return confused, precision\r\n\r\ndef findOptimumTrainingSet(fileName,classIdentifiers,numberOfEntriesForTrainingSet,numberOfIterations):\r\n minimumErrorTrainingSet=0\r\n minimumErrorTestSet=0\r\n minimumError=1\r\n confusionMatrix=0\r\n precision_score=0\r\n freeParameter=0\r\n for i in range(0,numberOfIterations):\r\n print('Iteration '+str(i)+' of '+str(numberOfIterations))\r\n 
trainingSet,testingSet=createTrainingTestSets(fileName,numberOfEntriesForTrainingSet,random.randint(0,numberOfIterations*numberOfIterations))\r\n trainingValidationSet=createTrainingValidationSet(trainingSet,classIdentifiers)\r\n testingValidationSet=createTestValidationSet(testingSet,classIdentifiers)\r\n for j in range(0,100,2):\r\n weightMatrix=train(trainingSet,trainingValidationSet,j/100)\r\n trueOutputSet, predictionOutputSet = test(weightMatrix, testingSet, testingValidationSet)\r\n currentError=findError(trueOutputSet,predictionOutputSet)\r\n if(currentError<minimumError):\r\n minimumError = currentError\r\n minimumErrorTrainingSet=trainingSet\r\n minimumErrorTestSet=testingSet\r\n freeParameter=i\r\n confusionMatrix,precision_score=confuseMatrix(trueOutputSet,predictionOutputSet)\r\n return minimumErrorTrainingSet,minimumErrorTestSet,freeParameter,confusionMatrix,precision_score\r\n\r\ndataText='IrisInfo'\r\nclassesToIdentify=['Iris-setosa','Iris-versicolor','Iris-virginica']\r\nsampleSize=20\r\nnumberOfIterations=50\r\ntrainingSet,testSet,freeParameter,confusionMatrix,precision_score=findOptimumTrainingSet(dataText,classesToIdentify,sampleSize,numberOfIterations)\r\ntrainingSet.to_csv(dataText+'_trainingSet',sep='\\t')\r\ntestSet.to_csv(dataText+'_testSet',sep='\\t')\r\nnp.savetxt(dataText+'_confusionMatrix',confusionMatrix,delimiter=',')\r\nnp.savetxt(dataText+'_precision_score',precision_score,delimiter=',')\r\nprint('FreeParameter: '+str(freeParameter/100))\r\n" }, { "alpha_fraction": 0.6888889074325562, "alphanum_fraction": 0.7010582089424133, "avg_line_length": 39.53845977783203, "blob_id": "dc6abb01b899d19c8c05df8739921cde6f8477df", "content_id": "532a3a410d2e4e3baa03263a6c7fe9302d4b4a1d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3780, "license_type": "no_license", "max_line_length": 149, "num_lines": 91, "path": "/Code/MachineLearning/WineClassifier.py", "repo_name": "ogparry1/COP4501Gandolf", "src_encoding": "UTF-8", "text": "import pandas as pd\r\nimport numpy as np\r\nimport random\r\nfrom sklearn.metrics import confusion_matrix\r\n\r\ndef createTrainingTestSets(fileName, numberOfEntriesForTrainingSet,seed):\r\n np.random.seed(seed)\r\n dataSet = pd.read_csv(fileName)\r\n train_ix = np.random.choice(dataSet.index, numberOfEntriesForTrainingSet, replace=False)\r\n dataSet_training = dataSet.ix[train_ix]\r\n dataSet_test = dataSet.drop(train_ix)\r\n return dataSet_training,dataSet_test\r\n\r\ndef createTrainingValidationSet(dataSet,classIdentifiers):\r\n trainingValidationSet=pd.DataFrame(np.zeros((len(dataSet),len(classIdentifiers))))\r\n i=0\r\n for index, row in dataSet.iterrows():\r\n for j in range(0,len(classIdentifiers)):\r\n if(row['class']==classIdentifiers[j]):\r\n trainingValidationSet.xs(i)[j]=1\r\n i=i+1\r\n return trainingValidationSet\r\n\r\ndef createTestValidationSet(dataSet,classIdentifiers):\r\n testValidationSet = pd.DataFrame(np.zeros((len(dataSet), len(classIdentifiers))))\r\n i = 0\r\n for index, row in dataSet.iterrows():\r\n for j in range(0, len(classIdentifiers)):\r\n if (row['class'] == classIdentifiers[j]):\r\n testValidationSet.xs(i)[j] = 1\r\n i=i+1\r\n return testValidationSet\r\n\r\ndef train(trainingSet,trainingValidationSet,error):\r\n trainingSet=trainingSet.drop('class',1)\r\n weightMatrix=np.linalg.inv(np.transpose(trainingSet).dot(trainingSet)-error).dot(np.transpose(trainingSet)).dot(trainingValidationSet)\r\n return weightMatrix\r\n\r\ndef 
test(weightMatrix,testingSet,testingValidationSet):\r\n testingSet=testingSet.drop('class',1)\r\n predictionSet=testingSet.dot(weightMatrix)\r\n predictionOutputSet=np.zeros((1,len(testingSet)))\r\n trueOutputSet=np.zeros((1,len(testingSet)))\r\n i=0\r\n for index, row in predictionSet.iterrows():\r\n predictionOutputSet[0][i]=row.idxmax(1)\r\n i=i+1\r\n i=0\r\n for index, row in testingValidationSet.iterrows():\r\n j=0\r\n for columns in row:\r\n if(columns==1):\r\n trueOutputSet[0][i]=j\r\n j=j+1\r\n i=i+1\r\n return trueOutputSet,predictionOutputSet\r\n\r\ndef findOptimumTrainingSet(fileName,classIdentifiers,numberOfEntriesForTrainingSet,numberOfIterations):\r\n minimumErrorTrainingSet=0\r\n minimumErrorTestSet=0\r\n minimumError=1\r\n for i in range(0,numberOfIterations):\r\n trainingSet,testingSet=createTrainingTestSets(fileName,numberOfEntriesForTrainingSet,random.randint(0,numberOfIterations*numberOfIterations))\r\n trainingValidationSet=createTrainingValidationSet(trainingSet,classIdentifiers)\r\n testingValidationSet=createTestValidationSet(testingSet,classIdentifiers)\r\n for i in range(0,100):\r\n weightMatrix=train(trainingSet,trainingValidationSet,i/100)\r\n trueOutputSet, predictionOutputSet = test(weightMatrix, testingSet, testingValidationSet)\r\n currentError=findError(trueOutputSet,predictionOutputSet)\r\n if(currentError<minimumError):\r\n minimumError = currentError\r\n minimumErrorTrainingSet=trainingSet\r\n minimumErrorTestSet=testingSet\r\n print(minimumError)\r\n return minimumErrorTrainingSet,minimumErrorTestSet\r\n\r\ndef findError(trueOutputSet,predictedOutputSet):\r\n error=0\r\n for i in range(0,len(trueOutputSet[0])):\r\n if(trueOutputSet[0][i]!=predictedOutputSet[0][i]):\r\n error=error+1\r\n return error/len(trueOutputSet[0])\r\n\r\ndef confusionMatrix(true, predicted):\r\n confused = confusion_matrix(true, predicted)\r\n print(\"Confusion Matrix:\\n\\n\")\r\n print(confused)\r\n return confused\r\n\r\n\r\nfindOptimumTrainingSet('./DataSets/Wine.txt',['Iris-setosa','Iris-versicolor','Iris-virginica'],15,500)\r\n" }, { "alpha_fraction": 0.8151658773422241, "alphanum_fraction": 0.8341231942176819, "avg_line_length": 69.33333587646484, "blob_id": "6444f53de36b59ea114ec5d29c314a7f211ed1eb", "content_id": "e8a07f388043f994597e19a31617fa1f352e8189", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 211, "license_type": "no_license", "max_line_length": 156, "num_lines": 3, "path": "/README.md", "repo_name": "ogparry1/COP4501Gandolf", "src_encoding": "UTF-8", "text": "COP4501Gandolf\nThis project is based on multiclass classification and linear regression. Four data sets will be split into training and testing and four equations will be \ncreated to predict the testing groups.\n" } ]
4
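The WineClassifier record above fits a one-hot linear model by a regularized normal-equation solve, W = (XᵀX − ε)⁻¹XᵀY, then predicts with a row-wise argmax while sweeping the free parameter ε. Below is a minimal vectorized sketch of the same idea; the random toy data, the λ grid, and the ridge-style +λI term (used here in place of the repository's bare scalar subtraction, since adding λI is better conditioned) are assumptions for illustration, not the repository's code.

```python
import numpy as np

def fit_linear_classifier(X, Y, lam):
    # Normal-equation solve with a ridge-style lam*I term; the repository
    # subtracts a bare scalar instead, which is less well conditioned.
    d = X.shape[1]
    return np.linalg.solve(X.T @ X + lam * np.eye(d), X.T @ Y)

def predict(X, W):
    # Each column of W scores one class; pick the best-scoring class per row.
    return (X @ W).argmax(axis=1)

# Toy usage on random data (hypothetical, just to exercise the shapes).
rng = np.random.default_rng(0)
X = rng.normal(size=(150, 4))        # 150 samples, 4 features
y = rng.integers(0, 3, size=150)     # 3 classes
Y = np.eye(3)[y]                     # one-hot targets
err, lam = min((np.mean(predict(X, fit_linear_classifier(X, Y, l)) != y), l)
               for l in np.linspace(0.01, 1.0, 50))
print(f"misclassification {err:.3f} at lambda {lam:.2f}")
```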
NANDANNANDAN/nan
https://github.com/NANDANNANDAN/nan
b662f229b61a69a79d3a1b1adf3094642110e9d4
8cd83095116ed3ac12d0301d3b5966c7a728b592
f08ab98655f4c4a529148bddd267b9ab9371ed04
refs/heads/master
2020-04-08T18:31:30.360763
2018-12-07T06:17:09
2018-12-07T06:17:09
159,611,639
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5080645084381104, "alphanum_fraction": 0.5080645084381104, "avg_line_length": 19.66666603088379, "blob_id": "f005b4389753c9748dbc3778db718ddd97fac848", "content_id": "89286f28e9432cf63b794591648c1b526a7d5488", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 124, "license_type": "no_license", "max_line_length": 43, "num_lines": 6, "path": "/nandann.py", "repo_name": "NANDANNANDAN/nan", "src_encoding": "UTF-8", "text": "d=('a','e','i','o','u','A','E','I','O','u')\ns=str(\"enter a alphabet\")\nif(s in d):\n\tprint(\"vowel\")\nelse:\n\tprint(\"constants\")\n" }, { "alpha_fraction": 0.6604651212692261, "alphanum_fraction": 0.6744186282157898, "avg_line_length": 10.315789222717285, "blob_id": "9ce0b662b7e5a778eee30ac6add202827db58990", "content_id": "7ce5c2e760c16d2b29bce7e689306b8e96955b6b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 215, "license_type": "no_license", "max_line_length": 38, "num_lines": 19, "path": "/pos.java", "repo_name": "NANDANNANDAN/nan", "src_encoding": "UTF-8", "text": "class Number \n{\npublic static void main(String args[])\n{\nint number=3;\nif(number>0)\n{\nSystem.out.println(\"Positive\");\n}\nelse if(number<0)\n{\nSystem.out.println(\"Negative\");\n}\nelse\n{\nSystem.out.println(\"Zero\");\n}\n}\n}\n" } ]
2
mushenghe/Visual-Target-tracking
https://github.com/mushenghe/Visual-Target-tracking
7a362226f40b61ac0def00a07587e54db02929e9
a2a58c080341ebf7722ca3273d070b85feccd5d5
ee26da4708a5560d28efb46a247a5143037124f9
refs/heads/master
2020-12-05T05:38:02.025137
2020-01-06T04:19:42
2020-01-06T04:19:42
232,022,261
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.48257023096084595, "alphanum_fraction": 0.5179500579833984, "avg_line_length": 20.959999084472656, "blob_id": "b82cbeee419648dffcbaf30660c984869e27142f", "content_id": "68175bade33a78f67f4edc8bbfdc96d47d3a3104", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3844, "license_type": "no_license", "max_line_length": 82, "num_lines": 175, "path": "/Visual_Target_tracking.py", "repo_name": "mushenghe/Visual-Target-tracking", "src_encoding": "UTF-8", "text": "import numpy as np\nimport cv2\n\nglobal x_step\nglobal y_step\nglobal R\n\n\ndef image_name(n):\t\n\tbase_name = 'image_girl'\t\n\tif n<10:\n\t\tpre = \"000\"\n\telif n<100:\n\t\tpre = \"00\"\n\telse:\n\t\tpre = \"0\"\n\tname = base_name + '/'+ str(pre)+ str(n)+'.jpg'\n\treturn name\n\ndef SSD(T_x,T_y,gold_img,img,gray_img):\n\tcurrent_img = img \n\tgc_img = gray_img\n\tD = 0\n\tMIN_D = 10000000000000\n\tI_x = 0\n\tI_y = 0\n\tfor i in range(T_x - x_step, T_x + x_step+1): #I_x\n\t\tfor j in range(T_y - y_step, T_y + y_step+1): #I_y\n\t\t\t# print(i)\n\t\t\t# print(j)\n\t\t\tfor c_x in range(i-R,i+R+1):\n\t\t\t\tfor c_y in range(j-R,j+R+1):\n\t\t\t\t\t# print(c_x)\n\t\t\t\t\t# print(c_y)\n\t\t\t\t\tif (c_x - i)**2 + (c_y - j)**2 < R **2: #poinit (c_x,c_y) is in the circle\n\t\t\t\t\t\told_x = T_x + i - c_x \n\t\t\t\t\t\told_y = T_y + j - c_y\n\t\t\t\t\t\tD += (int(gc_img[c_x,c_y]) - int(gold_img[old_x,old_y]))**2\n\t\t\t\t\t\t# print(D)\n\t\t\t# print(MIN_D)\n\t\t\tif D < MIN_D:\n\t\t\t\tMIN_D = D\n\t\t\t\tD = 0\n\t\t\t\tI_x = i \n\t\t\t\tI_y = j\n\t\t\t\t# print('T_x is:')\n\t\t\t\t# print(T_x)\n\t\t\t\t# print('T_y is:')\n\t\t\t\t# print(T_y)\n\t\t\telse:\n\t\t\t\tpass\n\t\t\tD = 0\n\t\n\tT_x = I_x \n\tT_y = I_y\t\t\n\tgold_img = gc_img\n\t# print('T_x is:')\n\t# print(T_x)\n\t# print('T_y is:')\n\t# print(T_y)\n\tcv2.circle(current_img, (T_y, T_x), R, (0, 0, 255), 2)\n\tvideo.write(current_img)\n\ndef CC(T_x,T_y,gold_img,img,gray_img):\n\tcurrent_img = img \n\tgc_img = gray_img\n\tC = 0\n\tMAX_C = 0\n\tI_x = 0\n\tI_y = 0\n\tfor i in range(T_x - x_step, T_x + x_step+1): #I_x\n\t\tfor j in range(T_y - y_step, T_y + y_step+1): #I_y\n\t\t\tfor c_x in range(i-R,i+R+1):\n\t\t\t\tfor c_y in range(j-R,j+R+1):\n\t\t\t\t\tif (c_x - i)**2 + (c_y - j)**2 < R **2: #poinit (c_x,c_y) is in the circle\n\t\t\t\t\t\told_x = T_x + i - c_x \n\t\t\t\t\t\told_y = T_y + j - c_y\n\t\t\t\t\t\tC += int(gc_img[c_x,c_y]) * int(gold_img[old_x,old_y])\n\t\t\tif C > MAX_C:\n\t\t\t\tMAX_C = C\n\t\t\t\tC = 0\n\t\t\t\tI_x = i \n\t\t\t\tI_y = j\n\t\t\telse:\n\t\t\t\tpass\n\t\t\tC = 0\t\n\tT_x = I_x \n\tT_y = I_y\t\t\n\tgold_img = gc_img\n\n\tcv2.circle(current_img, (T_y, T_x), R, (0, 0, 255), 2)\n\tvideo.write(current_img)\n\ndef NCC(T_x,T_y,gold_img,img,gray_img):\n\tcurrent_img = img \n\tgc_img = gray_img\n\trows,columns = gray_img.shape\n\tNOP = rows * columns\n\tT = 0\n\tI = 0\n\tfor r in range(rows):\n\t\tfor c in range(columns):\n\t\t\tT += gold_img[r,c]\n\t\t\tI += gc_img[r,c] \n\tTa = T / NOP \n\tIa = I / NOP \n\tN = 0\n\tT_sum = 0\n\tI_sum = 0\n\tM_sum = 0\n\tMAX_N = 0\n\tI_x = 0\n\tI_y = 0\n\tfor i in range(T_x - x_step, T_x + x_step+1): #I_x\n\t\tfor j in range(T_y - y_step, T_y + y_step+1): #I_y\n\t\t\tfor c_x in range(i-R,i+R+1):\n\t\t\t\tfor c_y in range(j-R,j+R+1):\n\t\t\t\t\tif (c_x - i)**2 + (c_y - j)**2 < R **2: #poinit (c_x,c_y) is in the circle\n\t\t\t\t\t\told_x = T_x + i - c_x \n\t\t\t\t\t\told_y = T_y + j - c_y\n\t\t\t\t\t\tTs = gold_img[old_x,old_y] - Ta 
\n\t\t\t\t\t\t# print('Ts is: %s',Ts)\n\t\t\t\t\t\tIs = gc_img[c_x,c_y] - Ia \n\t\t\t\t\t\t# print('Is is: %s',Is)\n\t\t\t\t\t\tT_sum += Ts ** 2\n\t\t\t\t\t\tI_sum += Is ** 2 \n\t\t\t\t\t\tM_sum += Ts * Is \n\n\t\t\t# print('Tsum is: %s',T_sum)\t\n\t\t\t# print('Isum is: %s',I_sum)\t\t\n\t\t\tN = M_sum/np.sqrt(T_sum*I_sum)\n\t\t\t# print(M_sum/np.sqrt(T_sum*I_sum))\n\n\t\t\tif N > MAX_N:\n\t\t\t\tMAX_N = N\n\t\t\t\tT_sum = I_sum = M_sum = 0\n\t\t\t\tI_x = i \n\t\t\t\tI_y = j\n\t\t\telse:\n\t\t\t\tpass\n\t\t\tN = 0\n\t\n\tT_x = I_x \n\tT_y = I_y\t\t\n\tgold_img = gc_img\n\tcv2.circle(current_img, (T_y, T_x), R, (0, 0, 255), 2)\n\tvideo.write(current_img)\n\n\t\t\t\nN = 500\nT_x = 44\nT_y = 72\nR = 25\nx_step = 5\ny_step = 8\nvideo = cv2.VideoWriter('video.avi',cv2.VideoWriter_fourcc(*\"XVID\"),30,(128,96))\nname = image_name(1)\nold_img = cv2.imread(name) #initialize old_img\ngold_img = cv2.cvtColor(old_img, cv2.COLOR_BGR2GRAY) #initialize gold_img\ncv2.circle(old_img, (T_y, T_x), 25, (0, 0, 255), 2) # manually draw the first circle\nvideo.write(old_img)\n\nfor i in range(2,N):\n\tname = image_name(i)\n\timg = cv2.imread(name)\n\tgray_img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\n\t# cv2.imshow('img', img)\n\t# SSD(T_x,T_y,gold_img,img,gray_img)\n\t# CC(T_x,T_y,gold_img,img,gray_img)\n\tNCC(T_x,T_y,gold_img,img,gray_img)\n\n\n\ncv2.waitKey(0)\ncv2.destroyAllWindows()\n\n" }, { "alpha_fraction": 0.762290358543396, "alphanum_fraction": 0.7698091268539429, "avg_line_length": 69.16000366210938, "blob_id": "2bb46b1f0d0d3f1408b29ec13cca10190ba6516f", "content_id": "2a5623812cb03fc7086b41d9a33771dbae35e736", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1731, "license_type": "no_license", "max_line_length": 191, "num_lines": 25, "path": "/README.md", "repo_name": "mushenghe/Visual-Target-tracking", "src_encoding": "UTF-8", "text": "# Visual Target tracking\n## Project Description\nThis project is one of the homework assignments of the EE333 Computer Vision course at Northwestern University. It implements head tracking based on three different template-matching methods.<br>\nI separated this task into two basic tasks:<br>\n1. Initialization<br>\nI first read the 0001 image and manually draw a circle around the girl’s head. I found the center of the\ncircle by showing the image several times and playing with the parameters. This processed first image is stored as\nthe old_image.\n2. Apply the image matching method & search method:<br>\nI applied three matching methods and got 3 videos. The application of the three methods is very\nsimilar. I made a fixed search window for all three methods; the circle center for the next frame\nis the point inside the search window that best satisfies the method.<br>\n 1) SSD: sum of squared differences<br>\n I wrote an SSD function that takes the old image, the next image and the circle center of the old image,\n then returns the circle center of the next image. 
To find the circle center of the next image, I\n searched all the points within the search window and computed the D value; the center I found has the\n minimum value of D.<br>\n 2) CC: cross-correlation<br>\n Similar to the implementation of the SSD method, I wrote a CC function which finds the coordinate that\n holds the maximum C value.<br>\n 3) NCC: normalized cross-correlation<br>\n Similar to the implementations above, the NCC function finds the coordinate that maximizes\n the N value.<br>\n All the methods above also draw a circle on the colored image and write that image to the video. The image\n then becomes the old image.<br>\n" } ]
2
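The README record above describes maximizing the normalized cross-correlation N = Σ(T−T̄)(I−Ī) / √(Σ(T−T̄)² · Σ(I−Ī)²) over a small search window, which the script evaluates with four nested Python loops per candidate center. The sketch below restates that score in NumPy; it assumes patch-local means, a circular mask of radius R matching the script's `(c_x-i)**2 + (c_y-j)**2 < R**2` test, and a search window that stays inside the image. It illustrates the formula rather than reproducing the repository's code.

```python
import numpy as np

def ncc(template, patch):
    # Normalized cross-correlation between two equally sized float patches.
    t = template - template.mean()
    p = patch - patch.mean()
    return float((t * p).sum() / np.sqrt((t * t).sum() * (p * p).sum()))

def best_match(prev_gray, cur_gray, center, R=25, x_step=5, y_step=8):
    # Circular mask of radius R; pixels outside the circle are zeroed.
    yy, xx = np.ogrid[-R:R + 1, -R:R + 1]
    mask = (xx * xx + yy * yy) < R * R
    cx, cy = center  # (row, col) of the previous frame's circle center
    template = prev_gray[cx - R:cx + R + 1, cy - R:cy + R + 1].astype(float) * mask
    best_score, best_center = -np.inf, center
    for i in range(cx - x_step, cx + x_step + 1):
        for j in range(cy - y_step, cy + y_step + 1):
            patch = cur_gray[i - R:i + R + 1, j - R:j + R + 1].astype(float) * mask
            score = ncc(template, patch)
            if score > best_score:
                best_score, best_center = score, (i, j)
    return best_center
```

Subtracting a per-patch mean keeps the score in [−1, 1] regardless of local brightness, which is the usual reason NCC tracks better than plain cross-correlation under lighting changes.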
FabriceCh/advent-of-code
https://github.com/FabriceCh/advent-of-code
1acaf880bc30205b1e0931344c72ff20b5a06b6e
920f1023d0c654d70e9a42c44dcfa7d0c39bd156
6e10e5887950a40bf8dcf09475c4fea3355d0a7f
refs/heads/master
2023-08-08T10:30:09.261645
2023-07-27T21:09:36
2023-07-27T21:09:36
226,786,276
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.43668457865715027, "alphanum_fraction": 0.465080589056015, "avg_line_length": 27.955554962158203, "blob_id": "7535c8feaf957411b98e3736dab7fddd3867651f", "content_id": "118232f433da07c6435ede0f3c4d05fb84d4029c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3909, "license_type": "no_license", "max_line_length": 118, "num_lines": 135, "path": "/2022/21-operations-tree.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "from utils import read_file\n\nlines = read_file(\"/home/fabrice/advent-of-code/2022/input\")\nlines2 = [\n \"root: pppw + sjmn\",\n \"pppw: cczh / lfqf\",\n \"cczh: sllz + lgvd\",\n \"sjmn: drzm * dbpl\",\n \"ptdq: humn - dvpt\",\n \"drzm: hmdt - zczc\",\n \"lgvd: ljgn * ptdq\",\n \"dbpl: 5\",\n \"zczc: 2\",\n \"dvpt: 3\",\n \"lfqf: 4\",\n \"humn: 5\",\n \"ljgn: 2\",\n \"sllz: 4\",\n \"hmdt: 32\",\n]\n\ndef in_to_dict(lines):\n nodes = {}\n for l in lines:\n key, op = l.split(\":\")\n op = op.split(\" \")[1:]\n nodes[key] = op\n return nodes\n\ndef get_val_from_node(name, nodes):\n if name == \"humn\":\n return []\n if len(nodes[name]) == 3:\n n1, n2 = nodes[name][0], nodes[name][2]\n left, right = get_val_from_node(n1, nodes), get_val_from_node(n2, nodes)\n rev = False\n if isinstance(right, list):\n left, right = right, left\n rev = True\n if isinstance(left, list):\n #print(left, [(nodes[name][1], right)])\n op = (nodes[name][1], right)\n if rev:\n op = (f\"{nodes[name][1]}{nodes[name][1]}\", right)\n return left + [op]\n if nodes[name][1] == \"+\":\n return left + right\n if nodes[name][1] == \"-\":\n return left - right\n if nodes[name][1] == \"*\":\n return left * right\n if nodes[name][1] == \"/\":\n return int(left / right)\n elif len(nodes[name]) == 1:\n return int(nodes[name][0])\n\ndef get_val_from_node_with_ans(name, nodes, humn_val):\n if name == \"humn\":\n return humn_val\n if len(nodes[name]) == 3:\n n1, n2 = nodes[name][0], nodes[name][2]\n left, right = get_val_from_node_with_ans(n1, nodes, humn_val), get_val_from_node_with_ans(n2, nodes, humn_val)\n\n if nodes[name][1] == \"+\":\n return left + right\n if nodes[name][1] == \"-\":\n return left - right\n if nodes[name][1] == \"*\":\n return left * right\n if nodes[name][1] == \"/\":\n return int(left / right)\n elif len(nodes[name]) == 1:\n return int(nodes[name][0])\n\n\ndef part1(original = False, answer=5):\n\n \n nodes = in_to_dict(lines)\n root = \"root\"\n if original and answer != 5:\n answer = 585\n if answer == 5 or answer == 585:\n print(\"original part1 answer:\")\n print(get_val_from_node_with_ans(root, nodes, answer))\n else:\n n1, n2 = nodes[root][0], nodes[root][2]\n v1 = get_val_from_node_with_ans(n1, nodes, answer)\n v2 = get_val_from_node_with_ans(n2, nodes, answer)\n print(v1)\n print(v2)\n\n\n\ndef part2():\n nodes = in_to_dict(lines)\n root = \"root\"\n n1, n2 = nodes[root][0], nodes[root][2]\n v1 = get_val_from_node(n1, nodes)\n v2 = get_val_from_node(n2, nodes)\n if isinstance(v2, list):\n v1, v2 = v2, v1\n print(v1)\n print(v2)\n real_eq = f\"({v2})\"\n if isinstance(v1, list):\n ans = v2\n while v1:\n cur_op = v1.pop()\n if cur_op[0] == \"+\" or cur_op[0] == \"++\":\n ans -= cur_op[1]\n real_eq = f\"({real_eq}-{cur_op[1]})\"\n if cur_op[0] == \"-\":\n ans += cur_op[1]\n real_eq = f\"({real_eq}+{cur_op[1]})\"\n if cur_op[0] == \"*\" or cur_op[0] == \"**\":\n ans /= cur_op[1]\n real_eq = f\"({real_eq}/{cur_op[1]})\"\n if cur_op[0] == \"/\":\n ans *= cur_op[1]\n real_eq = 
f\"({real_eq}*{cur_op[1]})\"\n\n if cur_op[0] == \"--\":\n ans = cur_op[1] - ans\n if cur_op[0] == \"//\":\n ans = cur_op[1] / ans\n \n print(ans, cur_op)\n print(\"----ans---\")\n print(ans)\n print(\"---------\")\n part1(original = False, answer=ans)\n print(real_eq)\n#part1(original=True, answer = 585)\npart2()\n" }, { "alpha_fraction": 0.4630409777164459, "alphanum_fraction": 0.4864036738872528, "avg_line_length": 21.713043212890625, "blob_id": "af393dab779c5a5e5a3ab9b07d024251daffca66", "content_id": "4b8b7e9713ad5c9eb392ef715c6e9c626dbf918c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2611, "license_type": "no_license", "max_line_length": 83, "num_lines": 115, "path": "/2022/12-disjktra.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "import heapq\nfrom aocd import get_data\nar = get_data(day=12, year=2022)\nar = ar.splitlines()\n\n#print(ar)\n\nar2 = [\n \"Sabqponm\",\n \"abcryxxl\",\n \"accszExk\",\n \"acctuvwj\",\n \"abdefghi\",\n]\n\n_map = []\nfor line in ar:\n _map.append([ord(a) for a in line])\n\nX_LEN = len(_map[0])\nY_LEN = len(_map)\n\ndef find_S(ar):\n for i, line in enumerate(ar):\n for j, el in enumerate(line):\n if el == \"S\":\n return [i, j]\n\ndef find_E(ar):\n for i, line in enumerate(ar):\n for j, el in enumerate(line):\n if el == \"E\":\n return [i, j]\n\ndef find_as(ar):\n aas = []\n for i, line in enumerate(ar):\n for j, el in enumerate(line):\n if el == \"a\":\n aas.append([i, j])\n return aas\n\ndef get_neighbors(x, y):\n neigs = []\n if x != 0:\n neigs.append((x - 1, y))\n if x != Y_LEN - 1:\n neigs.append((x +1, y))\n if y != 0:\n neigs.append((x, y - 1))\n if y != X_LEN - 1:\n neigs.append((x, y + 1))\n return neigs\n\nget_neighbors(4, 4)\n\ndef disjktra(start, end):\n \n start = tuple(start)\n seen = {start}\n target = tuple(end)\n dist = {start: 0}\n nodes_to_visit = [(_map[0][0], start)]\n heapq.heapify(nodes_to_visit)\n iter = 0\n while nodes_to_visit:\n\n current_node = heapq.heappop(nodes_to_visit)\n if current_node[1] == target:\n break\n \n neigs = get_neighbors(current_node[1][0], current_node[1][1])\n for n in neigs:\n \n \n if _map[current_node[1][0]][current_node[1][1]] == ord(\"S\"):\n cur_height = ord(\"a\")\n else:\n cur_height = _map[current_node[1][0]][current_node[1][1]]\n \n if _map[n[0]][n[1]] == ord(\"E\"):\n n_height = ord(\"z\")\n else:\n n_height = _map[n[0]][n[1]]\n\n\n is_not_too_high = cur_height >= n_height - 1\n if n not in seen and is_not_too_high:\n dist[n] = dist[current_node[1]] + 1\n seen.add(n)\n heapq.heappush(nodes_to_visit, (int(dist[current_node[1]]) + 1, n))\n iter += 1\n if target in dist.keys():\n print(dist[target])\n return dist[target]\n else:\n return 10000\n\ndef part1():\n start = find_S(ar)\n end = find_E(ar)\n print(start, end)\n print(disjktra(start, end))\n\ndef part2():\n starts = find_as(ar)\n end = find_E(ar)\n ans = 10000\n for s in starts:\n ans = min(ans, disjktra(s, end))\n print(ans)\npart1()\npart2()\n\n#print(get_neighbors(4, 4))" }, { "alpha_fraction": 0.5495403409004211, "alphanum_fraction": 0.5760980844497681, "avg_line_length": 24.128204345703125, "blob_id": "394d4d145bd2a96ab0521adffe9938abc11b7ae6", "content_id": "8903d8cff42a97f8ea9b6bac5195ba9124eece11", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 979, "license_type": "no_license", "max_line_length": 57, "num_lines": 39, "path": "/2022/3.py", "repo_name": "FabriceCh/advent-of-code", 
"src_encoding": "UTF-8", "text": "from utils import read_file\n\nar = read_file(\"/home/fabrice/advent-of-code/2022/input\")\na = [\n 'vJrwpWtwJgWrhcsFMMfFFhFp',\n 'jqHRNqRjqzjGDLGLrsFMfFZSrLrFZsSL',\n 'PmmdzqPrVvPwwTWBwg',\n 'wMqvLMZHhHMvwLHjbvcjnnSBnvTQFn',\n 'ttgJtRGJQctTZtZT',\n 'CrZsJsPPZsGzwwsLwLmpwMDw',\n]\n\ndef letter_to_priority(letter):\n if letter.islower():\n return ord(letter) - 96\n else:\n return ord(letter) - 64 + 26\n\ndef part1(ar):\n priorities_sum = 0\n for line in ar:\n half_1 = line[0:len(line)//2]\n half_2 = line[len(line)//2:]\n for l in half_2:\n if l in half_1:\n priorities_sum += letter_to_priority(l)\n break\n print(priorities_sum)\n\ndef part2(ar):\n priorities_sum = 0\n for i in range (0, len(ar), 3):\n for l in ar[i]:\n if l in ar[i + 1] and l in ar[i + 2]:\n priorities_sum += letter_to_priority(l)\n break\n print(priorities_sum)\n\npart2(ar)" }, { "alpha_fraction": 0.42728903889656067, "alphanum_fraction": 0.5278276205062866, "avg_line_length": 19.66666603088379, "blob_id": "91da4024f76ba099f7347355c46db31fb5d3ecfa", "content_id": "403684541e83a38e590d6928e9d469276efb6fd1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 557, "license_type": "no_license", "max_line_length": 61, "num_lines": 27, "path": "/2022/4.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "from utils import read_file\n\nar = read_file(\"/home/fabrice/advent-of-code/2022/input\")\nar2 = [\n \"2-4,6-8\",\n \"2-3,4-5\",\n \"5-7,7-9\",\n \"2-8,3-7\",\n \"6-6,4-6\",\n \"2-6,4-8\",\n\n]\ndef parse_line(line):\n pairs = line.split(\",\")\n rs1, rs2 = pairs[0].split(\"-\"), pairs[1].split(\"-\")\n return int(rs1[0]), int(rs1[1]), int(rs2[0]), int(rs2[1])\n\ncountr = 0\nfor l in ar:\n min1, max1, min2, max2 = parse_line(l)\n if max1 >= min2 and max1 <= max2:\n countr += 1\n elif max2 >= min1 and max2 <= max1:\n countr += 1\n\n\nprint(countr)" }, { "alpha_fraction": 0.4921985864639282, "alphanum_fraction": 0.5096926689147949, "avg_line_length": 27.58108139038086, "blob_id": "be1424240203a70fffcc311ba6d69a4c8659e19e", "content_id": "672278da936d5cee5d9579202fa96e1096720826", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4230, "license_type": "no_license", "max_line_length": 77, "num_lines": 148, "path": "/2022/24-dijkstra-changing-map.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "from typing import List\nfrom utils import read_file\n\nlines = read_file(\"/home/fabrice/advent-of-code/2022/input\")\n\nlines3 = [\n \"#.######\",\n \"#>>.<^<#\",\n \"#.<..<<#\",\n \"#>v.><>#\",\n \"#<^v^^>#\",\n \"######.#\",\n\n]\n\nar = [list(l) for l in lines]\nfor l in lines:\n print(l)\nMAP_SIZE_VERT = len(ar)\nMAP_SIZE_HORI = len(ar[0])\n\n\nclass Storm:\n def __init__(self, pos, dir):\n self.pos = pos\n self.dir = dir\n def incr(self):\n if self.dir == \">\":\n self.pos = (self.pos[0], self.pos[1] + 1)\n elif self.dir == \"<\":\n self.pos = (self.pos[0], self.pos[1] - 1)\n elif self.dir == \"^\":\n self.pos = (self.pos[0] - 1, self.pos[1])\n elif self.dir == \"v\":\n self.pos = (self.pos[0] + 1, self.pos[1])\n\n if self.pos[0] == MAP_SIZE_VERT - 1:\n self.pos = ((self.pos[0] % (MAP_SIZE_VERT - 1)) + 1, self.pos[1])\n elif self.pos[1] == MAP_SIZE_HORI - 1:\n self.pos = (self.pos[0], (self.pos[1] % (MAP_SIZE_HORI - 1)) + 1)\n elif self.pos[0] == 0:\n self.pos = (MAP_SIZE_VERT - 2, self.pos[1])\n elif self.pos[1] == 0:\n self.pos = 
(self.pos[0], MAP_SIZE_HORI - 2)\n \n\ndef get_storms(ar) -> List[Storm]:\n storms: List[Storm] = []\n for i, l in enumerate(ar):\n for j, el in enumerate(l):\n if el in [\">\", \"<\", \"v\", \"^\"]:\n storms.append(Storm((i, j), el))\n return storms\n\ndef get_adjs(cur_pos, storms, ar):\n storms_pos = [s.pos for s in storms]\n adjs = [cur_pos]\n if cur_pos[0] - 1 >= 0:\n adjs.append((cur_pos[0] - 1, cur_pos[1]))\n if cur_pos[0] + 1 < MAP_SIZE_VERT:\n adjs.append((cur_pos[0] + 1, cur_pos[1]))\n adjs.append((cur_pos[0], cur_pos[1] + 1))\n adjs.append((cur_pos[0], cur_pos[1] - 1))\n\n legit_pos = []\n for pp in adjs:\n if pp not in storms_pos and ar[pp[0]][pp[1]] != \"#\":\n legit_pos.append(pp)\n\n return legit_pos\n\ndef part1():\n storms = get_storms(ar)\n start = (0,1)\n end = (MAP_SIZE_VERT - 1, MAP_SIZE_HORI - 2)\n current_possible_pos = {start}\n counter = 0\n while end not in current_possible_pos:\n # incr turn counter\n counter += 1\n # storms all move once\n for s in storms:\n s.incr()\n # gather every new possible positions\n next_pos = set()\n for cur_pos in current_possible_pos:\n adjs = get_adjs(cur_pos, storms, ar)\n for a in adjs:\n next_pos.add(a)\n current_possible_pos = next_pos\n print(counter)\n \n\ndef part2():\n storms = get_storms(ar)\n start = (0,1)\n end = (MAP_SIZE_VERT - 1, MAP_SIZE_HORI - 2)\n # go to end\n current_possible_pos = {start}\n counter = 0\n while end not in current_possible_pos:\n # incr turn counter\n counter += 1\n # storms all move once\n for s in storms:\n s.incr()\n # gather every new possible positions\n next_pos = set()\n for cur_pos in current_possible_pos:\n adjs = get_adjs(cur_pos, storms, ar)\n for a in adjs:\n next_pos.add(a)\n current_possible_pos = next_pos\n #print(counter)\n # go back to start\n current_possible_pos = {end}\n while start not in current_possible_pos:\n # incr turn counter\n counter += 1\n # storms all move once\n for s in storms:\n s.incr()\n # gather every new possible positions\n next_pos = set()\n for cur_pos in current_possible_pos:\n adjs = get_adjs(cur_pos, storms, ar)\n for a in adjs:\n next_pos.add(a)\n current_possible_pos = next_pos\n\n # go back to end\n current_possible_pos = {start}\n while end not in current_possible_pos:\n # incr turn counter\n counter += 1\n # storms all move once\n for s in storms:\n s.incr()\n # gather every new possible positions\n next_pos = set()\n for cur_pos in current_possible_pos:\n adjs = get_adjs(cur_pos, storms, ar)\n for a in adjs:\n next_pos.add(a)\n current_possible_pos = next_pos\n print(counter)\n\npart2()\n" }, { "alpha_fraction": 0.4891122281551361, "alphanum_fraction": 0.5217755436897278, "avg_line_length": 21.961538314819336, "blob_id": "3529626aeb7c203bec5dea9c9bcf890f751a424d", "content_id": "aa38fac1a5d77c9c8ed746b756072caa56ecca48", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1194, "license_type": "no_license", "max_line_length": 59, "num_lines": 52, "path": "/2022/7-directories.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "from utils import read_file\n\nar = read_file(\"/home/fabrice/advent-of-code/2022/input\")\n\ncurrent_in = []\nsizes = {\"/\": 0}\nseen_file = {}\n\ndef add_to_dict(dic, key, value):\n if key in dic.keys():\n dic[key] += value\n else:\n dic[key] = value\n\nfor l in ar:\n args = l.split(\" \")\n pwd = \"\".join(current_in)\n if args[1] == \"cd\" and args[2] != \"..\":\n current_in.append(args[2])\n if args[1] == \"cd\" and args[2] == \"..\":\n 
current_in.pop()\n if args[0] != \"$\" and args[0] != \"dir\":\n f = args[1]\n \n if pwd in seen_file.keys() and f in seen_file[pwd]:\n continue\n else:\n add_to_dict(seen_file, pwd, [f])\n ppwd = \"\"\n for dir in current_in:\n ppwd += dir\n add_to_dict(sizes, ppwd, int(args[0]))\n\n# part 1\nto = 0\nfor d in sizes.values():\n if d <= 100000:\n to += d\nprint(to)\n\n# part 2\nmin_for_prog = 30000000\ntotal_space = sizes[\"/\"]\ntootal = 70000000\nremaining = tootal - total_space\nto_be_deleted = min_for_prog - remaining\nall_sizes = list(sizes.values())\nall_sizes.sort()\nfor s in all_sizes:\n if s > to_be_deleted:\n print(s)\n break\n" }, { "alpha_fraction": 0.46358221769332886, "alphanum_fraction": 0.5029775500297546, "avg_line_length": 22.989011764526367, "blob_id": "6c28cc279bd9679c3ec27614b7925972ac8c4632", "content_id": "e2cca481b6311c653e6281d1f8c180107d20d016", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2183, "license_type": "no_license", "max_line_length": 70, "num_lines": 91, "path": "/2022/9-moving-head-tail.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "from aocd import get_data\nar = get_data(day=9, year=2022)\nar = ar.splitlines()\n\ndef update_head(head, dir):\n if dir == \"U\":\n head[1] += 1\n elif dir == \"D\":\n head[1] -= 1\n elif dir == \"R\":\n head[0] += 1\n elif dir == \"L\":\n head[0] -= 1\n\ndef is_tail_next_to_head(tail, head):\n return (abs(tail[0] - head[0]) < 2 and abs(tail[1] - head[1]) < 2)\n\ndef is_tail_same_row_or_column(tail, head):\n return (tail[0] == head[0] or tail[1] == head[1])\n\ndef update_tail_straight(tail, head):\n if head[0] == tail[0]:\n if head[1] > tail[1]:\n tail[1] += 1\n elif head[1] < tail[1]:\n tail[1] -= 1\n return\n if head[1] == tail[1]:\n if head[0] > tail[0]:\n tail[0] += 1\n elif head[0] < tail[0]:\n tail[0] -= 1\n\ndef update_tail_diag(tail, head):\n if head[0] > tail[0]:\n tail[0] += 1\n elif head[0] < tail[0]:\n tail[0] -= 1\n if head[1] > tail[1]:\n tail[1] += 1\n elif head[1] < tail[1]:\n tail[1] -= 1\n\ndef update_tail(tail, head):\n if is_tail_next_to_head(tail, head):\n return\n if is_tail_same_row_or_column(tail, head):\n update_tail_straight(tail, head)\n else:\n update_tail_diag(tail, head)\n \n\ndef exec_step(line, tail, head):\n dir = line[0]\n update_head(head, dir)\n update_tail(tail, head)\n\ndef exec_step2(line, tails, head):\n dir = line[0]\n update_head(head, dir)\n for i in range(9):\n if i == 0:\n update_tail(tails[i], head)\n else:\n update_tail(tails[i], tails[i-1])\n\ndef part1(ar):\n visited = {(0,0)}\n head = [0,0]\n tail = [0,0]\n for line in ar:\n n_steps = line.split(\" \")[1]\n for _ in range(int(n_steps)):\n exec_step(line, tail, head)\n visited.add(tuple(tail))\n print(len(visited))\n\n\ndef part2(ar):\n visited = {(0,0)}\n head = [0,0]\n tails = [[0,0] for i in range(9)]\n for line in ar:\n n_steps = line.split(\" \")[1]\n for _ in range(int(n_steps)):\n exec_step2(line, tails, head)\n visited.add(tuple(tails[8]))\n print(len(visited))\n\npart1(ar)\npart2(ar)\n" }, { "alpha_fraction": 0.5187393426895142, "alphanum_fraction": 0.5323679447174072, "avg_line_length": 30.30666732788086, "blob_id": "40c6eadaddd7becc046d0855a17c398ed60ca352", "content_id": "fe6db92201e3bfba4fe13b110bb591ed2ad585d0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2348, "license_type": "no_license", "max_line_length": 90, "num_lines": 75, "path": 
"/old/2021/day19-2021.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "from typing import List\nimport math\n\ndef read_file(path):\n arr = []\n with open(path, \"r\") as file:\n for line in file:\n arr.append(line.rstrip())\n return arr\n\nar = read_file(\"test19\")\n#ar = read_file(\"input19\")\n\n\nclass Scanner:\n def __init__(self, name, raw_beacons):\n self.name = name\n self.positions = self._raw_to_positions(raw_beacons)\n self.distances = self._get_rel_distances(self.positions)\n\n def _raw_to_positions(self, raw_beacons):\n positions = []\n for line in raw_beacons:\n if line == \"\":\n continue\n positions.append([int(a) for a in line.split(\",\")])\n return positions\n\n def _get_rel_distances(self, positions):\n distances = {i: [] for i in range(len(positions))}\n for i, pos in enumerate(positions):\n for j, pos2 in enumerate(positions):\n if j != i:\n dist = (pos[0]-pos2[0])**2 + (pos[1]-pos2[1])**2 + (pos[2]-pos2[2])**2\n distances[i].append(dist)\n distances[i].sort()\n return distances\n\n\nscanners_delmiters = []\nfor i, line in enumerate(ar):\n if \"scanner\" in line:\n scanners_delmiters.append(i)\nscanners: List[Scanner] = []\nfor j, scanner_i in enumerate(scanners_delmiters):\n if j < len(scanners_delmiters) - 1:\n scanners.append(Scanner(ar[scanner_i], ar[scanner_i+1:scanners_delmiters[j+1]]))\n else:\n scanners.append(Scanner(ar[scanner_i], ar[scanner_i+1:]))\n\nall_distances = [d for s in scanners for d in s.distances]\n\nduplicates = []\nseen = {}\nfor i, s in enumerate(scanners):\n for s2 in scanners[i:]:\n if s.name != s2.name:\n print(\"comparing\", s.name, \"with\", s2.name)\n scanner_dups = []\n\n for dist in s.distances.values():\n for dist2 in s2.distances.values():\n n_sim_dist = 0\n for d in dist:\n if d in dist2:\n n_sim_dist += 1\n if n_sim_dist >= 1:\n scanner_dups.append(tuple(dist))\n continue\n \n print(len(scanner_dups))\n if len(scanner_dups) >= 12:\n duplicates += scanner_dups\n\nprint(len(all_distances) - len(set(duplicates)))\n" }, { "alpha_fraction": 0.5204081535339355, "alphanum_fraction": 0.5387755036354065, "avg_line_length": 16.5, "blob_id": "fa9ff889770551102ba865de9a118950ae5a6263", "content_id": "bfaa3e55170859d000d6007f5b5802a888b78d49", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 490, "license_type": "no_license", "max_line_length": 57, "num_lines": 28, "path": "/2022/1.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "from utils import read_file\n\nar = read_file(\"/home/fabrice/advent-of-code/2022/input\")\n\n#b = [ for a in ar]\n\nctr = 0\nelves = {}\n\nfor i, a in enumerate(ar):\n if a == \"\":\n ctr += 1\n else:\n if ctr in elves.keys():\n elves[ctr] += int(a)\n else:\n elves[ctr] = int(a)\n\nmax = 0\nfor bb in elves.values():\n if bb > max:\n max = bb\n\nlister = list(elves.values())\nlister = sorted(lister)\nprint(lister[-3:])\nprint(sum(lister[-3:]))\n#print(max)\n" }, { "alpha_fraction": 0.4424073398113251, "alphanum_fraction": 0.5013949871063232, "avg_line_length": 21.410715103149414, "blob_id": "cf50326900572a1ea69306421244390fc75e4566", "content_id": "6dc2ffa9a8a899e00af61d4760c6722411d35f71", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2509, "license_type": "no_license", "max_line_length": 64, "num_lines": 112, "path": "/2022/14-falling-rocks.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "import numpy as 
np\nnp.set_printoptions(threshold=np.inf)\nnp.set_printoptions(linewidth=np.inf)\nlines = []\n\nfile = open(\"/home/rprcz974/workspace/AOC/2022/input\", \"r\")\nfor l in file.readlines():\n lines.append(l.rstrip())\n\nlines2 = [\n \"498,4 -> 498,6 -> 496,6\",\n \"503,4 -> 502,4 -> 502,9 -> 494,9\",\n]\n\nrock_lines = []\nall_rocks = []\n\nfor line in lines:\n positions = line.split(\" -> \")\n positions = [pos.split(\",\") for pos in positions]\n positions = [[int(p[0]), int(p[1])] for p in positions]\n rock_lines.append(positions)\n all_rocks += positions\n\nmax_x, max_y = 0, 0\nmin_x, min_y = 1110, 1110\nfor p in all_rocks:\n if p[0] > max_x:\n max_x = p[0]\n\n if p[1] > max_y:\n max_y = p[1]\n \n if p[0] < min_x:\n min_x = p[0]\n \n if p[1] < min_y:\n min_y = p[1]\n\n#print(max_x, max_y)\n#print(min_x, min_y)\nmin_x -= 1\n\n# consts\nheight = max_y + 2\nlargeur = 2 * height - 1\nmiddle = height - 1\noffset = 500 - middle\n\n# reduce x to center\nfor l in rock_lines:\n for p in l:\n p[0] -= (offset)\n#print(rock_lines)\n\n# create grid\n\ngrid = [[\".\" for i in range(largeur)] for i in range(height)]\ngrid = np.array(grid)\n# fill grid with rocks\nfor line in rock_lines:\n rok1 = line.pop(0)\n while line:\n rok2 = line.pop(0)\n #print(rok1, rok2)\n if rok1[0] == rok2[0]:\n s, e = min(rok1[1], rok2[1]), max(rok1[1], rok2[1])\n grid[s:e+1, rok1[0]] = \"#\"\n else:\n s, e = min(rok1[0], rok2[0]), max(rok1[0], rok2[0])\n grid[rok2[1],s:e+1] = \"#\"\n rok1 = rok2\n#grid[1:10,2] = \"#\"\n#print(grid)\n\ndef fall(pp=None):\n #print(grid)\n if pp is None:\n pp = [0, middle]\n if grid[pp[0], pp[1]] == \"o\":\n return False\n #print(pp)\n if pp[0] == max_y + 1:\n grid[pp[0], pp[1]] = \"o\"\n return True\n if grid[pp[0] + 1, pp[1]] == \".\":\n pp = [pp[0] + 1, pp[1]]\n return fall(pp)\n elif grid[pp[0] + 1, pp[1] - 1] == \".\":\n pp = [pp[0] + 1, pp[1] - 1]\n return fall(pp)\n elif grid[pp[0] + 1, pp[1] + 1] == \".\":\n pp = [pp[0] + 1, pp[1] + 1]\n return fall(pp)\n elif grid[pp[0] + 1, pp[1]] in [\"#\", \"o\"]:\n grid[pp[0], pp[1]] = \"o\"\n return True\n\n\n\ndef part1():\n c = 0\n while fall():\n c += 1\n #print(grid)\n print(c)\n\npart1()\nf = open(\"/home/rprcz974/workspace/AOC/2022/grid.txt\", \"w\")\nlines_to_w = [''.join([str(a) for a in l]) + \"\\n\" for l in grid]\nf.writelines(lines_to_w)\nf.close()" }, { "alpha_fraction": 0.4514003396034241, "alphanum_fraction": 0.5140032768249512, "avg_line_length": 24.33333396911621, "blob_id": "0f78805a84190c37e5b748fa5f3f2ddb2299a3a0", "content_id": "44f472d938a0186693acdca11fd95ba36519be5a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 607, "license_type": "no_license", "max_line_length": 66, "num_lines": 24, "path": "/2022/6.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "from utils import read_file\n\nar = read_file(\"/home/fabrice/advent-of-code/2022/input\")[0]\na = \"mjqjpqmgbljsphdztnvjfqwrcgsmlb\"\n\ndef part1(ar):\n last_3 = ar[0:3]\n for i in range(3, len(ar)):\n if ar[i] not in last_3 and len(set(list(last_3))) == 3:\n print(i + 1)\n break\n else:\n last_3 = last_3[1:] + ar[i]\n\ndef part2(ar):\n last_13 = ar[0:13]\n for i in range(13, len(ar)):\n if ar[i] not in last_13 and len(set(list(last_13))) == 13:\n print(i + 1)\n break\n else:\n last_13 = last_13[1:] + ar[i]\n\npart2(ar)" }, { "alpha_fraction": 0.4731917977333069, "alphanum_fraction": 0.501619279384613, "avg_line_length": 30.224720001220703, "blob_id": 
"b28f54a3ad510832de6fdd90ce737dfc5c9f23ae", "content_id": "dfc4ed123009c1a0d488ad7005506c4965bf9c3d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2779, "license_type": "no_license", "max_line_length": 127, "num_lines": 89, "path": "/2022/18-trapped-cubes.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "from utils import read_file\n\nlines = read_file(\"/home/fabrice/advent-of-code/2022/input\")\n\n\ndef line_to_position(line):\n return tuple([int(a) for a in line.split(\",\")])\n\n\npositions = [line_to_position(l) for l in lines]\n\n\ndef get_adjacent_positions(p):\n adjacents = []\n adjacents.append((p[0] - 1, p[1], p[2]))\n adjacents.append((p[0] + 1, p[1], p[2]))\n adjacents.append((p[0], p[1] - 1, p[2]))\n adjacents.append((p[0], p[1] + 1, p[2]))\n adjacents.append((p[0], p[1], p[2] - 1))\n adjacents.append((p[0], p[1], p[2] + 1))\n return adjacents\n\n\ndef is_position_trapped(p, beams):\n xy, xz, yz = (\"z\", p[0], p[1]), (\"y\", p[0], p[2]), (\"x\", p[1], p[2])\n if xy not in beams.keys() or xz not in beams.keys() or yz not in beams.keys():\n return False\n is_between_z = (p[2] < sorted(beams[xy], key=lambda x: x[0])[-1][0] and p[2] > sorted(beams[xy], key=lambda x: x[0])[0][0])\n is_between_y = (p[1] < sorted(beams[xz], key=lambda x: x[0])[-1][0] and p[1] > sorted(beams[xz], key=lambda x: x[0])[0][0])\n is_between_x = (p[0] < sorted(beams[yz], key=lambda x: x[0])[-1][0] and p[0] > sorted(beams[yz], key=lambda x: x[0])[0][0])\n return (is_between_x and is_between_y and is_between_z)\n\n\nclass Trapper:\n\n def __init__(self):\n self.trapped = {}\n\n def is_pos_really_trapped(self, p, cubes, beams):\n\n if p in self.trapped.keys():\n return self.trapped[p]\n\n pile = [p]\n seen = set()\n while pile:\n cur = pile.pop()\n seen.add(cur)\n if not is_position_trapped(cur, beams):\n for pp in seen:\n self.trapped[pp] = False\n return False\n else:\n adjs = get_adjacent_positions(cur)\n for a in adjs:\n if a not in cubes and a not in seen:\n pile.append(a)\n for pp in seen:\n self.trapped[pp] = True\n return True\n\n\ndef solve(positions):\n beams = {}\n seen_cubes = set()\n for p in positions:\n seen_cubes.add(p)\n p_beams = [(\"z\", p[0], p[1], p[2]), (\"y\", p[0], p[2], p[1]), (\"x\", p[1], p[2], p[0])]\n for b in p_beams:\n beam = (b[0], b[1], b[2])\n val = b[3]\n if beam in beams.keys():\n beams[beam].append((val, p))\n else:\n beams[beam] = [(val, p)]\n t = Trapper()\n part1_ans = 0\n answer = 0\n for p in positions:\n adjs = get_adjacent_positions(p)\n for a in adjs:\n if a not in seen_cubes:\n part1_ans += 1\n if not t.is_pos_really_trapped(a, seen_cubes, beams):\n answer += 1\n print(\"part1:\", part1_ans)\n print(answer)\n\nsolve(positions)\n" }, { "alpha_fraction": 0.5981617569923401, "alphanum_fraction": 0.6158088445663452, "avg_line_length": 28.901098251342773, "blob_id": "01b37fc375cd74c9a597bd1b4508309dcc7c6199", "content_id": "6e64ad2660099bd6a6eeb95b167dcbef99c9662d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2720, "license_type": "no_license", "max_line_length": 68, "num_lines": 91, "path": "/old/2015/13.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "import itertools\nfrom typing import List\nfrom aocd import get_data\nar = get_data(day=13, year=2015)\nar = ar.splitlines()\n\n\nar2 = [\n \"Alice would gain 54 happiness units by sitting next to Bob.\",\n \"Alice would lose 79 happiness 
units by sitting next to Carol.\",\n \"Alice would lose 2 happiness units by sitting next to David.\",\n \"Bob would gain 83 happiness units by sitting next to Alice.\",\n \"Bob would lose 7 happiness units by sitting next to Carol.\",\n \"Bob would lose 63 happiness units by sitting next to David.\",\n \"Carol would lose 62 happiness units by sitting next to Alice.\",\n \"Carol would gain 60 happiness units by sitting next to Bob.\",\n \"Carol would gain 55 happiness units by sitting next to David.\",\n \"David would gain 46 happiness units by sitting next to Alice.\",\n \"David would lose 7 happiness units by sitting next to Bob.\",\n \"David would gain 41 happiness units by sitting next to Carol.\",\n]\n\ndef text_to_values(text):\n words = text.split(\" \")\n subject = words[0]\n other = words[-1][:-1]\n effect, amount = words[2], int(words[3])\n if effect == \"lose\":\n amount *= -1\n return subject, other, amount\n\ndef compute_total_happiness(arrangement: List[str], effects):\n total = 0\n for i, person in enumerate(arrangement):\n total += effects[person][arrangement[i - 1]]\n if i == len(arrangement) - 1:\n total += effects[person][arrangement[0]]\n else:\n total += effects[person][arrangement[i + 1]]\n return total\n\ndef part1():\n\n effects = {}\n\n for line in ar:\n subject, other, amount = text_to_values(line)\n if subject not in effects.keys():\n effects[subject] = {other: amount}\n else:\n effects[subject][other] = amount\n \n people = effects.keys()\n arrangements = list(itertools.permutations(people))\n h = 0\n for aa in arrangements:\n h = max(h, compute_total_happiness(aa, effects))\n print(h)\n return h\n\ndef part2():\n prev_h = part1()\n effects = {}\n\n for line in ar:\n subject, other, amount = text_to_values(line)\n if subject not in effects.keys():\n effects[subject] = {other: amount}\n else:\n effects[subject][other] = amount\n # add myself\n effects[\"Fab\"] = {k: 0 for k in effects.keys()}\n for k in effects.keys():\n effects[k][\"Fab\"] = 0\n print(effects)\n\n people = effects.keys()\n arrangements = list(itertools.permutations(people))\n h = 0\n aaa = []\n for aa in arrangements:\n new_h = compute_total_happiness(aa, effects)\n if new_h > h:\n h = new_h\n aaa = aa\n print(aaa)\n print(h)\n \n\npart1()\npart2()" }, { "alpha_fraction": 0.4132557511329651, "alphanum_fraction": 0.43465614318847656, "avg_line_length": 27.31764793395996, "blob_id": "4a5f4674ed47108707587d466c9d619b75d5394a", "content_id": "ca3ab4b3a6b0fc1f8b2711211fac05e3dc1d8cae", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4813, "license_type": "no_license", "max_line_length": 85, "num_lines": 170, "path": "/2022/23-evolutive-map.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "import copy\nimport numpy as np\nfrom utils import read_file\n\nlines = read_file(\"/home/fabrice/advent-of-code/2022/input\")\nlines2 = [\n \"..............\",\n \"..............\",\n \".......#......\",\n \".....###.#....\",\n \"...#...#.#....\",\n \"....#...##....\",\n \"...#.###......\",\n \"...##.#.##....\",\n \"....#..#......\",\n \"..............\",\n \"..............\",\n \"..............\",\n]\n\ndef lines_to_pos(lines):\n pos = []\n for i, l in enumerate(lines):\n for j, el in enumerate(l):\n if el == \"#\":\n pos.append((i, j))\n return pos\n\nclass Directioner:\n global_idx = 0\n\n def __init__(self):\n self._directions = [\"N\", \"S\", \"W\", \"E\"]\n self.current_idx = Directioner.global_idx\n self.n_called = 0\n\n 
def get_current_dir(self):\n return self._directions[self.current_idx]\n\n def increment_dir(self):\n if self.n_called < len(self._directions):\n self.current_idx = (self.current_idx + 1) % len(self._directions)\n self.n_called += 1\n return True\n else:\n return False\n\n def global_incr_dir(self):\n Directioner.global_idx = (Directioner.global_idx + 1) % len(self._directions)\n\ndef get_neighs_pos(pos, direction):\n if direction == \"N\":\n return [\n (pos[0] - 1, pos[1] - 1),\n (pos[0] - 1, pos[1]),\n (pos[0] - 1, pos[1] + 1),\n ]\n if direction == \"E\":\n return [\n (pos[0] - 1, pos[1] + 1),\n (pos[0] , pos[1] + 1),\n (pos[0] + 1, pos[1] + 1),\n ]\n if direction == \"S\":\n return [\n (pos[0] + 1, pos[1] - 1),\n (pos[0] + 1, pos[1]),\n (pos[0] + 1, pos[1] + 1),\n ]\n if direction == \"W\":\n return [\n (pos[0] - 1, pos[1] - 1),\n (pos[0] , pos[1] - 1),\n (pos[0] + 1, pos[1] - 1),\n ]\n\ndef get_all_neighs(pos):\n return [\n (pos[0] - 1, pos[1] - 1),\n (pos[0] , pos[1] - 1),\n (pos[0] + 1, pos[1] - 1),\n\n (pos[0] - 1, pos[1] ),\n (pos[0] + 1, pos[1] ),\n\n (pos[0] + 1, pos[1] + 1),\n (pos[0] - 1, pos[1] + 1),\n (pos[0] , pos[1] + 1),\n ]\n\ndef is_alone(p, all_pos):\n neighs = get_all_neighs(p)\n for n in neighs:\n if n in all_pos:\n return False\n return True\n\ndef is_over(all_pos):\n for p in all_pos:\n neighs = get_all_neighs(p)\n for n in neighs:\n if n in all_pos:\n return False\n return True\n\n\ndef count_dots(all_pos):\n min_i, max_i, min_j, max_j = np.inf, 0, np.inf, 0\n for p in all_pos:\n min_i = min(min_i, p[0])\n min_j = min(min_j, p[1])\n max_i = max(max_i, p[0])\n max_j = max(max_j, p[1])\n #print(\"min_i\", min_i, \"min_j\", min_j, \"max_i\", max_i, \"max_j\", max_j)\n return ((max_i - min_i + 1) * (max_j - min_j + 1)) - len(all_pos)\n\ndef part1():\n main_dirrer = Directioner()\n all_pos = lines_to_pos(lines)\n for i in range(1000):\n #print(\"positions at start of\", i, \":\", all_pos)\n print(i)\n if is_over(all_pos):\n print(\"PART2 is over at:\", i + 1)\n break\n # first phase: propose moves\n proposed_moves = [None for _ in range(len(all_pos))]\n for p_idx, p in enumerate(all_pos):\n # if already alone, don't move\n if is_alone(p, all_pos):\n continue\n # try all dirs in order, set proposed move if possible\n local_dirrer = Directioner()\n for _ in range(4):\n dir = local_dirrer.get_current_dir()\n neighs = get_neighs_pos(p, dir)\n is_dir_ok = True\n for n in neighs:\n if n in all_pos:\n is_dir_ok = False\n break\n if is_dir_ok:\n #print(dir, p, neighs[1])\n proposed_moves[p_idx] = tuple(neighs[1])\n break\n else:\n local_dirrer.increment_dir()\n #print(proposed_moves)\n # second phase\n val_n_i = {}\n for i, prop in enumerate(proposed_moves):\n if prop not in val_n_i.keys():\n val_n_i[prop] = [i]\n else:\n val_n_i[prop].append(i)\n for poss, indexes in val_n_i.items():\n if poss is not None and len(indexes) > 1:\n for idx in indexes:\n proposed_moves[idx] = None\n #print(proposed_moves, val_n_i)\n for i, move_pos in enumerate(proposed_moves):\n if move_pos is not None:\n all_pos[i] = move_pos\n\n main_dirrer.global_incr_dir()\n # part1\n #print(count_dots(all_pos))\n\n\npart1()" }, { "alpha_fraction": 0.3420158624649048, "alphanum_fraction": 0.3635334074497223, "avg_line_length": 13.716666221618652, "blob_id": "b1eae8e0e1e4a7e862325be9fa55d77888ebab75", "content_id": "533b1181e91926d527eb10530ca5dc4ef2db43c6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 883, "license_type": "no_license", 
"max_line_length": 57, "num_lines": 60, "path": "/2022/2.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "from utils import read_file\n\nar = read_file(\"/home/fabrice/advent-of-code/2022/input\")\n\n#ar = [\n# \"A Y\",\n# \"B X\",\n# \"C Z\"\n#]\n\n#b = [ for a in ar]\n\ntotal = 0\n\nvalus = {\n \"A\": 1, \"B\": 2, \"C\": 3\n}\n\nfor i, a in enumerate(ar):\n f, s = a[0], a[2]\n win = False\n draw = False\n lose = False\n sum = 0\n\n if s == \"X\":\n lose = True\n elif s == \"Y\":\n draw = True\n elif s == \"Z\":\n win = True\n\n if win:\n sum += 6\n elif draw: \n sum += 3\n \n if draw:\n sum += valus[f]\n \n if lose:\n if f == \"A\":\n sum += 3\n if f == \"B\":\n sum += 1\n if f == \"C\":\n sum += 2\n\n if win:\n if f == \"A\":\n sum += 2\n if f == \"B\":\n sum += 3\n if f == \"C\":\n sum += 1\n print(win)\n print(sum)\n total += sum\n\nprint(total)\n" }, { "alpha_fraction": 0.5527222752571106, "alphanum_fraction": 0.5640937089920044, "avg_line_length": 33.34911346435547, "blob_id": "776b0944a80aadc851fc5e329639f933c374da0c", "content_id": "d9c9caf3ec19c3ba7b4fc4de88d368d647441e82", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5804, "license_type": "no_license", "max_line_length": 119, "num_lines": 169, "path": "/2022/16-valves-dijkstra.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "import heapq\nimport copy\nimport itertools\nfrom utils import read_file\n\nar = read_file(\"/home/fabrice/advent-of-code/2022/input\")\n\nar2 = [\n \"Valve AA has flow rate=0; tunnels lead to valves DD, II, BB\",\n \"Valve BB has flow rate=13; tunnels lead to valves CC, AA\",\n \"Valve CC has flow rate=2; tunnels lead to valves DD, BB\",\n \"Valve DD has flow rate=20; tunnels lead to valves CC, AA, EE\",\n \"Valve EE has flow rate=3; tunnels lead to valves FF, DD\",\n \"Valve FF has flow rate=0; tunnels lead to valves EE, GG\",\n \"Valve GG has flow rate=0; tunnels lead to valves FF, HH\",\n \"Valve HH has flow rate=22; tunnel leads to valve GG\",\n \"Valve II has flow rate=0; tunnels lead to valves AA, JJ\",\n \"Valve JJ has flow rate=21; tunnel leads to valve II\",\n]\n\nMAX_TIME = 26 # 30 for part 1\n\ndef lines_to_valves(lines):\n valves = {}\n for l in lines:\n v_name = l.split(\" \")[1]\n v_rate = int(l.split(\" \")[4].split(\"=\")[1].split(\";\")[0])\n next_vs = [a[:2] for a in l.split(\" \")[9:]]\n valves[v_name] = {\"r\": v_rate, \"next_vs\": next_vs}\n return valves\n\ndef find_positive_valves(valves):\n positive_valves = []\n for key, val in valves.items():\n if val[\"r\"] > 0:\n positive_valves.append(key)\n return positive_valves\n\ndef dijkstra(valves, start, pos_valves):\n distances = {start: 0}\n seen = {start}\n nodes_to_visit = [(0, start)]\n\n heapq.heapify(nodes_to_visit)\n\n while nodes_to_visit:\n cur_node = heapq.heappop(nodes_to_visit)\n\n if cur_node[1] in pos_valves:\n distances[cur_node[1]] = cur_node[0]\n\n adjs = valves[cur_node[1]][\"next_vs\"]\n for adj in adjs:\n if adj not in seen:\n seen.add(adj)\n heapq.heappush(nodes_to_visit, ((cur_node[0] + 1), adj))\n \n return {k: v for k, v in distances.items() if k in pos_valves}\n\ndef get_next_possible_valves(sol, all_dists):\n next_valves = []\n remaining_time = MAX_TIME - sol[\"time\"] + 1\n #remaining_time = MAX_TIME - sol[\"time\"]\n remaining_time = MAX_TIME - sol[\"time\"] - 1\n for v in sol[\"unseen\"]:\n if all_dists[sol[\"current\"]][v] < remaining_time and v != sol[\"current\"]:\n 
next_valves.append(v)\n return next_valves\n\n\ndef update_sol_open_valve(sol, valves):\n new_sol = copy.deepcopy(sol)\n new_sol[\"time\"] += 1\n new_sol[\"score\"] += new_sol[\"flow\"]\n new_sol[\"flow\"] += valves[new_sol[\"current\"]][\"r\"]\n new_sol[\"opened\"] += [new_sol[\"current\"]]\n new_sol[\"unseen\"].remove(new_sol[\"current\"])\n return new_sol\n\ndef part1(valves_to_visit, valves, pos_valves, all_dists):\n #valves = lines_to_valves(ar)\n #pos_valves = find_positive_valves(valves)\n\n #all_dists = {\"AA\": dijkstra(valves, \"AA\", pos_valves)}\n #for pp in pos_valves:\n # all_dists[pp] = dijkstra(valves, pp, pos_valves)\n\n unseen = copy.deepcopy(valves_to_visit)\n opened = []\n start = \"AA\"\n time = 0\n score = 0\n solutions = [{\"unseen\": unseen, \"opened\": opened, \"current\": start, \"time\": time, \"score\": score, \"flow\": 0}]\n\n big_max = 0\n\n while solutions:\n sol111 = solutions.pop()\n sol = copy.deepcopy(sol111)\n\n if sol[\"time\"] > MAX_TIME:\n print(\"wtf time exceeded shoudnt happen\")\n break\n next_valves = get_next_possible_valves(sol, all_dists)\n\n # all valves are already opened or no reachable valve until end\n if len(next_valves) == 0:\n # if current valve hasnt been opened, open it\n new_sol = copy.deepcopy(sol)\n if new_sol[\"current\"] not in new_sol[\"opened\"] and new_sol[\"current\"] in new_sol[\"unseen\"]:\n new_sol = update_sol_open_valve(new_sol, valves)\n # compute end of game score\n remaining_time = MAX_TIME - new_sol[\"time\"]\n final_score = new_sol[\"score\"] + new_sol[\"flow\"] * remaining_time\n #print(sol)\n if final_score > big_max:\n big_max = final_score\n #print(final_score)\n #print(new_sol)\n \n else:\n if sol[\"current\"] != \"AA\":\n new_sol = update_sol_open_valve(sol, valves)\n next_valves = get_next_possible_valves(sol, all_dists)\n\n for nv in next_valves:\n dist = all_dists[sol[\"current\"]][nv]\n sol_to_push = copy.deepcopy(new_sol)\n sol_to_push[\"time\"] += dist\n sol_to_push[\"score\"] += (new_sol[\"flow\"] * dist)\n sol_to_push[\"current\"] = nv\n solutions.append(sol_to_push)\n else:\n for nv in next_valves:\n first_sol = copy.deepcopy(sol)\n first_sol[\"time\"] = sol[\"time\"] + all_dists[sol[\"current\"]][nv]\n first_sol[\"current\"] = nv\n solutions.append(first_sol)\n #print(big_max)\n return big_max\n\n#part1()\n\ndef two_partitions(S):\n res_list = []\n for l in range(0,int(len(S)/2)+1):\n combis = set(itertools.combinations(S,l))\n for c in combis:\n res_list.append((sorted(list(c)), sorted(list(S-set(c)))))\n return res_list\n\n\ndef part2():\n valves = lines_to_valves(ar)\n pos_valves = find_positive_valves(valves)\n all_dists = {\"AA\": dijkstra(valves, \"AA\", pos_valves)}\n for pp in pos_valves:\n all_dists[pp] = dijkstra(valves, pp, pos_valves)\n\n all_combs = two_partitions(set(pos_valves))[::-1]\n mmax = 0\n for comb in all_combs:\n humn_nodes, elephant_nodes = comb[0], comb[1]\n score = part1(humn_nodes, valves, pos_valves, all_dists) + part1(elephant_nodes, valves, pos_valves, all_dists)\n if score > mmax:\n print(score)\n mmax = score\n\npart2()" }, { "alpha_fraction": 0.3781512677669525, "alphanum_fraction": 0.4065934121608734, "avg_line_length": 20.100000381469727, "blob_id": "199d970f4177d293f8f45b3a9ae1b69368eefcae", "content_id": "73859db77582142549944ec5374d5b52f0274d47", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1547, "license_type": "no_license", "max_line_length": 80, "num_lines": 70, "path": "/2022/25-ternary-base.py", 
"repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "lines = []\r\n\r\nfile = open(\"/home/fab/AOC/2022/input\", \"r\")\r\nfor l in file.readlines():\r\n lines.append(l.rstrip())\r\ndef symbol_to_dec_val(s):\r\n if s == \"2\":\r\n return 2\r\n if s == \"1\":\r\n return 1\r\n if s == \"0\":\r\n return 0\r\n if s == \"-\":\r\n return -1\r\n if s == \"=\":\r\n return -2\r\n\r\ndef dec_val_to_symbol(v):\r\n if v == 2:\r\n return \"2\"\r\n if v == 1:\r\n return \"1\"\r\n if v == 0:\r\n return \"0\"\r\n if v == -1:\r\n return \"-\"\r\n if v == -2:\r\n return \"=\"\r\n\r\ndef add_single(a, b, r=0):\r\n dec_val = symbol_to_dec_val(a) + symbol_to_dec_val(b) + symbol_to_dec_val(r)\r\n ret = \"0\"\r\n if dec_val < 3 and dec_val > -3:\r\n return ret, dec_val_to_symbol(dec_val)\r\n if dec_val == 3:\r\n return \"1\", \"=\"\r\n if dec_val == 4:\r\n return \"1\", \"-\"\r\n if dec_val == 5:\r\n return \"1\", \"0\"\r\n if dec_val == -3:\r\n return \"-\", \"2\"\r\n if dec_val == -4:\r\n return \"-\", \"1\"\r\n if dec_val == -5:\r\n return \"-\", \"0\"\r\n\r\ndef add(a, b):\r\n if len(b) > len(a):\r\n a, b = b, a\r\n if len(a) > len(b):\r\n b = \"\".join([\"0\" for i in range(len(a) - len(b))]) + b\r\n ans = \"\"\r\n r = \"0\"\r\n for i in range(len(a)):\r\n indx = len(a) - (i + 1)\r\n a_s, b_s = a[indx], b[indx]\r\n r, val = add_single(a_s, b_s, r)\r\n ans = val + ans\r\n if r != \"0\":\r\n ans = r + ans\r\n return ans\r\n\r\ndef part1():\r\n ans = \"0\"\r\n for line in lines:\r\n ans = add(ans, line)\r\n print(ans)\r\n\r\npart1()\r\n" }, { "alpha_fraction": 0.5620864629745483, "alphanum_fraction": 0.5722419023513794, "avg_line_length": 29.79620933532715, "blob_id": "010a68188098833edff788abddffebecd61de6ce", "content_id": "a655831d400f3435e17360cdac6b07aee0a61e00", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6499, "license_type": "no_license", "max_line_length": 98, "num_lines": 211, "path": "/2022/19-starcraft-like-game.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "from functools import cache\nfrom enum import Enum\nfrom typing import List, Tuple\nimport copy\n\nlines = []\n\nfile = open(\"/home/fab/AOC/2022/input\", \"r\")\nfor l in file.readlines():\n lines.append(l.rstrip())\n\nclass Choice(Enum):\n BUILD_ORE_ROBOT = 1\n BUILD_CLAY_ROBOT = 2\n BUILD_OBS_ROBOT = 3\n BUILD_GEODE_ROBOT = 4\n\nclass Config:\n def __init__(self, config_line):\n self.id = config_line[0]\n self.ore_robot_cost = {\"ore\": config_line[1]}\n self.clay_robot_cost = {\"ore\": config_line[2]}\n self.obs_robot_cost = {\"ore\": config_line[3], \"clay\": config_line[4]}\n self.geode_robot_cost = {\"ore\": config_line[5], \"obs\": config_line[6]}\n self.MAX_TIME = 32\n self.MAX_FOUND = 0\n\n\nclass GameData:\n def __init__(self, arr_repr=None):\n if arr_repr is None:\n self.current_time = 1\n self.ore = 0\n self.clay = 0\n self.obs = 0\n self.geode = 0\n self.ore_robots = 1\n self.clay_robots = 0\n self.obs_robots = 0\n self.geode_robots = 0\n else:\n self.current_time = arr_repr[0]\n self.ore = arr_repr[1]\n self.clay = arr_repr[2]\n self.obs = arr_repr[3]\n self.geode = arr_repr[4]\n self.ore_robots = arr_repr[5]\n self.clay_robots = arr_repr[6]\n self.obs_robots = arr_repr[7]\n self.geode_robots = arr_repr[8]\n \n def get_arr_repr(self):\n return (\n self.current_time,\n self.ore,\n self.clay,\n self.obs,\n self.geode,\n self.ore_robots,\n self.clay_robots,\n self.obs_robots,\n self.geode_robots,\n )\n\ndef 
lines_to_game_configs(lines):\n configs = []\n for line in lines:\n config_line = []\n words = line.split(\" \")\n config_line.append(int(words[1].split(\":\")[0]))\n config_line.append(int(words[6]))\n config_line.append(int(words[12]))\n config_line.append(int(words[18]))\n config_line.append(int(words[21]))\n config_line.append(int(words[27]))\n config_line.append(int(words[30]))\n configs.append(Config(config_line=config_line))\n return configs\n\ndef get_resources_from_choice(choice, config):\n if choice == Choice.BUILD_ORE_ROBOT:\n return config.ore_robot_cost\n elif choice == Choice.BUILD_CLAY_ROBOT:\n return config.clay_robot_cost\n elif choice == Choice.BUILD_OBS_ROBOT:\n return config.obs_robot_cost\n elif choice == Choice.BUILD_GEODE_ROBOT:\n return config.geode_robot_cost\n\ndef can_be_built(choice, config, game_data):\n required = get_resources_from_choice(choice, config)\n for k, v in required.items():\n if k == \"ore\":\n if v > game_data.ore:\n return False\n elif k == \"clay\":\n if v > game_data.clay:\n return False\n elif k == \"obs\":\n if v > game_data.obs:\n return False\n return True\n\ndef mine(game_data):\n game_data.ore += game_data.ore_robots\n game_data.clay += game_data.clay_robots\n game_data.obs += game_data.obs_robots\n game_data.geode += game_data.geode_robots\n\ndef build(game_data, choice, config):\n res = get_resources_from_choice(choice, config)\n if choice == Choice.BUILD_ORE_ROBOT:\n game_data.ore_robots += 1\n elif choice == Choice.BUILD_CLAY_ROBOT:\n game_data.clay_robots += 1\n elif choice == Choice.BUILD_OBS_ROBOT:\n game_data.obs_robots += 1\n elif choice == Choice.BUILD_GEODE_ROBOT:\n game_data.geode_robots += 1\n for k, v in res.items():\n if k == \"ore\":\n game_data.ore -= v\n elif k == \"clay\":\n game_data.clay -= v\n elif k == \"obs\":\n game_data.obs -= v\n\ndef generate_choices(game_data, config):\n choices = []\n\n max_ore_cost = max([\n config.ore_robot_cost[\"ore\"],\n config.clay_robot_cost[\"ore\"],\n config.obs_robot_cost[\"ore\"],\n config.geode_robot_cost[\"ore\"],\n ])\n\n if game_data.ore_robots < max_ore_cost:\n choices.append(Choice.BUILD_ORE_ROBOT)\n if game_data.clay_robots < config.obs_robot_cost[\"clay\"]:\n choices.append(Choice.BUILD_CLAY_ROBOT)\n if game_data.clay_robots >= 1 and game_data.obs_robots < config.geode_robot_cost[\"obs\"]:\n choices.append(Choice.BUILD_OBS_ROBOT)\n if game_data.obs_robots >= 1:\n choices.append(Choice.BUILD_GEODE_ROBOT)\n\n return choices\n\n\ndef get_remaining_time(config, game_data):\n return (config.MAX_TIME - game_data.current_time) + 1\n\ndef get_potential(config, game_data):\n remaining_time = get_remaining_time(config, game_data)\n potential = sum([i + game_data.geode_robots for i in range(remaining_time)]) + game_data.geode\n return potential > config.MAX_FOUND\n\n@cache\ndef get_max_geodes(gd: Tuple, conf: Config, choice: Choice = None):\n data = GameData(arr_repr=gd)\n\n if not get_potential(conf, data):\n return 0\n\n if choice is None:\n return gen_choices_max([Choice.BUILD_CLAY_ROBOT, Choice.BUILD_ORE_ROBOT], data, conf)\n\n if data.current_time == conf.MAX_TIME + 1:\n return data.geode\n building = can_be_built(choice, conf, data)\n mine(data)\n data.current_time += 1\n if building:\n build(data, choice, conf)\n choices = generate_choices(data, conf)\n return gen_choices_max(choices, data, conf)\n else:\n return get_max_geodes(data.get_arr_repr(), conf, choice)\n\n\n\ndef gen_choices_max(choices: List, gd, conf):\n scores = []\n for c in choices:\n 
scores.append(get_max_geodes(copy.deepcopy(gd.get_arr_repr()), conf, c))\n conf.MAX_FOUND = max(conf.MAX_FOUND, max(scores))\n return max(scores)\n\n\ndef part1():\n answer = 0\n configs = lines_to_game_configs(lines)\n for c in configs:\n gd = GameData()\n geodes = get_max_geodes(gd.get_arr_repr(), c)\n print(\"found\", geodes, \"geodes for id\", c.id)\n answer += geodes * c.id\n print(answer)\n#part1()\n\n\ndef part2():\n answer = 1\n configs = lines_to_game_configs(lines)\n for c in configs[:3]:\n gd = GameData()\n geodes = get_max_geodes(gd.get_arr_repr(), c)\n print(\"found\", geodes, \"geodes for id\", c.id)\n answer *= geodes\n print(answer)\npart2()\n\n" }, { "alpha_fraction": 0.5257301926612854, "alphanum_fraction": 0.5438108444213867, "avg_line_length": 22.933332443237305, "blob_id": "6a65fc652016d534dc4b9b6fa82e225daf55cff4", "content_id": "8652d65921546c76eed5328cbcb615825dad2948", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 719, "license_type": "no_license", "max_line_length": 66, "num_lines": 30, "path": "/old/2015/12.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "import json\nfrom aocd import get_data\nar = get_data(day=12, year=2015)\nar = json.loads(ar)\n\ndef add_json_nums(json_val, buf):\n if isinstance(json_val, int):\n return json_val\n elif isinstance(json_val, dict):\n ans = 0\n if \"red\" in json_val.keys() or \"red\" in json_val.values():\n return 0\n for k, v in json_val.items():\n ans += add_json_nums(k, buf)\n ans += add_json_nums(v, buf)\n return ans\n elif isinstance(json_val, list):\n ans = 0\n for v in json_val:\n ans += add_json_nums(v, buf)\n return ans\n else:\n return 0\n\ndef part1(ar):\n ans = 0\n ans = add_json_nums(ar, ans)\n print(ans)\n\npart1(ar)\n\n" }, { "alpha_fraction": 0.46685081720352173, "alphanum_fraction": 0.49005526304244995, "avg_line_length": 26.846153259277344, "blob_id": "051060a1529eaec406a763da75ea123f856dfe80", "content_id": "328caf471b5025e1424eacc0d646fcaa7992725e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1810, "license_type": "no_license", "max_line_length": 80, "num_lines": 65, "path": "/old/2015/14.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "from aocd import get_data\nar = get_data(day=14, year=2015)\nar = ar.splitlines()\nprint(ar)\n\nar2 = [\n \"Comet can fly 14 km/s for 10 seconds, but then must rest for 127 seconds.\",\n \"Dancer can fly 16 km/s for 11 seconds, but then must rest for 162 seconds.\"\n]\n\ndef extract_deer_from_line(line):\n words = line.split(\" \")\n name = words[0]\n speed = int(words[3])\n move_time = int(words[6])\n rest_time = int(words[13])\n return {\n \"name\": name,\n \"speed\": speed,\n \"move_time\": move_time,\n \"rest_time\": rest_time,\n \"position\": 0,\n \"cur_mode\": \"move\",\n \"cur_rest_time\": 0,\n \"cur_move_time\": 0,\n \"points\": 0\n }\n\ndef find_lead(deers):\n maxx, lead = 0, []\n for d in deers:\n if d[\"position\"] > maxx:\n maxx = d[\"position\"]\n lead = [d]\n elif d[\"position\"] == maxx:\n lead.append(d)\n return lead\n\ndef part1(seconds):\n\n deers = []\n for l in ar:\n deers.append(extract_deer_from_line(l))\n\n for s in range(seconds):\n for deer in deers:\n if deer[\"cur_mode\"] == \"move\":\n deer[\"cur_move_time\"] += 1\n deer[\"position\"] += deer[\"speed\"]\n if deer[\"cur_move_time\"] == deer[\"move_time\"]:\n deer[\"cur_move_time\"] = 0\n deer[\"cur_mode\"] = \"rest\"\n 
elif deer[\"cur_mode\"] == \"rest\":\n deer[\"cur_rest_time\"] += 1\n if deer[\"cur_rest_time\"] == deer[\"rest_time\"]:\n deer[\"cur_rest_time\"] = 0\n deer[\"cur_mode\"] = \"move\"\n ds = find_lead(deers)\n print([d[\"name\"] for d in ds])\n for d in ds:\n d[\"points\"] += 1\n print(deers)\n print(max([d[\"points\"] for d in deers]))\n\npart1(2503)\n" }, { "alpha_fraction": 0.5224884152412415, "alphanum_fraction": 0.5585007667541504, "avg_line_length": 31.512563705444336, "blob_id": "f99643b316c8970e6bfd39f7e1e50fe78787585f", "content_id": "a1c4bd6eaca66f990fa5ff891837f5c853de7f70", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12940, "license_type": "no_license", "max_line_length": 114, "num_lines": 398, "path": "/2022/22-cube-map.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "from utils import read_file\nfrom trans22 import *\nfrom typing import List\nfrom enum import Enum\nimport numpy as np\n\n\nlines = read_file(\"/home/fabrice/advent-of-code/2022/input\")\n\nlines2 = [\n \" ...# \", # 1111 \n \" .#.. \", # 1111 \n \" #... \", # 1111 \n \" .... \", # 1111 \n \"...#.......# \", #222233334444 \n \"........#... \", #222233334444 \n \"..#....#.... \", #222233334444 \n \"..........#. \", #222233334444 \n \" ...#....\", # 55556666\n \" .....#..\", # 55556666\n \" .#......\", # 55556666\n \" ......#.\", # 55556666\n \"\",\n \"10R5L5R10L4R5L5\",\n\n]\n\nclass Dir(Enum):\n RIGHT = 0\n DOWN = 1\n LEFT = 2\n UP = 3\n\nclass Ori(Enum):\n EAST = 0\n SOUTH = 1\n WEST = 2\n NORTH = 3\n\nclass Rotator:\n def __init__(self):\n self.current_dir_index = 0\n self.ordering = [Dir.RIGHT, Dir.DOWN, Dir.LEFT, Dir.UP]\n def rotate(self, letter):\n if letter == \"R\":\n self.current_dir_index += 1\n elif letter == \"L\":\n self.current_dir_index += 3\n self.current_dir_index = self.current_dir_index % len(self.ordering)\n def get_current_dir(self):\n return self.ordering[self.current_dir_index]\n\n\nclass FaceShifter:\n def __init__(self, coming_in_from_ori: Ori, pos_transformer):\n self.coming_in_from_ori = coming_in_from_ori\n self.pos_transformer = pos_transformer\n\n def shift(self, last_pos):\n new_dir, new_pos = None, None\n new_pos = self.pos_transformer(last_pos)\n if self.coming_in_from_ori == Ori.NORTH:\n new_dir = Dir.DOWN\n if new_pos[0] != 0:\n print(\"problem: coming from north but newpos[0] is not 0\")\n if self.coming_in_from_ori == Ori.EAST:\n new_dir = Dir.LEFT\n if new_pos[1] != 49:\n print(\"problem: coming from east but newpos[1] is not 49\")\n if self.coming_in_from_ori == Ori.SOUTH:\n new_dir = Dir.UP\n if new_pos[0] != 49:\n print(\"problem: coming from north but newpos[0] is not 49\")\n if self.coming_in_from_ori == Ori.WEST:\n new_dir = Dir.RIGHT\n if new_pos[1] != 0:\n print(\"problem: coming from north but newpos[1] is not 0\")\n \n return new_dir, new_pos\n\n\n\nclass Face:\n def __init__(self):\n self.map = []\n\n self.upper_face = None\n self.downward_face = None\n self.right_face = None\n self.left_face = None\n\n self.upper_face_shifter: FaceShifter = None\n self.downward_face_shifter: FaceShifter = None\n self.right_face_shifter: FaceShifter = None\n self.left_face_shifter: FaceShifter = None\n\n self.id = 1\n \n def add_row(self, row):\n if len(self.map) == 0:\n self.map.append(list(row))\n else:\n self.map.append(list(row))\n\n def get_size(self):\n return len(self.map)\n\n def get_el_at_pos(self, pos):\n return self.map[pos[0]][pos[1]]\n\n def __repr__(self):\n string = \"\"\n for 
r in self.map:\n string += \"\".join(r) + \"\\n\"\n return string\n\n\ndef get_new_pos(pos: List, dir: Dir):\n if dir == Dir.UP:\n new_pos = (pos[0] - 1, pos[1])\n if dir == Dir.DOWN:\n new_pos = (pos[0] + 1, pos[1])\n if dir == Dir.LEFT:\n new_pos = (pos[0], pos[1] - 1)\n if dir == Dir.RIGHT:\n new_pos = (pos[0], pos[1] + 1)\n return new_pos\n\n\n#def get_real_new_pos(pos, dir: Dir, face: Face):\n# def update_face(face, new_face):\n# if new_face is not None:\n# return new_face\n# else:\n# return face\n# temp_new_pos = get_new_pos(pos, dir)\n# maxv = face.get_size() - 1\n# vert = temp_new_pos[0]\n# hori = temp_new_pos[1]\n# new_face = face\n# if vert > maxv: # too down\n# vert = 0\n# new_face = update_face(face, face.downward_face)\n# if vert < 0: # too up\n# vert = maxv\n# new_face = update_face(face, face.upper_face)\n# if hori > maxv: # too right\n# hori = 0\n# new_face = update_face(face, face.right_face)\n# if hori < 0: # too left\n# hori = maxv\n# new_face = update_face(face, face.left_face)\n# return (vert, hori), new_face\n\ndef get_real_new_pos(pos, dir: Dir, face: Face):\n def update_face(face, new_face):\n if new_face is not None:\n return new_face\n else:\n return face\n temp_new_pos = get_new_pos(pos, dir)\n maxv = face.get_size() - 1\n vert = temp_new_pos[0]\n hori = temp_new_pos[1]\n new_pos = temp_new_pos\n new_dir = dir\n new_face = face\n if vert > maxv: # too down\n new_dir, new_pos = face.downward_face_shifter.shift(pos)\n new_face = update_face(face, face.downward_face)\n if vert < 0: # too up\n new_dir, new_pos = face.upper_face_shifter.shift(pos)\n new_face = update_face(face, face.upper_face)\n if hori > maxv: # too right\n new_dir, new_pos = face.right_face_shifter.shift(pos)\n new_face = update_face(face, face.right_face)\n if hori < 0: # too left\n new_dir, new_pos = face.left_face_shifter.shift(pos)\n new_face = update_face(face, face.left_face)\n return new_dir, new_pos, new_face\n\n\ndef get_faces(ar):\n min_row_len = np.inf\n for row in ar:\n if row != \"\" and row != ar[-1]:\n if len(row.replace(\" \", \"\")) < min_row_len:\n min_row_len = len(row.replace(\" \", \"\"))\n cube_dim = min_row_len\n faces: List[Face] = []\n prev_n_faces = 0\n for row in ar:\n if row == \"\" or row == ar[-1]:\n continue\n r = list(row.replace(\" \", \"\"))\n n_faces = len(r) // cube_dim\n if n_faces != prev_n_faces:\n faces += [Face() for _ in range(n_faces)]\n faces_rows = list(np.array_split(r, n_faces))\n for i, single_row in enumerate(faces_rows):\n faces[-(n_faces - i)].add_row(single_row)\n prev_n_faces = n_faces\n return faces\n\ndef get_input_directions(ar):\n dirss = ar[-1]\n #dirss = \"2R1L99R5L99L5R11L5R99R1L99R1L99L1R99R1L5R6R99L1R2L2R2L2R2L1L11R99L5L1L5R1R5L3R2L33R7R4L\"\n in_dirs = []\n distances = []\n cur = \"\"\n for i, el in enumerate(dirss):\n if el in [\"L\", \"R\"]:\n in_dirs.append(el)\n else:\n cur += el\n if i + 1 < len(dirss) and dirss[i + 1] in [\"L\", \"R\"]:\n distances.append(int(cur))\n cur = \"\"\n\n return in_dirs, distances + [int(\"\".join(dirss[-2:]))]\n\ndef arrange_faces_SAMPLE_TEST(faces: List[Face]):\n faces[0].downward_face = faces[3]\n faces[0].upper_face = faces[5]\n\n faces[1].left_face = faces[3]\n faces[1].right_face = faces[2]\n\n faces[2].left_face = faces[1]\n faces[2].right_face = faces[3]\n\n faces[3].left_face = faces[2]\n faces[3].right_face = faces[1]\n faces[3].upper_face = faces[0]\n faces[3].downward_face = faces[4]\n\n faces[4].left_face = faces[5]\n faces[4].right_face = faces[5]\n faces[4].upper_face = faces[3]\n 
faces[4].downward_face = faces[0]\n\n faces[5].right_face = faces[4]\n faces[5].left_face = faces[4]\n\n faces[0].id = 1\n faces[1].id = 2\n faces[2].id = 3\n faces[3].id = 4\n faces[4].id = 5\n faces[5].id = 6\n\ndef arrange_faces(faces: List[Face]):\n faces[0].left_face = faces[3]\n faces[0].right_face = faces[1]\n faces[0].upper_face = faces[5]\n faces[0].downward_face = faces[2]\n\n faces[0].left_face_shifter = FaceShifter(Ori.WEST, face1_left_trans)\n faces[0].right_face_shifter = FaceShifter(Ori.WEST, face1_right_trans)\n faces[0].upper_face_shifter = FaceShifter(Ori.WEST, face1_up_trans)\n faces[0].downward_face_shifter = FaceShifter(Ori.NORTH, face1_down_trans)\n\n faces[1].left_face = faces[0]\n faces[1].right_face = faces[4]\n faces[1].upper_face = faces[5]\n faces[1].downward_face = faces[2]\n\n faces[1].left_face_shifter = FaceShifter(Ori.EAST, face2_left_trans)\n faces[1].right_face_shifter = FaceShifter(Ori.EAST, face2_right_trans)\n faces[1].upper_face_shifter = FaceShifter(Ori.SOUTH, face2_up_trans)\n faces[1].downward_face_shifter = FaceShifter(Ori.EAST, face2_down_trans)\n\n faces[2].left_face = faces[3]\n faces[2].right_face = faces[1]\n faces[2].upper_face = faces[0]\n faces[2].downward_face = faces[4]\n\n faces[2].left_face_shifter = FaceShifter(Ori.NORTH, face3_left_trans)\n faces[2].right_face_shifter = FaceShifter(Ori.SOUTH, face3_right_trans)\n faces[2].upper_face_shifter = FaceShifter(Ori.SOUTH, face3_up_trans)\n faces[2].downward_face_shifter = FaceShifter(Ori.NORTH, face3_down_trans)\n\n faces[3].left_face = faces[0]\n faces[3].right_face = faces[4]\n faces[3].upper_face = faces[2]\n faces[3].downward_face = faces[5]\n\n faces[3].left_face_shifter = FaceShifter(Ori.WEST, face4_left_trans)\n faces[3].right_face_shifter = FaceShifter(Ori.WEST, face4_right_trans)\n faces[3].upper_face_shifter = FaceShifter(Ori.WEST, face4_up_trans)\n faces[3].downward_face_shifter = FaceShifter(Ori.NORTH, face4_down_trans)\n\n faces[4].left_face = faces[3]\n faces[4].right_face = faces[1]\n faces[4].upper_face = faces[2]\n faces[4].downward_face = faces[5]\n\n faces[4].left_face_shifter = FaceShifter(Ori.EAST, face5_left_trans)\n faces[4].right_face_shifter = FaceShifter(Ori.EAST, face5_right_trans)\n faces[4].upper_face_shifter = FaceShifter(Ori.SOUTH, face5_up_trans)\n faces[4].downward_face_shifter = FaceShifter(Ori.EAST, face5_down_trans)\n\n faces[5].left_face = faces[0]\n faces[5].right_face = faces[4]\n faces[5].upper_face = faces[3]\n faces[5].downward_face = faces[1]\n\n faces[5].left_face_shifter = FaceShifter(Ori.NORTH, face6_left_trans)\n faces[5].right_face_shifter = FaceShifter(Ori.SOUTH, face6_right_trans)\n faces[5].upper_face_shifter = FaceShifter(Ori.SOUTH, face6_up_trans)\n faces[5].downward_face_shifter = FaceShifter(Ori.NORTH, face6_down_trans)\n\n faces[0].id = 1\n faces[1].id = 2\n faces[2].id = 3\n faces[3].id = 4\n faces[4].id = 5\n faces[5].id = 6\n\n\ndef part1_adjust_final_posSAMPLE_INPUT(pos, face: Face):\n def get_offs(n):\n return 1 + n*face.get_size()\n if face.id == 1:\n return (pos[0] + get_offs(0), pos[1] + get_offs(2))\n if face.id == 2:\n return (pos[0] + get_offs(1), pos[1] + get_offs(0))\n if face.id == 3:\n return (pos[0] + get_offs(1), pos[1] + get_offs(1))\n if face.id == 4:\n return (pos[0] + get_offs(1), pos[1] + get_offs(2))\n if face.id == 5:\n return (pos[0] + get_offs(2), pos[1] + get_offs(2))\n if face.id == 6:\n return (pos[0] + get_offs(2), pos[1] + get_offs(3))\n\ndef part1_adjust_final_pos(pos, face: Face):\n def get_offs(n):\n 
return 1 + n*face.get_size()\n if face.id == 1:\n return (pos[0] + get_offs(0), pos[1] + get_offs(1))\n if face.id == 2:\n return (pos[0] + get_offs(0), pos[1] + get_offs(2))\n if face.id == 3:\n return (pos[0] + get_offs(1), pos[1] + get_offs(1))\n if face.id == 4:\n return (pos[0] + get_offs(2), pos[1] + get_offs(0))\n if face.id == 5:\n return (pos[0] + get_offs(2), pos[1] + get_offs(1))\n if face.id == 6:\n return (pos[0] + get_offs(3), pos[1] + get_offs(0))\n\n\ndef log_path(f, p, d):\n if d == Dir.UP:\n f.map[p[0]][p[1]] = \"^\"\n if d == Dir.DOWN:\n f.map[p[0]][p[1]] = \"v\"\n if d == Dir.LEFT:\n f.map[p[0]][p[1]] = \"<\"\n if d == Dir.RIGHT:\n f.map[p[0]][p[1]] = \">\"\n\ndef part1():\n faces = get_faces(lines)\n input_dirs, distances = get_input_directions(lines)\n \n rotator = Rotator()\n arrange_faces(faces)\n current_face = faces[0]\n # currentpos = j, i (row, line)\n current_pos = [0, 0]\n print(len(input_dirs), len(distances))\n\n\n for cur_dd_index, cur_dist in enumerate(distances):\n for _ in range(cur_dist):\n #current_face.map[current_pos[0]][current_pos[1]] = \"x\"\n log_path(current_face, current_pos, rotator.get_current_dir())\n next_dir, next_pos, next_face = get_real_new_pos(current_pos, rotator.get_current_dir(), current_face)\n if next_face.get_el_at_pos(next_pos) == \"#\":\n break\n else:\n current_face, current_pos = next_face, next_pos\n rotator.current_dir_index = next_dir.value\n log_path(current_face, current_pos, rotator.get_current_dir())\n if cur_dd_index < len(input_dirs):\n cur_in_dir = input_dirs[cur_dd_index]\n rotator.rotate(cur_in_dir)\n current_pos = part1_adjust_final_pos(current_pos, current_face)\n \n for f in faces:\n print(f.id)\n print(f)\n \n print((1000 * current_pos[0]) + (4 * current_pos[1]) + rotator.get_current_dir().value)\n\npart1()\n" }, { "alpha_fraction": 0.49006742238998413, "alphanum_fraction": 0.5039183497428894, "avg_line_length": 31.282352447509766, "blob_id": "3bab9115f9c251c65769225efdbe13381e5cd5e6", "content_id": "91a237a6c293b71da2c542a4a000310c7b9740e3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5487, "license_type": "no_license", "max_line_length": 183, "num_lines": 170, "path": "/2022/17-tetris.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "import numpy as np\nimport copy\n\nlines = []\n\nfile = open(\"/home/rprcz974/workspace/AOC/2022/input\", \"r\")\nfor l in file.readlines():\n lines.append(l.rstrip())\n\npattern = lines[0]\npattern2 = \">>><<><>><<<>><>>><<<>>><<<><<<>><>><<>>\"\nprint(\"len of pattern:\", len(pattern))\n\n\nclass RockGenerator:\n def __init__(self):\n self.rocks_order = [\"minus\", \"plus\", \"Lshape\", \"line\", \"square\"]\n self.current_rock_index = 0\n self.rocks_order = [np.where(np.array(self._rock_name_to_shape(a)) == \"@\") for a in self.rocks_order]\n\n def _rock_name_to_shape(self, name):\n if name not in self.rocks_order:\n raise Exception(f\"{name} is not a rock\")\n\n if name == \"minus\":\n return [[\"\", \"\", \"\", \"@\", \"@\", \"@\", \"@\"]]\n elif name == \"plus\":\n return [\n [\"\", \"\", \"\", \".\", \"@\", \".\"],\n [\"\", \"\", \"\", \"@\", \"@\", \"@\"],\n [\"\", \"\", \"\", \".\", \"@\", \".\"]\n ]\n elif name == \"Lshape\":\n return [\n [\"\", \"\", \"\", \".\", \".\", \"@\"],\n [\"\", \"\", \"\", \".\", \".\", \"@\"],\n [\"\", \"\", \"\", \"@\", \"@\", \"@\"]\n ]\n elif name == \"line\":\n return [\n [\"\", \"\", \"\", \"@\"],\n [\"\", \"\", \"\", \"@\"],\n [\"\", \"\", \"\", 
\"@\"],\n [\"\", \"\", \"\", \"@\"] \n ]\n elif name == \"square\":\n return [\n [\"\", \"\", \"\", \"@\", \"@\"],\n [\"\", \"\", \"\", \"@\", \"@\"]\n ]\n\n def get_next_rock(self):\n rock_index_to_return = self.current_rock_index\n self.current_rock_index += 1\n self.current_rock_index = self.current_rock_index % len(self.rocks_order)\n return copy.deepcopy(self.rocks_order[rock_index_to_return])\n\nclass DirGen:\n def __init__(self, pattern):\n self.pattern = pattern\n self.current_index = 0\n\n def get_next_dir(self):\n dir = self.pattern[self.current_index]\n self.current_index = (self.current_index + 1) % len(self.pattern)\n return dir\n\nclass Tower:\n def __init__(self):\n self.dir_gen = DirGen(pattern=pattern)\n self.rock_gen = RockGenerator()\n self.grid = np.array([\n [True for i in range(9)]\n ])\n self.highest_point_height = 0\n self.grid_width = 7\n self.current_rock = None\n self.current_fall_count = None\n self.total_height = 0\n self.MAX_GRID_LEN = 1000\n self.REMOVABLE_LEN = 200\n self.journal = {}\n self.n_rocks = 0\n\n\n def process_state(self):\n \n key = (self.dir_gen.current_index, self.rock_gen.current_rock_index)\n value = self.n_rocks\n\n if key not in self.journal:\n self.journal[key] = {\"raw\": [value], \"dif\": []}\n else:\n self.journal[key][\"raw\"].append(value)\n self.journal[key][\"dif\"].append(value - self.journal[key][\"raw\"][-2])\n #if self.dir_gen.current_index < 4:\n #print(key, self.journal[key])\n\n def get_current_total_height(self):\n return self.total_height + self.highest_point_height\n\n def spawn_rock(self, rock):\n self.process_state()\n self.n_rocks += 1\n self.current_rock = list(rock)\n self.current_fall_count = 0\n empty_space = np.array([[True] + [False for i in range(7)] + [True] for _ in range(max(rock[0] + 1))])\n self.grid = np.concatenate((empty_space, self.grid), axis=0)\n\n\n def shift_rock(self):\n dir = self.dir_gen.get_next_dir()\n if dir == \"<\":\n shift_val = -1\n elif dir == \">\":\n shift_val = 1\n can_shift = True\n\n for i, j in zip(self.current_rock[0], self.current_rock[1]):\n if self.grid[i][j + shift_val]:\n can_shift = False\n\n if can_shift:\n self.current_rock[1] += shift_val\n\n def fall_rock(self):\n if self.current_fall_count == 0:\n for i in range(3):\n self.shift_rock()\n self.shift_rock()\n can_fall = True\n for i, j in zip(self.current_rock[0], self.current_rock[1]):\n if self.grid[i + 1][j]:\n can_fall = False\n if can_fall:\n self.current_rock[0] += 1\n self.current_fall_count += 1\n else:\n self.grid[self.current_rock[0], self.current_rock[1]] = True\n self.highest_point_height = max(self.highest_point_height - self.current_fall_count + max(self.current_rock[0]) - min(self.current_rock[0]) + 1, self.highest_point_height)\n self.grid = self.grid[len(self.grid) - (self.highest_point_height + 1):, :]\n if len(self.grid) > self.MAX_GRID_LEN:\n self.grid = self.grid[:-self.REMOVABLE_LEN, :]\n self.total_height += self.REMOVABLE_LEN\n self.highest_point_height -= self.REMOVABLE_LEN\n return can_fall\n\ntower = Tower()\n\nttt = 1000000000000\n\nchecks_inter = len(pattern) * 5\nmagic_number = 1695\njump_height = 2634\nother_magic_number = ttt % magic_number\nn_times = ttt // magic_number\nprint(other_magic_number, n_times)\n\nfirst_pass = (5 * magic_number) + other_magic_number\n\nfor i in range(first_pass):\n next_rock = tower.rock_gen.get_next_rock()\n tower.spawn_rock(next_rock)\n while tower.fall_rock():\n continue\n\nfirst_pass_height = tower.get_current_total_height()\n\nanswer = first_pass_height + 
((n_times - 5) * jump_height)\nprint(\"answer:\", answer)" }, { "alpha_fraction": 0.26226291060447693, "alphanum_fraction": 0.27665141224861145, "avg_line_length": 13.166666984558105, "blob_id": "888137bd0309cacafc52508472369fa69d8e34ec", "content_id": "1a32751e7105cc5c9363ec91a10d9cdc3eb130d5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1529, "license_type": "no_license", "max_line_length": 57, "num_lines": 108, "path": "/2022/5-piles.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "from utils import read_file\n\nar = read_file(\"/home/fabrice/advent-of-code/2022/input\")\nar = ar[10:]\n\nprint(ar[0])\na = [\" \",\n \"H\", \n \"M\", \n \"P\", \n \"Z\", ]\n\npiles = {\n 1: [\"R\",\n \"H\",\n \"M\",\n \"P\",\n \"Z\",],\n 2: [\n \"B\",\n \"J\",\n \"C\",\n \"P\",\n ],\n 3: [\n \"D\",\n \"C\",\n \"L\",\n \"G\",\n \"H\",\n \"N\",\n \"S\",\n ],\n 4: [\n \"L\",\n \"R\",\n \"S\",\n \"Q\",\n \"D\",\n \"M\",\n \"T\",\n \"F\",\n ],\n 5: [\"M\",\n \"Z\",\n \"T\",\n \"B\",\n \"Q\",\n \"P\",\n \"S\",\n \"F\",],\n 6: [\n \"G\",\n \"B\",\n \"Z\",\n \"S\",\n \"F\",\n \"T\",\n ],\n 7: [\n \"V\",\n \"R\",\n \"N\",\n ],\n 8: [\n \"M\",\n \"C\",\n \"V\",\n \"D\",\n \"T\",\n \"L\",\n \"G\",\n \"P\",\n ],\n 9: [\n \"L\",\n \"M\",\n \"F\",\n \"J\",\n \"N\",\n \"Q\",\n \"W\",\n ],\n}\n\nfor k, p in piles.items():\n piles[k] = p[::-1]\n\ndef text_to_nums(l):\n words = l.split(\" \")\n return int(words[1]), int(words[3]),int(words[5])\nprint(len(piles[9]))\n\nfor l in ar:\n move, fr, too = text_to_nums(l)\n print(l)\n #for n in range(move):\n # aa = piles[fr].pop()\n # piles[too] = piles[too] + [aa]\n stack = piles[fr][-move:]\n piles[fr] = piles[fr][:-move]\n piles[too] += stack\n print(piles)\n\nstrr = \"\"\nfor p in piles.values():\n strr += p[-1]\nprint(strr)" }, { "alpha_fraction": 0.5588458776473999, "alphanum_fraction": 0.5808656215667725, "avg_line_length": 24.823530197143555, "blob_id": "404e5e54e79a1f34cd35d02f5e51b66835dcb216", "content_id": "e154b981ab36d5f584ad41cb370f1d6bbf7f1b20", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1317, "license_type": "no_license", "max_line_length": 92, "num_lines": 51, "path": "/old/2015/11.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "from aocd import get_data\nar = get_data(day=11, year=2015)\nar = ar.splitlines()\nprint(ar)\n\ndef increment(password):\n new_pasword = list(password)\n for i, letter in enumerate(list(password[::-1])):\n if letter == \"z\":\n new_pasword[len(password) - 1 - i] = \"a\"\n else:\n new_pasword[len(password) - 1 - i] = chr(ord(letter) + 1)\n break\n return \"\".join(new_pasword)\n\n\ndef is_increasing_straight(password):\n for i, letter in enumerate(password[:-2]):\n if ord(letter) + 1 == ord(password[i+1]) and ord(letter) + 2 == ord(password[i+2]):\n return True\n return False\n\n\ndef isnt_confusing(password):\n return not(\"i\" in password or \"o\" in password or \"l\" in password)\n \ndef is_pair(password):\n pairs = []\n for i, letter in enumerate(password[:-1]):\n if letter == password[i + 1]:\n pairs.append((i, i+1))\n if len(pairs) < 2:\n return False\n if len(pairs) > 2:\n return True\n if pairs[0][1] != pairs[1][0]:\n return True\n return False\n\ndef part1(pwd):\n while not(is_increasing_straight(pwd)) or not(isnt_confusing(pwd)) or not(is_pair(pwd)):\n pwd = increment(pwd)\n print(pwd)\n\npart1(\"cqjxxyzz\")\n\ndef part2(pwd):\n pwd = 
increment(pwd)\n part1(pwd)\n\npart2(\"cqjxxyzz\")\n" }, { "alpha_fraction": 0.530434787273407, "alphanum_fraction": 0.5681159496307373, "avg_line_length": 32.41935348510742, "blob_id": "9548de02bc4a5987486b43ee7f333227313ca9dd", "content_id": "be48793506c37cdb45a41d1dae5d9705904a5742", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1035, "license_type": "no_license", "max_line_length": 98, "num_lines": 31, "path": "/old/2015/15.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "import itertools\n\nthings = {\n 'Sprinkles': {\"capacity\": 5, \"durability\": -1, \"flavor\": 0, \"texture\": 0, \"calories\": 5},\n 'PeanutButter': {\"capacity\": -1, \"durability\": 3, \"flavor\": 0, \"texture\": 0, \"calories\": 1},\n 'Frosting': {\"capacity\": 0, \"durability\": -1, \"flavor\": 4, \"texture\": 0, \"calories\": 6},\n 'Sugar': {\"capacity\" :-1, \"durability\": 0, \"flavor\": 0, \"texture\": 2, \"calories\": 8}\n}\n\nthings = {\n \"Butterscotch\": {\"capacity\": -1, \"durability\": -2, \"flavor\": 6, \"texture\": 3, \"calories;\": 8},\n \"Cinnamon\": {\"capacity\": 2, \"durability\": 3, \"flavor\": -2, \"texture\": -1, \"calories;\": 3}\n}\n\ncombs = list(itertools.combinations(list(things.keys()), 100))\n\ndef get_score(ingrs):\n cap, dur, fla, tex, cal = 0, 0, 0, 0, 0\n\n for ing in ingrs:\n cap += things[ing][\"capacity\"]\n dur += things[ing][\"durability\"]\n fla += things[ing][\"flavor\"]\n tex += things[ing][\"texture\"]\n return cap * dur * fla * tex\n\nma = 0\nprint(len(combs))\nfor c in combs:\n ma = max(ma, get_score(c))\nprint(ma)" }, { "alpha_fraction": 0.4453781545162201, "alphanum_fraction": 0.4924369752407074, "avg_line_length": 26.045454025268555, "blob_id": "9f8d42927f15f22dc26a4b58fec8501d0bb65077", "content_id": "4cf0c69fd45856b657efcd7963e1960038c23984", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1190, "license_type": "no_license", "max_line_length": 60, "num_lines": 44, "path": "/2022/20-array-jumping.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "from utils import read_file\n\nlines = read_file(\"/home/fabrice/advent-of-code/2022/input\")\nlines2 = [\n \"1\",\n \"2\",\n \"-3\",\n \"3\",\n \"-2\",\n \"0\",\n \"4\",\n]\n\ndef part1():\n keyy = 811589153\n numbers = [int(l) * keyy for l in lines]\n numbers = [(i,n) for i, n in enumerate(numbers)]\n length = len(numbers)\n print([i[1] for i in numbers])\n for _ in range(10):\n for i in range(len(numbers)):\n current_idx = None\n for j, n in enumerate(numbers):\n if n[0] == i:\n current_idx = j\n break\n nnn = numbers.pop(current_idx)\n new_pos = (current_idx + nnn[1]) % (length - 1)\n numbers.insert(new_pos, nnn)\n #print(nnn)\n #print([i[1] for i in numbers])\n\n zero_idx = None\n for j, n in enumerate(numbers):\n if n[1] == 0:\n zero_idx = j\n break\n val_1k = numbers[(zero_idx + 1000) % len(numbers)][1]\n val_2k = numbers[(zero_idx + 2000) % len(numbers)][1]\n val_3k = numbers[(zero_idx + 3000) % len(numbers)][1]\n print(val_1k, val_2k, val_3k)\n print(val_1k + val_2k + val_3k)\n\npart1()\n" }, { "alpha_fraction": 0.5194610953330994, "alphanum_fraction": 0.5220808386802673, "avg_line_length": 26.275510787963867, "blob_id": "d74f5b10fc99a672de36f8d8252eff60571ec572", "content_id": "22764c29ab77778685ac0f215c02753018d6a7ad", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2672, 
"license_type": "no_license", "max_line_length": 64, "num_lines": 98, "path": "/old/2021/day25-2021.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "def read_file(path):\n arr = []\n with open(path, \"r\") as file:\n for line in file:\n arr.append(line.rstrip())\n return arr\n\nar = read_file(\"input\")\n\n####################\n\n_map = []\nfor line in ar:\n _map.append(list(line))\nX_LEN = len(_map[0])\nY_LEN = len(_map)\n\nclass Position:\n def __init__(self, x, y):\n self.x = x\n self.y = y\n\nclass Step_executor():\n def __init__(self, _map):\n self._map = _map\n self.right_updates = {}\n self.down_updates = {}\n\n def access_map(self, pos):\n return self._map[pos.y][pos.x]\n \n def update_map_pos(self, pos, val):\n self._map[pos.y][pos.x] = val\n\n def pos_wrapper(self, pos: Position):\n x = pos.x\n if pos.x == X_LEN:\n pos.x = 0\n y = pos.y\n if pos.y == Y_LEN:\n pos.y = 0\n\n def update_right_position(self, pos: Position):\n if self.access_map(pos) == \">\":\n pos_to_check = Position(pos.x, pos.y)\n pos_to_check.x += 1\n self.pos_wrapper(pos_to_check)\n if self.access_map(pos_to_check) == \".\":\n self.right_updates[pos] = \".\"\n self.right_updates[pos_to_check] = \">\"\n \n def update_down_position(self, pos: Position):\n if self.access_map(pos) == \"v\":\n pos_to_check = Position(pos.x, pos.y)\n pos_to_check.y += 1\n self.pos_wrapper(pos_to_check)\n if self.access_map(pos_to_check) == \".\":\n self.down_updates[pos] = \".\"\n self.down_updates[pos_to_check] = \"v\"\n\n def update_map(self):\n for pos, val in self.right_updates.items():\n self.update_map_pos(pos, val)\n self.right_updates = {}\n for pos, val in self.down_updates.items():\n self.update_map_pos(pos, val)\n self.down_updates = {}\n\n\n def execute_step(self) -> bool:\n \n # move rights\n for y_, line in enumerate(_map):\n for x_, _ in enumerate(line):\n self.update_right_position(pos=Position(x_, y_))\n is_rights_moving = bool(self.right_updates)\n self.update_map()\n\n # move downs\n for y_, line in enumerate(_map):\n for x_, _ in enumerate(line):\n self.update_down_position(pos=Position(x_, y_))\n is_downs_moving = bool(self.down_updates)\n self.update_map()\n\n # return true if still any update\n return is_downs_moving or is_rights_moving\n\nexecutor = Step_executor(_map)\nis_moving = True\ncounter = 0\n\nwhile is_moving:\n is_moving = executor.execute_step()\n counter += 1\n print(counter)\n\nprint(counter)" }, { "alpha_fraction": 0.3885236084461212, "alphanum_fraction": 0.4285714328289032, "avg_line_length": 18.69411849975586, "blob_id": "7e7bcaa2bedde574f49fa2e111f62abe312a649d", "content_id": "07301d42cd08957940575b745b62152695d3a050", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1673, "license_type": "no_license", "max_line_length": 57, "num_lines": 85, "path": "/2022/10-cpu-instructions.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "from utils import read_file\n\nar = read_file(\"/home/fabrice/advent-of-code/2022/input\")\n\n\ndef part1():\n answer = 0\n cycle = 1\n X = 1\n addx = False\n value = 0\n instruction = None\n\n while cycle < 300:\n if not ar:\n break\n if (cycle - 20) % 40 == 0:\n answer += cycle * X\n print(instruction)\n print(addx)\n print(cycle, X)\n cycle += 1\n if not addx:\n instruction = ar.pop(0)\n else:\n X += int(value)\n addx = False\n \n continue\n if instruction == \"noop\":\n \n continue\n else:\n value = instruction.split(\" \")[1]\n addx = True\n \n 
print(answer)\n\n#part1()\n\n\ndef is_lit(cycle, X):\n return abs(((cycle - 1) % 40) - X) <= 1\n\ndef part2():\n answer = 0\n cycle = 1\n X = 1\n addx = False\n value = 0\n instruction = None\n\n CTR = []\n\n while cycle < 300:\n if not ar:\n break\n if is_lit(cycle, X):\n CTR.append(\"#\")\n else:\n CTR.append(\".\")\n \n cycle += 1\n if not addx:\n instruction = ar.pop(0)\n else:\n X += int(value)\n addx = False\n \n continue\n if instruction == \"noop\":\n \n continue\n else:\n value = instruction.split(\" \")[1]\n addx = True\n \n print(\"\".join(CTR[0:40]))\n print(\"\".join(CTR[40:80]))\n print(\"\".join(CTR[80:120]))\n print(\"\".join(CTR[120:160]))\n print(\"\".join(CTR[160:200]))\n print(\"\".join(CTR[200:240]))\n\npart2()" }, { "alpha_fraction": 0.4010569453239441, "alphanum_fraction": 0.4556664824485779, "avg_line_length": 21.706666946411133, "blob_id": "2d8f2dd03c994f2e5dabe8b618233eda2b49e391", "content_id": "f4075f734a2ea94be4d69936e2b8512615bac696", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1703, "license_type": "no_license", "max_line_length": 71, "num_lines": 75, "path": "/2022/8-grid.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "from aocd import get_data\nar = get_data(day=8, year=2022)\nar = ar.splitlines()\n\nar2 = [ \"30373\",\n \"25512\",\n \"65332\",\n \"33549\",\n \"35390\",\n]\n\ngrid = [list(a) for a in ar]\n\ndef is_visible(x, y):\n if x == 0 or x == len(grid) - 1 or y == 0 or y == len(grid[0]) - 1:\n return True\n value = grid[x][y]\n v1, v2, v3, v4 = True, True, True, True\n for i in range(x):\n if grid[i][y] >= value:\n v1 = False\n for i in range(x+1, len(grid)):\n if grid[i][y] >= value:\n v2 = False\n for i in range(y):\n if grid[x][i] >= value:\n v3 = False\n for i in range(y+1, len(grid[0])):\n if grid[x][i] >= value:\n v4 = False\n return v1 or v2 or v3 or v4\n\ncount = 0\nfor i, _ in enumerate(grid):\n for j, _ in enumerate(grid[0]):\n count += is_visible(i, j)\n\ndef part2(x, y):\n if x == 0 or x == len(grid) - 1 or y == 0 or y == len(grid[0]) - 1:\n return 0\n value = grid[x][y]\n v1, v2, v3, v4 = x, len(grid) - x - 1, y, len(grid) - y - 1\n\n for i in range(x-1, 0, -1):\n if grid[i][y] >= value:\n v1 = x-i\n break\n\n for i in range(x+1, len(grid)):\n if grid[i][y] >= value:\n v2 = i-x\n break\n\n for i in range(y-1, 0, -1):\n if grid[x][i] >= value:\n v3 = y-i\n break\n\n for i in range(y+1, len(grid[0])):\n if grid[x][i] >= value:\n v4 = i-y\n break\n\n return v1 * v2 * v3 * v4\n\n#print(part2(1, 2))\n#print(part2(3, 2))\n\nmx = 0\nfor i, _ in enumerate(grid):\n for j, _ in enumerate(grid[0]):\n print(part2(i, j))\n mx = max(mx, part2(i, j))\n\nprint(mx)\n" }, { "alpha_fraction": 0.4973684251308441, "alphanum_fraction": 0.5189473628997803, "avg_line_length": 22.469135284423828, "blob_id": "3d5452e41843279c545787b004caea0838e92848", "content_id": "a43076a2a8892ab4bd19fc2aa1a1030d811373c5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1900, "license_type": "no_license", "max_line_length": 74, "num_lines": 81, "path": "/2022/13-compare-pairs.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "from utils import read_file\nimport functools\nfrom natsort import natsorted \nimport copy\nimport json\n\nar = read_file(\"/home/fabrice/advent-of-code/2022/input\")\nar2 = read_file(\"/home/fabrice/advent-of-code/2022/input_sample.txt\")\n\n\npairs = []\ncur_pair = []\nfor line 
in ar:\n if line != \"\":\n cur_pair.append(json.loads(line))\n else:\n pairs.append(copy.deepcopy(cur_pair))\n cur_pair = []\n\npairs2 = [\n [[],\n [3]]\n]\n\ndef compare(left, right):\n if isinstance(left, int) and isinstance(right, int):\n if left < right:\n return -1\n elif left > right:\n return 1\n else:\n return \"continue\"\n elif isinstance(left, list) and isinstance(right, list):\n \n for i, el in enumerate(right):\n if i > len(left) - 1:\n return -1\n res = compare(left[i], el)\n if res == \"continue\":\n continue\n else:\n return res\n\n if len(right) > len(left):\n return -1\n elif len(right) < len(left):\n return 1\n else:\n return \"continue\"\n\n elif isinstance(left, int) and isinstance(right, list):\n return compare([left], right)\n elif isinstance(left, list) and isinstance(right, int):\n return compare(left, [right])\n\ndef part1():\n c = 0\n for i, p in enumerate(pairs):\n #print(\"comparing\", p[0], p[1])\n if compare(p[0], p[1]) == -1:\n #print(p)\n #print(i + 1)\n c += i + 1\n #if compare(p[0], p[1]) == \"continue\":\n # print(\"WTF\", p[0], p[1])\n print(c)\n\npart1()\n\n\ndef part2(pairs):\n pairs = [p[0] for p in pairs] + [p[1] for p in pairs] + [[[2]], [[6]]]\n\n pairs.sort(key=functools.cmp_to_key(compare))\n \n\n\n print((pairs.index([[2]]) + 1) * (pairs.index([[6]]) + 1))\n #print(str_pairs)\n\npart2(pairs)" }, { "alpha_fraction": 0.5199999809265137, "alphanum_fraction": 0.5199999809265137, "avg_line_length": 23.5, "blob_id": "9b86101d37d675f547d482276ace298446fba1c0", "content_id": "8f54cabb26ca4bac605d57914e902be84e483b96", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 150, "license_type": "no_license", "max_line_length": 37, "num_lines": 6, "path": "/2022/utils.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "def read_file(path):\n arr = []\n with open(path, \"r\") as file:\n for line in file:\n arr.append(line.rstrip())\n return arr\n\n\n\n" }, { "alpha_fraction": 0.5295472145080566, "alphanum_fraction": 0.6047582626342773, "avg_line_length": 15.922078132629395, "blob_id": "65c4fa753b28059797fdf440a498f08e5b136bb4", "content_id": "bf7d0ecd3df894f187a4c3bba225713387558721", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1303, "license_type": "no_license", "max_line_length": 28, "num_lines": 77, "path": "/2022/trans22.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "# face 1\ndef face1_left_trans(pos):\n return (49 - pos[0], 0)\n\ndef face1_right_trans(pos):\n return (pos[0], 0)\n\ndef face1_up_trans(pos):\n return (pos[1], 0)\n\ndef face1_down_trans(pos):\n return (0, pos[1])\n\n# face 2\ndef face2_left_trans(pos):\n return (pos[0], 49)\n\ndef face2_right_trans(pos):\n return (49 - pos[0], 49)\n\ndef face2_up_trans(pos):\n return (49, pos[1])\n\ndef face2_down_trans(pos):\n return (pos[1], 49)\n\n# face 3\ndef face3_left_trans(pos):\n return (0, pos[0])\n\ndef face3_right_trans(pos):\n return (49, pos[0])\n\ndef face3_up_trans(pos):\n return (49, pos[1])\n\ndef face3_down_trans(pos):\n return (0, pos[1])\n\n# face 4\ndef face4_left_trans(pos):\n return (49 - pos[0], 0)\n\ndef face4_right_trans(pos):\n return (pos[0], 0)\n\ndef face4_up_trans(pos):\n return (pos[1], 0)\n\ndef face4_down_trans(pos):\n return (0, pos[1])\n\n# face 5\ndef face5_left_trans(pos):\n return (pos[0], 49)\n\ndef face5_right_trans(pos):\n return (49 - pos[0], 49)\n\ndef face5_up_trans(pos):\n return (49, 
pos[1])\n\ndef face5_down_trans(pos):\n return (pos[1], 49)\n\n# face 6\ndef face6_left_trans(pos):\n return (0, pos[0])\n\ndef face6_right_trans(pos):\n return (49, pos[0])\n\ndef face6_up_trans(pos):\n return (49, pos[1])\n\ndef face6_down_trans(pos):\n return (0, pos[1])\n" }, { "alpha_fraction": 0.5088785290718079, "alphanum_fraction": 0.5247663259506226, "avg_line_length": 23.329545974731445, "blob_id": "3147cfe35736927454866d985468ffa42c3eb524", "content_id": "e612eca4afd7a4c2e6e840f9c01b2b92229aab5f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2140, "license_type": "no_license", "max_line_length": 103, "num_lines": 88, "path": "/old/2021/day15-2021.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "import heapq\nimport copy\n\ndef read_file(path):\n arr = []\n with open(path, \"r\") as file:\n for line in file:\n arr.append(line.rstrip())\n return arr\n\nar = read_file(\"input\")\n\n_map = []\nfor line in ar:\n _map.append([int(a) for a in line])\n\ndef increment_map(a_map):\n new_map = copy.deepcopy(a_map)\n for y, line in enumerate(new_map):\n for x, el in enumerate(line):\n new_map[y][x] = int(el) + 1\n if el == 9:\n new_map[y][x] = 1\n return new_map\n\ndef extend_map():\n # right extend\n new_map = copy.deepcopy(_map)\n inc_map = copy.deepcopy(_map)\n for i in range(4):\n inc_map = increment_map(inc_map)\n for l, line in enumerate(new_map):\n line += copy.deepcopy(inc_map[l])\n \n # down extend\n inc_map = copy.deepcopy(new_map)\n for i in range(4):\n inc_map = increment_map(inc_map)\n new_map += copy.deepcopy(inc_map)\n return new_map\n\n_map = extend_map()\n#for l in _map:\n# print(l)\n\nX_LEN = len(_map[0])\nY_LEN = len(_map)\n\ndef get_neighbors(x, y):\n neigs = []\n if x != 0:\n neigs.append((x - 1, y))\n if x != X_LEN - 1:\n neigs.append((x +1, y))\n if y != 0:\n neigs.append((x, y - 1))\n if y != Y_LEN - 1:\n neigs.append((x, y + 1))\n return neigs\n\nprint(\"starting disjktra\")\n\ndef disjktra():\n \n start = (0,0)\n seen = {start}\n target = (X_LEN-1, Y_LEN-1)\n dist = {start: 0}\n nodes_to_visit = [(_map[0][0], start)]\n heapq.heapify(nodes_to_visit)\n iter = 0\n while nodes_to_visit:\n current_node = heapq.heappop(nodes_to_visit)\n dist[current_node[1]] = int(current_node[0])\n if current_node[1] == target:\n break\n \n neigs = get_neighbors(current_node[1][0], current_node[1][1])\n for n in neigs:\n if n not in seen:\n dist[n] = dist[current_node[1]]\n seen.add(n)\n heapq.heappush(nodes_to_visit, (int(dist[current_node[1]]) + int(_map[n[1]][n[0]]), n))\n iter += 1\n print(iter)\n print(dist[target] - dist[start])\n\ndisjktra()" }, { "alpha_fraction": 0.4589322507381439, "alphanum_fraction": 0.47467488050460815, "avg_line_length": 28.525253295898438, "blob_id": "2699a5e0a1f2eba9336575e72a22230fd5bb063d", "content_id": "049ea458de3d84952d2f8c7c63cf22fd46c77c42", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2922, "license_type": "no_license", "max_line_length": 104, "num_lines": 99, "path": "/2022/11-monkeys.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "from aocd import get_data\nar = get_data(day=11, year=2022)\nar = ar.splitlines()\n\n#print(ar)\n\nfrom utils import read_file\n\n#ar = read_file(\"/home/fabrice/advent-of-code/2022/test_ex\")\n\n#print(ar)\nar.append(\" \")\n\n\nclass Monkey:\n def __init__(self, name, starting_items, operation, test, if_true, if_false):\n self.name = 
name\n self.items = starting_items\n self.operation = operation\n self.test = test\n self.if_true = if_true\n self.if_false = if_false\n self.n_processed = 0\n\n def inspect(self):\n to_give = []\n while self.items:\n item = self.items.pop(0)\n self.n_processed += 1\n if self.operation.split(\" \")[1] == \"old\":\n bb = item\n else:\n bb = int(self.operation.split(\" \")[1])\n if self.operation[0] == \"*\":\n item *= bb\n #is_test_true = (item * bb) % int(self.test) == 0\n if self.operation[0] == \"+\":\n item += bb\n #is_test_true = ((item) % int(self.test) + (bb) % int(self.test)) % int(self.test) == 0\n #item += bb\n #item = item // 3\n\n if (item) % int(self.test) == 0:\n to_give.append([self.if_true, item])\n else:\n to_give.append([self.if_false, item])\n return to_give\n\n\ndef text_to_monkeys(text):\n monks = []\n name, items, op, test, if_true, if_false = \"\", \"\", \"\", \"\", \"\", \"\"\n for l in text:\n\n \n if l.startswith(\"Monkey\"):\n name = l.split(\" \")[1][0]\n elif l.startswith(\" Starting items\"):\n items = [int(a[:-1]) for a in l.split(\" \")[4:-1]] + [int(l.split(\" \")[-1])]\n elif l.startswith(\" Operation:\"):\n op = l[23:]\n elif l.startswith(\" Test: \"):\n test = l.split(\":\")[1].split(\" \")[3]\n elif l.startswith(\" If true:\"):\n if_true = l.split(\":\")[1].split(\" \")[4]\n elif l.startswith(\" If false:\"):\n if_false = l.split(\":\")[1].split(\" \")[4]\n else:\n monks.append(Monkey(name, items, op, test, if_true, if_false))\n return monks\n\ndef part1():\n ans = 0\n monks = text_to_monkeys(ar)\n\n big_mama = 1\n for moo in monks:\n big_mama *= int(moo.test)\n\n for _ in range(10000):\n for monk in monks:\n #print(\"monkey\", monk.name)\n #print(\"items after\", monk.items)\n to_give = monk.inspect()\n #print(\" operation\", monk.operation)\n #print(\"test\", monk.test)\n #print(\"to_give\", to_give)\n for mm, it in to_give:\n for m in monks:\n if m.name == mm:\n m.items.append(it % int(big_mama))\n ns = []\n for m in monks:\n print(m.n_processed)\n ns.append(m.n_processed)\n ns.sort()\n print(ns[-1] * ns[-2])\n\npart1()" }, { "alpha_fraction": 0.4285328984260559, "alphanum_fraction": 0.47761595249176025, "avg_line_length": 27.96875, "blob_id": "db124debbefa793e273241b1dbdc2b59ca7b0803", "content_id": "c819288be31104e393ecfe684c5976bd01833eef", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3708, "license_type": "no_license", "max_line_length": 120, "num_lines": 128, "path": "/2022/15-merge-intervals.py", "repo_name": "FabriceCh/advent-of-code", "src_encoding": "UTF-8", "text": "import math\nfrom utils import read_file\n\nar = read_file(\"/home/fabrice/advent-of-code/2022/input\")\n\nar2 = [\n \"2,18 -2,15\", \n \"9,16 10,16\", \n \"13,2 15,3\", \n \"12,14 10,16\", \n \"10,20 10,16\", \n \"14,17 10,16\", \n \"8,7 2,10\", \n \"2,0 2,10\", \n \"0,11 2,10\", \n \"20,14 25,17\", \n \"17,20 21,22\", \n \"16,7 15,3\", \n \"14,3 15,3\", \n \"20,1 15,3\", \n\n]\n\ndef man_dist(sensor, pos):\n rel_pos = [abs(pp - ss) for ss, pp in zip(sensor, pos)]\n return sum(rel_pos)\n\ndef build_map(ar):\n sensors = {}\n abs_beacs = []\n for line in ar:\n blocks = line.split(\" \")\n sens_pos = [int(a) for a in blocks[0].split(\",\")]\n beac_pos = [int(a) for a in blocks[1].split(\",\")]\n sensors[tuple(sens_pos)] = {\"b\": beac_pos, \"r\": man_dist(sens_pos, beac_pos)}\n abs_beacs.append(tuple(beac_pos))\n return sensors, abs_beacs\n \ndef merge_pass(intervals):\n for i, ia in enumerate(intervals):\n for j, ib in 
enumerate(intervals):\n if j != i:\n if ia[0] >= ib[0] and ia[0] <= ib[1]:\n intervals.pop(max(i, j))\n intervals.pop(min(i, j))\n #print([min(ia[0], ib[0]), max(ia[1], ib[1])])\n intervals.append([min(ia[0], ib[0]), max(ia[1], ib[1])])\n return intervals\n elif ia[1] >= ib[0] and ia[1] <= ib[1]:\n intervals.pop(max(i, j))\n intervals.pop(min(i, j))\n #print([min(ia[0], ib[0]), max(ia[1], ib[1])])\n intervals.append([min(ia[0], ib[0]), max(ia[1], ib[1])])\n return intervals\n return intervals\n\ndef merge_intervals(intervals):\n changed = True\n #print(intervals)\n while changed:\n old_len = len(intervals)\n intervals = sorted(intervals, key=lambda x: x[0])\n #print(intervals)\n intervals = merge_pass(intervals)\n new_len = len(intervals)\n changed = old_len != new_len\n #print(changed)\n return intervals\n\ndef part1():\n sensors, abs_beacs = build_map(ar)\n \n \n lj = 2000000\n lj = 10\n intervals = []\n for s, dd in sensors.items():\n dist_from_line = abs(lj - s[1])\n #print(s)\n #print([s[0] - (sensors[s][\"r\"] - dist_from_line), s[0] + (sensors[s][\"r\"] - dist_from_line)])\n if dist_from_line <= sensors[s][\"r\"]:\n intervals.append([s[0] - (sensors[s][\"r\"] - dist_from_line), s[0] + (sensors[s][\"r\"] - dist_from_line)])\n \n\n\n \n intervals = merge_intervals(intervals)\n\n c = 1\n for inter in intervals:\n c += abs(inter[1] - inter[0])\n #print(\"before beacons:\", c)\n\n for b in set(abs_beacs):\n if b[1] == lj:\n c -= 1\n #print(b)\n continue\n\n print(c)\n\ndef part2():\n sensors, abs_beacs = build_map(ar)\n all_intervals = []\n for cur_i in range(4000000):\n #for cur_i in range(20):\n if cur_i % 100000 == 0:\n print(cur_i)\n intervals = []\n for s in sensors.keys():\n dist_from_line = abs(cur_i - s[1])\n if dist_from_line <= sensors[s][\"r\"]:\n intervals.append([s[0] - (sensors[s][\"r\"] - dist_from_line), s[0] + (sensors[s][\"r\"] - dist_from_line)])\n \n intervals = merge_intervals(intervals)\n\n if len(intervals) > 1:\n for ind in range(len(intervals)-1):\n if intervals[ind][1] != intervals[ind + 1][0] - 1:\n\n xxx = intervals[ind][1] + 1\n yyy = cur_i\n print(xxx*4000000 + yyy)\n return\n\n\n#part1()\npart2()\n" } ]
35
LauraReynosa/Reto-18
https://github.com/LauraReynosa/Reto-18
09c8a925595de4a35f865765eb038ce003483da9
583252a450c32d8fd11972473edac5ad4895b7df
43db24e6c31368b5da3d3aee258bf7b845b60d87
refs/heads/master
2021-07-25T22:03:09.999049
2017-11-08T04:33:17
2017-11-08T04:33:17
109,902,482
0
0
null
2017-11-07T23:19:34
2017-11-07T04:25:44
2017-11-07T04:25:44
null
[ { "alpha_fraction": 0.580106794834137, "alphanum_fraction": 0.5874499082565308, "avg_line_length": 31.565217971801758, "blob_id": "7c7f15ee756005af05d6da52e712f9c9f1e2189f", "content_id": "b3a6631a55918d469fafadea8a6fea084bf90801", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1498, "license_type": "no_license", "max_line_length": 73, "num_lines": 46, "path": "/test_gun.py", "repo_name": "LauraReynosa/Reto-18", "src_encoding": "UTF-8", "text": "from unittest import TestCase\nfrom main import Gun\nimport sys\nsys.tracebacklimit = 0\n\nclass TestGun(TestCase):\n def setUp(self):\n print(self._testMethodDoc)\n self.gun = Gun(5)\n\n def test_lock(self):\n \"\"\"-- Test Gun Lock\"\"\"\n msg = \"The gun is not lock\"\n self.gun.lock()\n self.assertIs(self.gun.isLock, True, msg = msg)\n\n def test_unlock(self):\n \"\"\"-- Test Gun is Unlock\"\"\"\n msg = \"The gun is lock\"\n self.gun.unlock()\n self.assertIs(self.gun.isLock, False, msg = msg)\n\n def test_isLock(self):\n \"\"\"-- Test Calling is Lock\"\"\"\n msg = \"Problem calling isLock\"\n self.assertIsNotNone(self.gun.isLock, msg = msg)\n\n def test_shoot(self):\n \"\"\"-- Test Shooting\"\"\"\n msg = \"The shooting is failing\"\n self.gun.lock()\n self.assertIsNone(self.gun.shoot(), msg = msg)\n self.gun.unlock()\n self.assertIsNone(self.gun.shoot(), msg = msg)\n self.gun.reload(5)\n self.gun.shoot()\n self.assertEqual(self.gun.reload(2), 1, msg = msg)\n\n def test_reload(self):\n \"\"\"-- Test Reloading \"\"\"\n msg = \"Failed reloading\"\n self.assertIsNone(self.gun.reload(-5), msg = msg)\n self.assertNotIsInstance(self.gun.reload(5.5), int, msg = msg)\n self.assertNotIsInstance(self.gun.reload(\"five\"), int, msg = msg)\n self.assertIsInstance(self.gun.reload(4), int, msg = msg)\n self.assertEqual(self.gun.reload(5), 4, msg = msg)\n" } ]
1
HarrisonBoyns/flask-sql-alchemy
https://github.com/HarrisonBoyns/flask-sql-alchemy
e868159db81602541a324b0b7a8dd5c720fb2698
00470a797976a24a09bac7f35dc91e289be3ad2b
f393190a0751a45523205e531cf5275df503348e
refs/heads/main
2023-03-31T01:12:35.572135
2021-04-09T14:06:21
2021-04-09T14:06:21
355,941,563
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.800000011920929, "alphanum_fraction": 0.800000011920929, "avg_line_length": 28.200000762939453, "blob_id": "97b867599ebc44840a04379a18fc00163c56e42e", "content_id": "dbf57bb48567493415a9c6ab81fc32b659ef41a1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 145, "license_type": "no_license", "max_line_length": 44, "num_lines": 5, "path": "/app/database/db.py", "repo_name": "HarrisonBoyns/flask-sql-alchemy", "src_encoding": "UTF-8", "text": "from flask_sqlalchemy import SQLAlchemy\n\ndb = SQLAlchemy()\n# maps these objects into rows of a database\n# therefore creates classes from objects" }, { "alpha_fraction": 0.7613636255264282, "alphanum_fraction": 0.7613636255264282, "avg_line_length": 30.225807189941406, "blob_id": "f429381f3e554236bf483176f2742721c8f2d19d", "content_id": "870102690c3acb943d1b1260c59503cf7ebadffa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 968, "license_type": "no_license", "max_line_length": 78, "num_lines": 31, "path": "/app/__init__.py", "repo_name": "HarrisonBoyns/flask-sql-alchemy", "src_encoding": "UTF-8", "text": "from flask import Flask\nfrom flask_jwt import JWT\nfrom flask_restful import Api\n\nfrom app.resources.store import StoreList, Store\nfrom app.security.security import identity, authenticate\nfrom app.resources.user import UserRegister\nfrom app.resources.items import Items, ItemList\nfrom app.database.db import db\n\n# this __init__ file basically lets one import files\napp = Flask(__name__)\n\ndb.init_app(app)\n\[email protected]_first_request\ndef create_tables():\n db.create_all()\n\napp.secret_key = \"top_secret\"\napp.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\napp.config[\"SQLALCHEMY_DATABASE_URI\"] = 'sqlite:///data.db'\napp.config['PROPAGATE_EXCEPTIONS'] = True\napi = Api(app)\njwt = JWT(app, authentication_handler=authenticate, identity_handler=identity)\n\napi.add_resource(Items, \"/items/<string:name>\")\napi.add_resource(ItemList, \"/items\")\napi.add_resource(UserRegister, \"/signup\")\napi.add_resource(Store, \"/stores/<string:name>\")\napi.add_resource(StoreList, \"/stores\")\n" } ]
2
nagyist/mil-tokyo.webdnn
https://github.com/nagyist/mil-tokyo.webdnn
7f7422c28416a51a0f45bff197d5a3c28c64d473
92c0a6c4f330b1055c5402811702ec4b75006d59
249309eaf470a817cb22aebffec1944272f218c4
refs/heads/master
2022-12-25T04:52:09.493658
2022-10-19T08:24:41
2022-10-19T08:24:41
93,415,003
0
0
NOASSERTION
2017-06-05T14:53:30
2017-06-05T14:53:55
2022-12-17T09:03:08
Python
[ { "alpha_fraction": 0.6933333277702332, "alphanum_fraction": 0.7200000286102295, "avg_line_length": 18.736841201782227, "blob_id": "f4579e46a72c7edc6ca5d65ce67edb713d258143", "content_id": "c0a3614b90882577750d2938d8cdf2ee2a36c44f", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 375, "license_type": "permissive", "max_line_length": 118, "num_lines": 19, "path": "/example/detr/README.md", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "# Object detection using DETR\n\nPyTorch (`>=1.7`) is required to run the Python script.\n\n# Operation procedure\n## Convert PyTorch model into ONNX model\n```\npython conversion.py\n```\n\n## Run on a web browser\n\nAt repository root, execute\n\n```\nyarn server\n```\n\nWith this running, open [http://localhost:8080/example/detr/](http://localhost:8080/example/detr/) with a web browser.\n" }, { "alpha_fraction": 0.5978456139564514, "alphanum_fraction": 0.6014362573623657, "avg_line_length": 23.217391967773438, "blob_id": "2778d9c181721e0b578bb03286dd337587c01432", "content_id": "31d473be0c2de0ee10add5095754f38d622b7955", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 557, "license_type": "permissive", "max_line_length": 87, "num_lines": 23, "path": "/example/minimum/make_model.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import os\nimport torch\nimport torch.nn.functional as F\nimport numpy as np\n\nclass MyModel(torch.nn.Module):\n def __init__(self):\n super().__init__()\n \n def forward(self, x):\n h = F.relu(x)\n return h\n\ndef main():\n output_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"model\")\n os.makedirs(output_dir, exist_ok=True)\n\n model = MyModel()\n example_input = torch.zeros((2, 3))\n torch.onnx.export(model, (example_input, ), os.path.join(output_dir, \"model.onnx\"))\n\nif __name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.7245989441871643, "alphanum_fraction": 0.7272727489471436, "avg_line_length": 27.769229888916016, "blob_id": "719327e8faad24aff4b62e3b2ffa51302531d2d6", "content_id": "7c807cefcf65ce3af7742b55303e52e12c8cfa5d", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 374, "license_type": "permissive", "max_line_length": 75, "num_lines": 13, "path": "/src/graph_transpiler/webdnn/parse_onnx.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import numpy as np\nimport onnx\nfrom webdnn.model import Model, Graph, Variable, Operator, ConstantVariable\n\ndef _parse_node(node: onnx.NodeProto):\n pass\n\ndef _parse_graph(graph: onnx.GraphProto) -> Graph:\n pass\n\ndef parse_onnx(model: onnx.ModelProto) -> Model:\n opset_import_version = model.opset_import[0].version # type: int\n graph = _parse_graph(model.graph)\n" }, { "alpha_fraction": 0.6214689016342163, "alphanum_fraction": 0.6258148550987244, "avg_line_length": 24.853933334350586, "blob_id": "1e72ea7d9eb61581c4a76e6ff6dba5c8f9e47f35", "content_id": "16a5f158cfc1325e644ca5959f8821a5db3d1018", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 2301, "license_type": "permissive", "max_line_length": 72, "num_lines": 89, "path": "/src/descriptor_runner/backend/cpu/cpuTensorImpl.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import {\n 
DataArrayConstructor,\n DataArrayTypes,\n DataType,\n} from \"../../interface/core/constants\";\nimport { TensorImpl } from \"../../core/tensorImpl\";\nimport { CPUTensor } from \"../../interface/backend/cpu/cpuTensor\";\nimport { WebDNNLogging } from \"../../logging\";\n\nconst logger = WebDNNLogging.getLogger(\"WebDNN.CPUTensorImpl\");\n\nlet perfTotalMemory = 0;\n\nexport class CPUTensorImpl extends TensorImpl implements CPUTensor {\n data: DataArrayTypes;\n useExternalBuffer: boolean;\n\n constructor(\n dims: ReadonlyArray<number>,\n dataType: DataType = \"float32\",\n data?: DataArrayTypes\n ) {\n super(dims, dataType, \"cpu\");\n this.data = data || new DataArrayConstructor[dataType](this.length);\n if (data) {\n this.useExternalBuffer = true;\n logger.debug(\"CPU memory use existing buffer\", {\n size: this.data.byteLength,\n total: perfTotalMemory,\n });\n } else {\n this.useExternalBuffer = false;\n perfTotalMemory += this.data.byteLength;\n logger.debug(\"CPU memory allocation\", {\n size: this.data.byteLength,\n total: perfTotalMemory,\n });\n }\n }\n\n async getData(): Promise<DataArrayTypes> {\n return this.data;\n }\n\n async setData(data: DataArrayTypes): Promise<void> {\n this.data.set(data);\n }\n\n dispose(): void {\n if (!this.useExternalBuffer) {\n perfTotalMemory -= this.data.byteLength;\n }\n logger.debug(\"CPU memory free\", {\n size: this.data.byteLength,\n total: perfTotalMemory,\n });\n this.data = new Float32Array(1);\n }\n\n static isCPUTensor(tensor: TensorImpl): tensor is CPUTensorImpl {\n return tensor.backend === \"cpu\";\n }\n\n getDataSync(): DataArrayTypes {\n return this.data;\n }\n\n getValue(idxs: number[]): number {\n if (idxs.length !== this.ndim) {\n throw new Error(\"length of idxs does not match tensor.ndim\");\n }\n let ofs = 0;\n for (let i = 0; i < this.ndim; i++) {\n ofs += this.strides[i] * idxs[i];\n }\n return this.data[ofs];\n }\n\n setValue(value: number, idxs: number[]): void {\n if (idxs.length !== this.ndim) {\n throw new Error(\"length of idxs does not match tensor.ndim\");\n }\n let ofs = 0;\n for (let i = 0; i < this.ndim; i++) {\n ofs += this.strides[i] * idxs[i];\n }\n this.data[ofs] = value;\n }\n}\n" }, { "alpha_fraction": 0.675000011920929, "alphanum_fraction": 0.675000011920929, "avg_line_length": 25.66666603088379, "blob_id": "0a3bdf121ac2c9fc663d6d469b08effa55959bc3", "content_id": "361e0a82ddc11910e6c8ff3d2890297be466619f", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 80, "license_type": "permissive", "max_line_length": 43, "num_lines": 3, "path": "/src/descriptor_runner/interface/backend/webgpu/webgpuTensor.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { Tensor } from \"../../core/tensor\";\n\nexport type WebGPUTensor = Tensor;\n" }, { "alpha_fraction": 0.7405462265014648, "alphanum_fraction": 0.7452731132507324, "avg_line_length": 44.30952453613281, "blob_id": "6dd6bbde5c9463211d34a2954f8ee9b2d9a8d498", "content_id": "ab36712e7ffbbe45fe93dce0c03749a5b27f02c3", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1904, "license_type": "permissive", "max_line_length": 138, "num_lines": 42, "path": "/src/graph_transpiler/webdnn/passes.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "\nfrom typing import List\nimport onnx\nfrom webdnn.pass_fusion_unary_cpu import PassFusionUnaryCPU\n\nfrom 
webdnn.optimization_pass import OptimizationPass, OptimizationPassResult\nfrom webdnn.optimization_pass_result_cpu import OptimizationPassResultCPU\nfrom webdnn.pass_fusion_unary_wasm import PassFusionUnaryWasm\nfrom webdnn.optimization_pass_result_wasm import OptimizationPassResultWasm\nfrom webdnn.pass_fusion_unary_webgl import PassFusionUnaryWebGL\nfrom webdnn.pass_matmul_transpose_webgl2 import PassMatMulTransposeWebGL2\nfrom webdnn.pass_conv_reshape_webgl import PassConvReshapeWebGL\nfrom webdnn.optimization_pass_result_webgl import OptimizationPassResultWebGL\n\ndef make_backend_passes(backend: str) -> List[OptimizationPass]:\n    if backend == \"cpu\":\n        return [PassFusionUnaryCPU()]\n    elif backend == \"wasm\":\n        return [PassFusionUnaryWasm()]\n    elif backend.startswith(\"webgl1\"):\n        max_texture_size = int(backend.split(\"-\")[1])\n        return [PassFusionUnaryWebGL(), PassConvReshapeWebGL(webgl2=False, max_texture_size=max_texture_size)]\n    elif backend.startswith(\"webgl2\"):\n        max_texture_size = int(backend.split(\"-\")[1])\n        return [PassFusionUnaryWebGL(), PassConvReshapeWebGL(webgl2=True, max_texture_size=max_texture_size), PassMatMulTransposeWebGL2()]\n    else:\n        raise ValueError\n\ndef run_passes(model: onnx.ModelProto, backend: str) -> OptimizationPassResult:\n    passes = make_backend_passes(backend)\n    if backend == \"cpu\":\n        result_merged = OptimizationPassResultCPU()\n    elif backend == \"wasm\":\n        result_merged = OptimizationPassResultWasm()\n    elif backend.startswith(\"webgl\"):\n        result_merged = OptimizationPassResultWebGL()\n    else:\n        raise NotImplementedError\n    for p in passes:\n        result = p.optimize(model)\n        if result is not None:\n            result_merged.merge(result)\n    return result_merged\n" }, { "alpha_fraction": 0.730053186416626, "alphanum_fraction": 0.7482269406318665, "avg_line_length": 28.556962966918945, "blob_id": "0b9663cc93817c080fe8fe131d3fe2fbc363a65b", "content_id": "ce040ebed2f60cdb93d44aa7246416eed7266ed7", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2262, "license_type": "permissive", "max_line_length": 304, "num_lines": 79, "path": "/README.md", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "# WebDNN\n\n[日本語](README.ja.md)\n\nThis is the alpha version of WebDNN version 2. The main difference between WebDNN 1.x and WebDNN 2.x is that WebDNN 2.x only accepts ONNX models as input, allowing ONNX models to be loaded directly into a web browser without Python preprocessing. In addition, offline model optimization is also possible.\n\n[Version 1.x](https://github.com/mil-tokyo/webdnn/tree/v1.2.11)\n\n# Supported backends (acceleration technologies)\n\nWebGL is available in most modern browsers.\n\n- WebGPU\n  - The draft version implemented in Chrome Canary.\n  - WebGPU in iOS 13 is not supported because it requires shaders based on the deprecated WSL language.\n- WebGL\n  - WebGL2 is used when available; Safari, which only supports WebGL1, is also supported.\n- WebAssembly\n\n# Environment setup\n\nAn environment running node.js 14, Python 3.6+, and emscripten 2.0+ is required.\n\n```\nyarn\npython setup.py develop\n```\n\n# Build\n```\nyarn build:all\n```\n\nBuild outputs:\n- `dist/webdnn.js`\n  - Library that can load unoptimized ONNX models\n- `dist/webdnn-core.js`\n  - Library that can load ONNX models optimized by WebDNN\n\n# Basic usage\n\nLoad `dist/webdnn.js` with the `<script>` tag to globally add a `WebDNN` object. 
Assuming that the ONNX model `model_directory/model.onnx` exists, the following code runs the model with an input tensor of shape `[1, 2]`.\n\n```javascript\nconst runner = await WebDNN.load(\"model_directory/\");\nconst inputDataArray = new Float32Array([5.1, -2.3]);\nconst inputTensor = new WebDNN.CPUTensor([1, 2], \"float32\", inputDataArray);\nconst [outputTensor] = await runner.run([inputTensor]);\n\nconsole.log(outputTensor.data); // Float32Array\n```\n\nSee `example/minimum` for the complete minimal working code.\n\n# Test\n\nGenerate the ONNX models and input/output tensors to be tested:\n\n```\npython test/model_test/make_models.py\n```\n\nRun on a web browser:\n\n```\nyarn server\n```\n\nOpen <http://localhost:8080/test/model_test/runner/standard.html> with a web browser, check the backend you want to test, and click the Test button to run the test.\n\nTo test with model optimization included, use\n\n```\npython test/model_test/make_models.py --optimize\n```\n\nand open <http://localhost:8080/test/model_test/runner/optimized.html>. Note that `make_models.py` takes a long time to run in this mode.\n" }, { "alpha_fraction": 0.7822580933570862, "alphanum_fraction": 0.7822580933570862, "avg_line_length": 19.66666603088379, "blob_id": "6e7acd099223855138bb952b6ee4bb716b79dd73", "content_id": "dd17112090ae548c70f91832456cd689ae716cf3", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 124, "license_type": "permissive", "max_line_length": 42, "num_lines": 6, "path": "/src/shader/wasm/src/common/kernel.hpp", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "#ifdef __EMSCRIPTEN__\n#include <emscripten.h>\n#define WEBDNN_KERNEL EMSCRIPTEN_KEEPALIVE\n#else\n#define WEBDNN_KERNEL\n#endif\n" }, { "alpha_fraction": 0.6751968264579773, "alphanum_fraction": 0.6795275807380676, "avg_line_length": 24.656564712524414, "blob_id": "438db38c335fb90214ce37c896c023b605a0c060", "content_id": "957c5276129ddabe230325f1dbf066bb1470eb65", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 2540, "license_type": "permissive", "max_line_length": 70, "num_lines": 99, "path": "/src/descriptor_runner/backend/wasm/wasmTensorImpl.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import {\n  DataArrayConstructor,\n  DataArrayTypes,\n  DataType,\n} from \"../../interface/core/constants\";\nimport { TensorImpl } from \"../../core/tensorImpl\";\nimport { WebDNNWasmContextImpl } from \"./wasmContextImpl\";\nimport {\n  WasmSharedBufferInterface,\n  WasmTensor,\n} from \"../../interface/backend/wasm/wasmTensor\";\nimport { WebDNNLogging } from \"../../logging\";\n\nconst logger = WebDNNLogging.getLogger(\"WebDNN.WasmTensorImpl\");\n\nexport class WasmSharedBuffer implements WasmSharedBufferInterface {\n  private static nextBackendBufferId = 1;\n\n  refCount: number;\n\n  backendBufferId: number;\n\n  constructor(\n    private context: WebDNNWasmContextImpl,\n    public byteLength: number\n  ) {\n    this.refCount = 1;\n    this.backendBufferId = WasmSharedBuffer.nextBackendBufferId++;\n    this.context.allocBuffer(this);\n    this.context.perfTotalMemory += this.byteLength;\n    logger.debug(\"WASM memory allocation\", {\n      size: this.byteLength,\n      total: this.context.perfTotalMemory,\n    });\n  }\n\n  incrRef(): void {\n    this.refCount++;\n  }\n\n  dispose(): void {\n    this.refCount--;\n    if (this.refCount <= 0) {\n      this.context.perfTotalMemory -= this.byteLength;\n      logger.debug(\"WASM 
memory free\", {\n size: this.byteLength,\n total: this.context.perfTotalMemory,\n });\n this.context.destroyBuffer(this);\n }\n }\n}\n\nexport class WasmTensorImpl extends TensorImpl implements WasmTensor {\n sharedBuffer: WasmSharedBuffer;\n\n constructor(\n private context: WebDNNWasmContextImpl,\n dims: ReadonlyArray<number>,\n dataType: DataType = \"float32\",\n sharedBuffer?: WasmSharedBuffer\n ) {\n super(dims, dataType, \"wasm\");\n if (dataType !== \"float32\") {\n throw new Error(\"WasmTensor only supports float32\");\n }\n if (sharedBuffer) {\n this.sharedBuffer = sharedBuffer;\n } else {\n this.sharedBuffer = new WasmSharedBuffer(\n this.context,\n this.length * Float32Array.BYTES_PER_ELEMENT\n );\n }\n }\n\n alias(dims: ReadonlyArray<number>): WasmTensorImpl {\n this.sharedBuffer.incrRef();\n return new WasmTensorImpl(\n this.context,\n dims,\n this.dataType,\n this.sharedBuffer\n );\n }\n\n async getData(): Promise<DataArrayTypes> {\n const buf = await this.context.readTensor(this.sharedBuffer);\n return new DataArrayConstructor[this.dataType](buf.buffer);\n }\n\n async setData(data: DataArrayTypes): Promise<void> {\n this.context.writeTensor(this.sharedBuffer, data);\n }\n\n dispose(): void {\n this.sharedBuffer.dispose();\n }\n}\n" }, { "alpha_fraction": 0.6095210909843445, "alphanum_fraction": 0.6209806203842163, "avg_line_length": 27.47402572631836, "blob_id": "7a079fecf26d0941ff92e958d4c35f1c0483bc63", "content_id": "6479ac9fdf6c5a5aa45fe360e0b8c2df1b90dc56", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 17854, "license_type": "permissive", "max_line_length": 116, "num_lines": 616, "path": "/src/descriptor_runner/backend/webgl/webglContextImpl.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { WebDNNCPUContext } from \"../../interface/backend/cpu/cpuContext\";\nimport {\n WebDNNWebGLContext,\n WebDNNWebGLContextOption,\n WebDNNWebGLContextPerformance,\n WebDNNWebGLVersion,\n WebGLUniformItem,\n} from \"../../interface/backend/webgl/webglContext\";\nimport { WebGLTensor } from \"../../interface/backend/webgl/webglTensor\";\nimport { DataType } from \"../../interface/core/constants\";\nimport { Tensor } from \"../../interface/core/tensor\";\nimport { nonnull } from \"../../util\";\nimport { WebGLTensorImpl } from \"./webglTensorImpl\";\nimport { WebDNNLogging } from \"../../logging\";\n\nconst logger = WebDNNLogging.getLogger(\"WebDNN.WebDNNWebGLContextImpl\");\n\n// [x y u v] * [upper-left, lower-left, upper-right, lower-right]\nconst vertexArray = new Float32Array([-1, +1, -1, -1, +1, +1, +1, -1]),\n vertex_shader_source_1 = `\nprecision highp float;\nattribute vec2 _xy;\nvoid main() { \n gl_Position = vec4(_xy, 0, 1); \n}\n`,\n vertex_shader_source_2 = `#version 300 es\nprecision highp float;\nin vec2 _xy;\nvoid main() { \n gl_Position = vec4(_xy, 0, 1); \n}\n`;\n\nfunction deleteTextureWait() {\n return new Promise<void>((resolve) => {\n setTimeout(resolve, 1);\n });\n}\n\nfunction wait(msec = 1) {\n return new Promise<void>((resolve) => {\n setTimeout(resolve, msec);\n });\n}\n\ninterface WebGLSharedTexturePoolItem {\n textureWidth: number;\n textureHeight: number;\n dimPerPixel: 1 | 4;\n texture: WebGLTexture;\n}\n\nexport class WebGLSharedTexture {\n refCount: number;\n\n texture: WebGLTexture;\n\n constructor(\n private context: WebDNNWebGLContextImpl,\n public textureWidth: number,\n public textureHeight: number,\n public dimPerPixel: 1 | 4\n ) {\n 
this.refCount = 1;\n const { gl } = this.context;\n let pooled: WebGLTexture | null = null;\n for (let i = 0; i < this.context.texturePool.length; i++) {\n const item = this.context.texturePool[i];\n if (\n item.textureWidth === textureWidth &&\n item.textureHeight === textureHeight &&\n item.dimPerPixel === dimPerPixel\n ) {\n pooled = item.texture;\n this.context.texturePool.splice(i, 1);\n break;\n }\n }\n\n const byteLength =\n this.textureWidth *\n this.textureHeight *\n this.dimPerPixel *\n Float32Array.BYTES_PER_ELEMENT;\n\n if (pooled) {\n this.texture = pooled;\n\n logger.debug(\"WEBGL memory from pool\", {\n size: byteLength,\n total: this.context.perfTotalMemory,\n });\n } else {\n this.context.limitTexturePool(\n this.context.maxAllocationBytes - byteLength,\n this.context.deallocateToBytes - byteLength\n );\n this.texture = nonnull(gl.createTexture());\n\n gl.activeTexture(gl.TEXTURE0 + 9); // TODO: texture unit 9 is always available?\n gl.bindTexture(gl.TEXTURE_2D, this.texture);\n /*\n * WebGL2: dimPerPixel==1: R channelのみ使用, dimPerPixel==4: RGBAチャンネルを利用(一部の最適化されたオペレータ用)\n * WebGL1: RGBA各8bitにfloatをpackして使用(floatテクスチャ未対応環境を想定)\n */\n if (this.context.isWebGL2(gl)) {\n gl.texStorage2D(\n gl.TEXTURE_2D,\n 1,\n this.context.supportsTexture32bit\n ? dimPerPixel === 1\n ? gl.R32F\n : gl.RGBA32F\n : dimPerPixel === 1\n ? gl.R16F\n : gl.RGBA16F,\n this.textureWidth,\n this.textureHeight\n );\n } else {\n if (dimPerPixel !== 1) {\n throw new Error(\"colorPerPixel must be 1 in WebGL1\");\n }\n gl.texImage2D(\n gl.TEXTURE_2D,\n 0,\n gl.RGBA,\n this.textureWidth,\n this.textureHeight,\n 0,\n gl.RGBA,\n gl.UNSIGNED_BYTE,\n null\n );\n }\n\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);\n gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);\n gl.bindTexture(gl.TEXTURE_2D, null);\n\n this.context.perfTotalMemory += byteLength;\n logger.debug(\"WEBGL memory allocation\", {\n size: byteLength,\n total: this.context.perfTotalMemory,\n });\n }\n }\n\n incrRef(): void {\n this.refCount++;\n }\n\n dispose(): void {\n this.refCount--;\n const byteLength =\n this.textureWidth *\n this.textureHeight *\n this.dimPerPixel *\n Float32Array.BYTES_PER_ELEMENT;\n if (this.refCount <= 0) {\n this.context.texturePool.push({\n textureWidth: this.textureWidth,\n textureHeight: this.textureHeight,\n dimPerPixel: this.dimPerPixel,\n texture: this.texture,\n });\n logger.debug(\"WEBGL memory to pool\", {\n size: byteLength,\n total: this.context.perfTotalMemory,\n });\n }\n }\n}\n\nfunction initWebGL(versionOrder?: WebDNNWebGLVersion[]) {\n const canvas = document.createElement(\"canvas\");\n let gl: WebGLRenderingContext | WebGL2RenderingContext | null = null;\n for (const version of versionOrder || [\n \"webgl2-16384\",\n \"webgl2-4096\",\n \"webgl1-16384\",\n \"webgl1-4096\",\n ]) {\n let webgl2 = false;\n if (version.startsWith(\"webgl2\")) {\n gl = canvas.getContext(\"webgl2\");\n if (!gl) {\n continue;\n }\n webgl2 = true;\n } else {\n gl = canvas.getContext(\"webgl\");\n if (!gl) {\n continue;\n }\n }\n const allowedTextureSize = gl.getParameter(gl.MAX_TEXTURE_SIZE) as number;\n const maxTextureSize = Number(version.slice(7)); // 16384 or 4096\n if (maxTextureSize > allowedTextureSize) {\n continue;\n }\n\n return {\n version,\n webgl2,\n maxTextureSize,\n gl,\n };\n }\n return null;\n}\n\nexport class WebDNNWebGLContextImpl 
implements WebDNNWebGLContext {\n  backend = \"webgl\" as const;\n\n  canOnlyReadRGBA: boolean;\n\n  gl: WebGLRenderingContext | WebGL2RenderingContext;\n\n  vshader!: WebGLShader;\n\n  fb: WebGLFramebuffer;\n\n  webgl2: boolean;\n\n  programs: Map<string, { program: WebGLProgram }> = new Map();\n\n  initialized = false;\n\n  maxTextureSize: number;\n\n  texturePool: WebGLSharedTexturePoolItem[] = [];\n\n  perfTotalMemory = 0;\n\n  private needsDeleteTextureWait = false;\n\n  maxAllocationBytes: number;\n\n  deallocateToBytes: number;\n\n  version: WebDNNWebGLVersion;\n  supportsTexture32bit: boolean;\n  supportsTexture16bit: boolean;\n\n  private timerQueryExt: {\n    TIME_ELAPSED_EXT: number;\n    GPU_DISJOINT_EXT: number;\n  } | null = null;\n  private performanceQueries: {\n    info: WebDNNWebGLContextPerformance;\n    query: WebGLQuery;\n  }[] = [];\n  performanceQueryKey: string | null = null;\n\n  constructor(\n    public cpuContext: WebDNNCPUContext,\n    option: WebDNNWebGLContextOption\n  ) {\n    this.maxAllocationBytes = option.maxAllocationBytes || 512 * 1024 * 1024;\n    this.deallocateToBytes =\n      option.deallocateToBytes || Math.floor(this.maxAllocationBytes / 2);\n\n    // Bug workaround:\n    // On Mac with Chrome/Firefox, trying to read an R-channel-only texture with readPixels raises an error:\n    // GL ERROR :GL_INVALID_OPERATION : glReadPixels: format and type incompatible with the current read framebuffer\n    const ua = navigator.userAgent;\n    this.canOnlyReadRGBA =\n      ua.includes(\"Macintosh\") &&\n      (ua.includes(\"Chrome/\") || ua.includes(\"Firefox/\"));\n\n    const initResult = initWebGL(option.versionOrder);\n    if (!initResult) {\n      throw new Error(\n        \"WebGL is not supported or does not have enough capability on this platform.\"\n      );\n    }\n    const { gl, version, webgl2, maxTextureSize } = initResult;\n    this.gl = gl;\n    this.webgl2 = webgl2;\n    this.maxTextureSize = maxTextureSize;\n    this.version = version;\n    if (this.webgl2) {\n      if (gl.getExtension(\"EXT_color_buffer_float\")) {\n        // Enable color mode of gl.R32F\n        this.supportsTexture32bit = true;\n        // If EXT_color_buffer_float is available, R16F is also included\n        // Even when it is available, some environments do not provide EXT_color_buffer_half_float\n        this.supportsTexture16bit = true;\n      } else if (gl.getExtension(\"EXT_color_buffer_half_float\")) {\n        // Enable color mode of gl.R16F\n        this.supportsTexture32bit = false;\n        this.supportsTexture16bit = true;\n      } else {\n        // Environments that cannot store floating-point textures are not supported\n        throw new Error(\n          \"Neither EXT_color_buffer_float nor EXT_color_buffer_half_float are supported\"\n        );\n      }\n\n      this.timerQueryExt = gl.getExtension(\"EXT_disjoint_timer_query_webgl2\");\n    } else {\n      this.supportsTexture32bit = false;\n      this.supportsTexture16bit = false;\n    }\n    gl.disable(gl.DEPTH_TEST);\n    gl.disable(gl.STENCIL_TEST);\n    gl.disable(gl.BLEND);\n    gl.disable(gl.DITHER);\n    gl.disable(gl.POLYGON_OFFSET_FILL);\n    gl.disable(gl.SAMPLE_COVERAGE);\n    gl.enable(gl.SCISSOR_TEST);\n    gl.enable(gl.CULL_FACE);\n    gl.cullFace(gl.BACK);\n    gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);\n\n    const vertexBuffer = this.createArrayBuffer(vertexArray);\n    this.bindArrayBuffer(vertexBuffer);\n    this.fb = nonnull(gl.createFramebuffer());\n    gl.bindFramebuffer(gl.FRAMEBUFFER, this.fb);\n  }\n\n  async initialize(): Promise<void> {\n    this.initialized = true;\n  }\n\n  private checkInitialized() {\n    if (!this.initialized) {\n      throw new Error(\"Not initialized\");\n    }\n  }\n\n  isWebGLTensor(tensor: Tensor): tensor is WebGLTensor {\n    return tensor.backend === this.backend;\n  }\n\n  assertsWebGLTensor(tensor: Tensor): asserts tensor is WebGLTensor {\n    if (tensor.backend !== this.backend) {\n      throw new Error(\n        `Tensor 
backend ${this.backend} is expected, but ${tensor.backend} is given.`\n );\n }\n }\n\n assertsWebGLTensorArray(tensors: Tensor[]): asserts tensors is WebGLTensor[] {\n for (const tensor of tensors) {\n if (tensor.backend !== this.backend) {\n throw new Error(\n `Tensor backend ${this.backend} is expected, but ${tensor.backend} is given.`\n );\n }\n }\n }\n\n emptyTensor(\n dims: ReadonlyArray<number>,\n dataType?: DataType,\n option?: { dimPerPixel?: 1 | 4; textureShape?: ReadonlyArray<number> }\n ): WebGLTensor {\n return new WebGLTensorImpl(\n this,\n dims,\n dataType,\n option?.dimPerPixel,\n option?.textureShape\n );\n }\n\n async moveTensor(\n tensor: Tensor,\n option: { dimPerPixel?: 1 | 4; textureShape?: ReadonlyArray<number> }\n ): Promise<WebGLTensor> {\n const dst = new WebGLTensorImpl(\n this,\n tensor.dims,\n tensor.dataType,\n option.dimPerPixel,\n option.textureShape\n );\n await dst.setData(await tensor.getData());\n return dst;\n }\n\n createArrayBuffer(vertexArray: Float32Array): WebGLBuffer {\n const buffer = nonnull(this.gl.createBuffer());\n this.gl.bindBuffer(this.gl.ARRAY_BUFFER, buffer);\n this.gl.bufferData(this.gl.ARRAY_BUFFER, vertexArray, this.gl.STATIC_DRAW);\n\n return buffer;\n }\n\n bindArrayBuffer(buffer: WebGLBuffer): void {\n this.gl.bindBuffer(this.gl.ARRAY_BUFFER, buffer);\n }\n\n createShader(type: number, source: string): WebGLShader {\n const shader = nonnull(this.gl.createShader(type));\n\n this.gl.shaderSource(shader, source);\n this.gl.compileShader(shader);\n if (!this.gl.getShaderParameter(shader, this.gl.COMPILE_STATUS)) {\n logger.error(this.gl.getShaderInfoLog(shader));\n throw Error(`Shader Compile failed: ${this.gl.getShaderInfoLog(shader)}`);\n }\n\n return shader;\n }\n\n addKernel(name: string, sourceCode: string): void {\n if (this.programs.has(name)) {\n return;\n }\n this.programs.set(name, { program: this.compileKernel(sourceCode) });\n }\n\n hasKernel(name: string): boolean {\n return this.programs.has(name);\n }\n\n compileKernel(sourceCode: string): WebGLProgram {\n const { gl } = this;\n if (!this.vshader) {\n this.vshader = this.createShader(\n gl.VERTEX_SHADER,\n this.webgl2 ? 
vertex_shader_source_2 : vertex_shader_source_1\n );\n }\n const fshader = this.createShader(gl.FRAGMENT_SHADER, sourceCode),\n program = nonnull(this.gl.createProgram());\n\n this.gl.attachShader(program, fshader);\n this.gl.attachShader(program, this.vshader);\n this.gl.linkProgram(program);\n if (!this.gl.getProgramParameter(program, this.gl.LINK_STATUS)) {\n logger.error(this.gl.getProgramInfoLog(program));\n throw new Error(\"ShaderProgram Initialization failed.\");\n }\n\n return program;\n }\n\n async runKernel(\n name: string,\n inputs: { tensor: WebGLTensorImpl; name: string }[],\n output: WebGLTensorImpl,\n uniforms: WebGLUniformItem[]\n ): Promise<void> {\n this.checkInitialized();\n const gl2 = this.gl;\n let query: WebGLQuery | null = null;\n if (\n this.isWebGL2(gl2) &&\n this.timerQueryExt &&\n this.performanceQueryKey != null\n ) {\n query = gl2.createQuery();\n if (query) {\n gl2.beginQuery(this.timerQueryExt.TIME_ELAPSED_EXT, query);\n }\n }\n\n if (this.needsDeleteTextureWait) {\n await deleteTextureWait();\n this.needsDeleteTextureWait = false;\n }\n const kobj = this.programs.get(name);\n if (!kobj) {\n throw new Error(`Unknown kernel ${name}`);\n }\n const { gl } = this,\n xyAttribLoc = gl.getAttribLocation(kobj.program, \"_xy\");\n for (let i = 0; i < inputs.length; i++) {\n inputs[i].tensor.bindToReadTexture(i);\n }\n output.bindToDrawTexture();\n\n gl.useProgram(kobj.program);\n\n for (let i = 0; i < inputs.length; i++) {\n gl.uniform1i(gl.getUniformLocation(kobj.program, inputs[i].name), i);\n }\n\n for (const uniform of uniforms) {\n switch (uniform.type) {\n case \"float\":\n gl.uniform1f(\n gl.getUniformLocation(kobj.program, uniform.name),\n uniform.value\n );\n break;\n case \"int\":\n gl.uniform1i(\n gl.getUniformLocation(kobj.program, uniform.name),\n uniform.value\n );\n break;\n default:\n throw new Error();\n }\n }\n gl.vertexAttribPointer(xyAttribLoc, 2, gl.FLOAT, true, 8, 0);\n gl.enableVertexAttribArray(xyAttribLoc);\n\n gl.drawArrays(gl.TRIANGLE_STRIP, 0, vertexArray.length / 2);\n // TODO: 完了を待つかどうか\n\n for (let i = 0; i < inputs.length; i++) {\n inputs[i].tensor.unbindFromReadTexture();\n }\n\n output.unbindFromDrawTexture();\n\n if (query) {\n if (this.isWebGL2(gl2) && this.timerQueryExt) {\n gl2.endQuery(this.timerQueryExt.TIME_ELAPSED_EXT);\n const info: WebDNNWebGLContextPerformance = {\n // eslint-disable-next-line @typescript-eslint/no-non-null-assertion\n key: this.performanceQueryKey!,\n kernelName: name,\n inputs: inputs.map(({ tensor, name }) => ({\n dims: tensor.dims.slice(),\n name,\n })),\n output: { dims: output.dims.slice() },\n elapsedNanoSecond: 0,\n gpuDisjoint: false,\n };\n this.performanceQueries.push({ info, query });\n }\n }\n }\n\n isWebGL2(\n gl: WebGLRenderingContext | WebGL2RenderingContext\n ): gl is WebGL2RenderingContext {\n return this.webgl2;\n }\n\n limitTexturePool(maxBytes: number, reductionBytes: number): void {\n // remove oldest textures when total size exceeds limitThreshold.\n // remove continues until total size is below removeThreshold\n // why remove multiple textures once?\n // deleteTexture does not immediately free memory, so timer wait is needed\n\n if (this.perfTotalMemory > maxBytes) {\n while (this.perfTotalMemory > reductionBytes) {\n const tex = this.texturePool.shift();\n if (!tex) {\n break;\n }\n\n const byteLength =\n tex.textureWidth *\n tex.textureHeight *\n tex.dimPerPixel *\n Float32Array.BYTES_PER_ELEMENT;\n this.perfTotalMemory -= byteLength;\n logger.debug(\"WEBGL memory free\", {\n 
size: byteLength,\n total: this.perfTotalMemory,\n });\n this.gl.deleteTexture(tex.texture);\n this.needsDeleteTextureWait = true;\n }\n }\n }\n\n enablePerformanceQuery(key: string | null): void {\n this.performanceQueryKey = key;\n }\n\n gatherPerformanceQueryResult(): Promise<WebDNNWebGLContextPerformance[]> {\n const gl2 = this.gl;\n if (this.isWebGL2(gl2) && this.timerQueryExt) {\n let gpuDisjoint = false;\n if (gl2.getParameter(this.timerQueryExt.GPU_DISJOINT_EXT)) {\n gpuDisjoint = true;\n }\n return new Promise((resolve) => {\n const gathereds: WebDNNWebGLContextPerformance[] = [];\n const gather = () => {\n // eslint-disable-next-line no-constant-condition\n while (true) {\n const q = this.performanceQueries[0];\n if (!q) {\n resolve(gathereds);\n break;\n } else {\n if (gl2.getQueryParameter(q.query, gl2.QUERY_RESULT_AVAILABLE)) {\n const elapsedNanoSecond = gl2.getQueryParameter(\n q.query,\n gl2.QUERY_RESULT\n ) as number;\n this.performanceQueries.shift();\n const info = q.info;\n info.elapsedNanoSecond = elapsedNanoSecond;\n info.gpuDisjoint = gpuDisjoint;\n gathereds.push(info);\n } else {\n // need wait\n wait(10).then(gather);\n break;\n }\n }\n }\n };\n gather();\n });\n } else {\n return Promise.reject(\"Performance query not supported\");\n }\n }\n}\n" }, { "alpha_fraction": 0.42633381485939026, "alphanum_fraction": 0.46255508065223694, "avg_line_length": 26.98630142211914, "blob_id": "ff4bb36ccab573dec58a63d2201b365d15911f48", "content_id": "769373bcb1fbfdf202259c28fefc8593f068de6d", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 8210, "license_type": "permissive", "max_line_length": 85, "num_lines": 292, "path": "/src/descriptor_runner/operators/cpu/operators/standard/binary7.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { DataArrayTypes, DataType } from \"../../../../interface/core/constants\";\nimport { OperatorImpl } from \"../../../operatorImpl\";\nimport { broadcastMulti } from \"../../../operatorUtil\";\nimport { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\nclass Binary7 extends OperatorImpl {\n constructor(\n private op: (lhs: number, rhs: number) => number,\n private allowDataTypes: DataType[]\n ) {\n super(\"cpu\");\n }\n\n async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsCPUTensorArray(inputs);\n const inputA = inputs[0],\n inputB = inputs[1];\n if (inputA.dataType !== inputB.dataType) {\n throw new Error(\n `Binary: input dataTypes mismatch: ${inputA.dataType} !== ${inputB.dataType}`\n );\n }\n if (!this.allowDataTypes.includes(inputA.dataType)) {\n throw new Error(\n `Binary: input dataType ${inputA.dataType} is not supported`\n );\n }\n // TODO: broadcast不要の場合に特化したパフォーマンス向上\n\n const { dims: outShape, allStrides: inAllStrides } = broadcastMulti([\n inputA.dims,\n inputB.dims,\n ]),\n output = context.emptyTensor(outShape, inputA.dataType),\n { op } = this;\n let func;\n switch (outShape.length) {\n case 0:\n func = this.op0d;\n break;\n case 1:\n func = this.op1d;\n break;\n case 2:\n func = this.op2d;\n break;\n case 3:\n func = this.op3d;\n break;\n case 4:\n func = this.op4d;\n break;\n case 5:\n func = this.op5d;\n break;\n case 6:\n func = this.op6d;\n break;\n default:\n throw new Error(\n `Binary: input ndim 
${outShape.length} > 6 is not yet supported`\n        );\n    }\n    func(inputA.data, inputB.data, output.data, op, outShape, inAllStrides);\n    return [output];\n  }\n\n  private op0d(\n    dL: DataArrayTypes,\n    dR: DataArrayTypes,\n    dO: DataArrayTypes,\n    op: (lhs: number, rhs: number) => number,\n    // eslint-disable-next-line @typescript-eslint/no-unused-vars\n    outShape: number[],\n    // eslint-disable-next-line @typescript-eslint/no-unused-vars\n    inAllStrides: number[][]\n  ) {\n    dO[0] = op(dL[0], dR[0]);\n  }\n\n  private op1d(\n    dL: DataArrayTypes,\n    dR: DataArrayTypes,\n    dO: DataArrayTypes,\n    op: (lhs: number, rhs: number) => number,\n    outShape: number[],\n    inAllStrides: number[][]\n  ) {\n    let idx = 0;\n    for (let a0 = 0; a0 < outShape[0]; a0++) {\n      dO[idx++] = op(dL[a0 * inAllStrides[0][0]], dR[a0 * inAllStrides[1][0]]);\n    }\n  }\n\n  private op2d(\n    dL: DataArrayTypes,\n    dR: DataArrayTypes,\n    dO: DataArrayTypes,\n    op: (lhs: number, rhs: number) => number,\n    outShape: number[],\n    inAllStrides: number[][]\n  ) {\n    let idx = 0;\n    for (let a0 = 0; a0 < outShape[0]; a0++) {\n      for (let a1 = 0; a1 < outShape[1]; a1++) {\n        dO[idx++] = op(\n          dL[a0 * inAllStrides[0][0] + a1 * inAllStrides[0][1]],\n          dR[a0 * inAllStrides[1][0] + a1 * inAllStrides[1][1]]\n        );\n      }\n    }\n  }\n\n  private op3d(\n    dL: DataArrayTypes,\n    dR: DataArrayTypes,\n    dO: DataArrayTypes,\n    op: (lhs: number, rhs: number) => number,\n    outShape: number[],\n    inAllStrides: number[][]\n  ) {\n    let idx = 0;\n    for (let a0 = 0; a0 < outShape[0]; a0++) {\n      for (let a1 = 0; a1 < outShape[1]; a1++) {\n        for (let a2 = 0; a2 < outShape[2]; a2++) {\n          dO[idx++] = op(\n            dL[\n              a0 * inAllStrides[0][0] +\n                a1 * inAllStrides[0][1] +\n                a2 * inAllStrides[0][2]\n            ],\n            dR[\n              a0 * inAllStrides[1][0] +\n                a1 * inAllStrides[1][1] +\n                a2 * inAllStrides[1][2]\n            ]\n          );\n        }\n      }\n    }\n  }\n\n  private op4d(\n    dL: DataArrayTypes,\n    dR: DataArrayTypes,\n    dO: DataArrayTypes,\n    op: (lhs: number, rhs: number) => number,\n    outShape: number[],\n    inAllStrides: number[][]\n  ) {\n    let idx = 0;\n    for (let a0 = 0; a0 < outShape[0]; a0++) {\n      for (let a1 = 0; a1 < outShape[1]; a1++) {\n        for (let a2 = 0; a2 < outShape[2]; a2++) {\n          for (let a3 = 0; a3 < outShape[3]; a3++) {\n            dO[idx++] = op(\n              dL[\n                a0 * inAllStrides[0][0] +\n                  a1 * inAllStrides[0][1] +\n                  a2 * inAllStrides[0][2] +\n                  a3 * inAllStrides[0][3]\n              ],\n              dR[\n                a0 * inAllStrides[1][0] +\n                  a1 * inAllStrides[1][1] +\n                  a2 * inAllStrides[1][2] +\n                  a3 * inAllStrides[1][3]\n              ]\n            );\n          }\n        }\n      }\n    }\n  }\n\n  private op5d(\n    dL: DataArrayTypes,\n    dR: DataArrayTypes,\n    dO: DataArrayTypes,\n    op: (lhs: number, rhs: number) => number,\n    outShape: number[],\n    inAllStrides: number[][]\n  ) {\n    let idx = 0;\n    for (let a0 = 0; a0 < outShape[0]; a0++) {\n      for (let a1 = 0; a1 < outShape[1]; a1++) {\n        for (let a2 = 0; a2 < outShape[2]; a2++) {\n          for (let a3 = 0; a3 < outShape[3]; a3++) {\n            for (let a4 = 0; a4 < outShape[4]; a4++) {\n              dO[idx++] = op(\n                dL[\n                  a0 * inAllStrides[0][0] +\n                    a1 * inAllStrides[0][1] +\n                    a2 * inAllStrides[0][2] +\n                    a3 * inAllStrides[0][3] +\n                    a4 * inAllStrides[0][4]\n                ],\n                dR[\n                  a0 * inAllStrides[1][0] +\n                    a1 * inAllStrides[1][1] +\n                    a2 * inAllStrides[1][2] +\n                    a3 * inAllStrides[1][3] +\n                    a4 * inAllStrides[1][4]\n                ]\n              );\n            }\n          }\n        }\n      }\n    }\n  }\n\n  private op6d(\n    dL: DataArrayTypes,\n    dR: DataArrayTypes,\n    dO: DataArrayTypes,\n    op: (lhs: number, rhs: number) => number,\n    outShape: number[],\n    inAllStrides: number[][]\n  ) {\n    let idx = 0;\n    for (let a0 = 0; a0 < outShape[0]; a0++) {\n      for (let a1 = 0; a1 < outShape[1]; a1++) {\n        for (let a2 = 0; a2 
< outShape[2]; a2++) {\n for (let a3 = 0; a3 < outShape[3]; a3++) {\n for (let a4 = 0; a4 < outShape[4]; a4++) {\n for (let a5 = 0; a5 < outShape[5]; a5++) {\n dO[idx++] = op(\n dL[\n a0 * inAllStrides[0][0] +\n a1 * inAllStrides[0][1] +\n a2 * inAllStrides[0][2] +\n a3 * inAllStrides[0][3] +\n a4 * inAllStrides[0][4] +\n a5 * inAllStrides[0][5]\n ],\n dR[\n a0 * inAllStrides[1][0] +\n a1 * inAllStrides[1][1] +\n a2 * inAllStrides[1][2] +\n a3 * inAllStrides[1][3] +\n a4 * inAllStrides[1][4] +\n a5 * inAllStrides[1][5]\n ]\n );\n }\n }\n }\n }\n }\n }\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n // Add, Sub, Mul, Div, Pow: opset under 7 requires explicit broadcast flag\n {\n opType: \"Add\",\n backend: \"cpu\",\n opsetMin: 7,\n factory: () => new Binary7((lhs, rhs) => lhs + rhs, [\"float32\", \"int32\"]),\n },\n {\n opType: \"Sub\",\n backend: \"cpu\",\n opsetMin: 7,\n factory: () => new Binary7((lhs, rhs) => lhs - rhs, [\"float32\", \"int32\"]),\n },\n {\n opType: \"Mul\",\n backend: \"cpu\",\n opsetMin: 7,\n factory: () => new Binary7((lhs, rhs) => lhs * rhs, [\"float32\", \"int32\"]),\n },\n {\n opType: \"Div\",\n backend: \"cpu\",\n opsetMin: 7,\n factory: () => new Binary7((lhs, rhs) => lhs / rhs, [\"float32\", \"int32\"]),\n },\n {\n opType: \"Pow\",\n backend: \"cpu\",\n opsetMin: 7,\n factory: () =>\n new Binary7((lhs, rhs) => lhs ** rhs, [\"float32\", \"int32\"]),\n },\n ];\n}\n" }, { "alpha_fraction": 0.6660040020942688, "alphanum_fraction": 0.7017892599105835, "avg_line_length": 32.53333282470703, "blob_id": "13aee2ac4e943d732f07ce80d5412ff9b6064d06", "content_id": "73a99f69c875d644a47211b931e57b61235a4c2a", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 503, "license_type": "permissive", "max_line_length": 70, "num_lines": 15, "path": "/src/descriptor_runner/interface/core/constants.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "export type BackendWithoutCPU = \"webgl\" | \"wasm\" | \"webgpu\";\nexport type Backend = \"cpu\" | BackendWithoutCPU;\nexport const backendsWithoutCPU: BackendWithoutCPU[] = [\n \"wasm\",\n \"webgl\",\n \"webgpu\",\n];\nexport const backends: Backend[] = [\"cpu\", \"wasm\", \"webgl\", \"webgpu\"];\nexport type DataType = \"float32\" | \"int32\" | \"bool\";\nexport const DataArrayConstructor = {\n float32: Float32Array,\n int32: Int32Array,\n bool: Uint8Array,\n};\nexport type DataArrayTypes = Float32Array | Int32Array | Uint8Array;\n" }, { "alpha_fraction": 0.6100000143051147, "alphanum_fraction": 0.6217647194862366, "avg_line_length": 25.5625, "blob_id": "7f025aedc28401670a547667c81623be543859f6", "content_id": "9938ec5e00eb23c2bc0c080b526d2fdc36fa94d6", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1700, "license_type": "permissive", "max_line_length": 86, "num_lines": 64, "path": "/src/descriptor_runner/operators/webgl/operators/standard/squeeze.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { WebDNNWebGLContext } from \"../../../../interface/backend/webgl/webglContext\";\nimport { Backend } from \"../../../../interface/core/constants\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { Squeeze1, Squeeze13 } from \"../../../base/squeeze\";\n\nexport class WebGLSqueeze1 extends Squeeze1 {\n constructor() 
{\n    super(\"webgl\");\n  }\n\n  async run(context: WebDNNWebGLContext, inputs: Tensor[]): Promise<Tensor[]> {\n    context.assertsWebGLTensorArray(inputs);\n    const input = inputs[0],\n      computedShape = this.calcShape(input);\n\n    return [input.alias(computedShape)];\n  }\n}\n\nexport class WebGLSqueeze13 extends Squeeze13 {\n  constructor() {\n    super(\"webgl\");\n  }\n\n  getTensorBackendRequirement(\n    nInputs: number,\n    nOutputs: number\n  ): (Backend | null)[] {\n    return [this.backend, \"cpu\"];\n  }\n\n  async run(context: WebDNNWebGLContext, inputs: Tensor[]): Promise<Tensor[]> {\n    const input = inputs[0],\n      axes = inputs[1];\n    if (!context.cpuContext.isCPUTensor(axes)) {\n      throw new Error(`Squeeze: axes is not on cpu.`);\n    }\n    if (!context.isWebGLTensor(input)) {\n      throw new Error(\"Squeeze: input is not on webgl.\");\n    }\n    const computedShape = this.calcShape(input, axes);\n\n    return [input.alias(computedShape)];\n  }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n  return [\n    {\n      opType: \"Squeeze\",\n      backend: \"webgl\",\n      opsetMin: 13,\n      factory: () => new WebGLSqueeze13(),\n    },\n    {\n      opType: \"Squeeze\",\n      backend: \"webgl\",\n      opsetMin: 1,\n      opsetMax: 13,\n      factory: () => new WebGLSqueeze1(),\n    },\n  ];\n}\n" }, { "alpha_fraction": 0.5613908171653748, "alphanum_fraction": 0.5697211027145386, "avg_line_length": 30.022472381591797, "blob_id": "6e3552484270eac47fd9201f31ff6bc89f0dc280", "content_id": "dd464cbd4a62f3ac5dba8d8c9c01c9b33a556dff", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 2779, "license_type": "permissive", "max_line_length": 80, "num_lines": 89, "path": "/src/descriptor_runner/operators/cpu/operators/standard/concat.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { onnx } from \"onnx-proto\";\nimport { OperatorImpl } from \"../../../operatorImpl\";\nimport { arrayProd, getAttrInt } from \"../../../operatorUtil\";\nimport { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { CPUTensor } from \"../../../../interface/backend/cpu/cpuTensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\nclass Concat extends OperatorImpl {\n  axis!: number; // negative values count from the end\n\n  constructor() {\n    super(\"cpu\");\n  }\n\n  initialize(attribute: onnx.IAttributeProto[]): void {\n    super.initialize(attribute);\n    this.axis = getAttrInt(attribute, \"axis\", 0);\n  }\n\n  async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n    context.assertsCPUTensorArray(inputs);\n    const axis = this.axis >= 0 ? 
this.axis : inputs[0].ndim + this.axis;\n if (axis < 0 || axis >= inputs[0].ndim) {\n throw new Error(`Concat: axis ${axis} out of range`);\n }\n const inTensors: [CPUTensor, number, number, number, number, number][] = [];\n let axisLength = 0;\n for (let i = 0; i < inputs.length; i++) {\n const it = inputs[i],\n dim = it.dims[axis],\n outerStride = it.strides[Math.max(axis - 1, 0)],\n concatStride = it.strides[axis],\n innerStride = 1;\n inTensors.push([\n it,\n axisLength,\n dim,\n outerStride,\n concatStride,\n innerStride,\n ]);\n axisLength += dim;\n }\n const outputShape = inputs[0].dims.slice();\n outputShape[axis] = axisLength;\n const outerLength = arrayProd(inputs[0].dims.slice(0, axis)),\n innerLength = arrayProd(inputs[0].dims.slice(axis + 1)),\n output = context.emptyTensor(outputShape, inputs[0].dataType),\n outOuterStride = output.strides[Math.max(axis - 1, 0)],\n outConcatStride = output.strides[axis],\n outInnerStride = 1;\n for (const [\n it,\n itAxisOffset,\n itAxisDim,\n outerStride,\n concatStride,\n innerStride,\n ] of inTensors) {\n for (let c = 0; c < itAxisDim; c++) {\n for (let outer = 0; outer < outerLength; outer++) {\n for (let inner = 0; inner < innerLength; inner++) {\n output.data[\n (c + itAxisOffset) * outConcatStride +\n outer * outOuterStride +\n inner * outInnerStride\n ] =\n it.data[\n c * concatStride + outer * outerStride + inner * innerStride\n ];\n }\n }\n }\n }\n return [output];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Concat\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new Concat(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.632311999797821, "alphanum_fraction": 0.632311999797821, "avg_line_length": 28.91666603088379, "blob_id": "c495aef2f6bdf6bfc2e58f020f0de6da3fb0a0d8", "content_id": "43573a0068d0aaf8bf7889676d6334518809c252", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 359, "license_type": "permissive", "max_line_length": 78, "num_lines": 12, "path": "/src/graph_transpiler/webdnn/operator_shader_wasm.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "from webdnn.operator_shader import OperatorShader\n\nclass OperatorShaderWasm(OperatorShader):\n ts_code: str\n shader_name: str\n cpp_code: str\n\n def __init__(self, ts_code: str, shader_name: str, cpp_code: str) -> None:\n super().__init__()\n self.ts_code = ts_code\n self.shader_name = shader_name\n self.cpp_code = cpp_code\n" }, { "alpha_fraction": 0.6826568245887756, "alphanum_fraction": 0.7195571660995483, "avg_line_length": 13.263157844543457, "blob_id": "ffe32f34c51851fc05fbfcef4b7d21dcf3706849", "content_id": "8ea4c169b1cbb86f16333c5ca68a6ec7b9e14eeb", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 403, "license_type": "permissive", "max_line_length": 96, "num_lines": 19, "path": "/example/detr/README.ja.md", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "# DETRによる物体検出\n\nPythonスクリプトの実行には、PyTorch (`>=1.7`)が必要。\n\n# 操作手順\n## PyTorchモデルをONNXモデルに変換\n```\npython conversion.py\n```\n\n## Webブラウザ上での実行\n\nrepository rootにて\n\n```\nyarn server\n```\n\nを実行。この状態で、Webブラウザで[http://localhost:8080/example/detr/](http://localhost:8080/example/detr/)を開く。\n" }, { "alpha_fraction": 0.6436213850975037, "alphanum_fraction": 0.6493827104568481, "avg_line_length": 28.634145736694336, "blob_id": "3c032eb3ab1b17ed97a38058a4f6f955999847e1", 
"content_id": "58096f72e8eee8290effce3ac103a6a8081e9cb1", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1215, "license_type": "permissive", "max_line_length": 77, "num_lines": 41, "path": "/src/descriptor_runner/operators/base/squeeze.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { onnx } from \"onnx-proto\";\nimport { OperatorImpl } from \"../operatorImpl\";\nimport { getAttrInts } from \"../operatorUtil\";\nimport { Tensor } from \"../../interface/core/tensor\";\nimport { CPUTensor } from \"../..\";\n\nabstract class Squeeze extends OperatorImpl {\n protected calcShapeBase(\n inputShape: ReadonlyArray<number>,\n axes: ReadonlyArray<number>\n ): number[] {\n if (axes.length === 0) {\n // remove all dimensions of 1\n return inputShape.filter((s) => s !== 1);\n } else {\n const nonNegativeAxes = axes.map((a) =>\n a >= 0 ? a : a + inputShape.length\n );\n return inputShape.filter((_, i) => !nonNegativeAxes.includes(i));\n }\n }\n}\n\nexport abstract class Squeeze1 extends Squeeze {\n axes!: number[];\n\n initialize(attribute: onnx.IAttributeProto[]): void {\n super.initialize(attribute);\n this.axes = getAttrInts(attribute, \"axes\", []);\n }\n\n protected calcShape(input: Tensor): number[] {\n return this.calcShapeBase(input.dims, this.axes);\n }\n}\n\nexport abstract class Squeeze13 extends Squeeze {\n protected calcShape(input: Tensor, axes?: CPUTensor): number[] {\n return this.calcShapeBase(input.dims, axes ? Array.from(axes.data) : []);\n }\n}\n" }, { "alpha_fraction": 0.5957297086715698, "alphanum_fraction": 0.6100422143936157, "avg_line_length": 28.804195404052734, "blob_id": "b513448b52b4fbfabf28b5b530df2829b3fbfaad", "content_id": "751cb51b18ce452497df5990fc6c51816ed8f6e4", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 4262, "license_type": "permissive", "max_line_length": 81, "num_lines": 143, "path": "/src/descriptor_runner/operators/webgpu/operators/standard/binary7.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { OperatorImpl } from \"../../../operatorImpl\";\nimport {\n WebDNNWebGPUContext,\n WebGPUMetaBufferContentElement,\n} from \"../../../../interface/backend/webgpu/webgpuContext\";\nimport { WebGPUTensor } from \"../../../../interface/backend/webgpu/webgpuTensor\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { arrayEqual, broadcastMulti } from \"../../../operatorUtil\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport { webgpuShaders } from \"../../shaders\";\n\nclass WebGPUBinary7 extends OperatorImpl {\n constructor(\n public elementwiseShaderName: string,\n private elementwiseShaderBinary: Uint32Array,\n public broadcastShaderNames: string[],\n private broadcastShaderBinaries: Uint32Array[]\n ) {\n super(\"webgpu\");\n }\n\n async run(context: WebDNNWebGPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsWebGPUTensorArray(inputs);\n const inputA = inputs[0],\n inputB = inputs[1];\n if (inputA.dataType !== \"float32\" || inputB.dataType !== \"float32\") {\n throw new Error();\n }\n if (arrayEqual(inputA.dims, inputB.dims)) {\n return this.runElementwise(context, inputA, inputB);\n }\n return this.runBroadcast(context, inputA, inputB);\n }\n\n private async runElementwise(\n context: WebDNNWebGPUContext,\n inputA: WebGPUTensor,\n inputB: WebGPUTensor\n ) {\n const 
outputTensor = context.emptyTensor(inputA.dims, \"float32\");\n\n if (!context.hasPipeline(this.elementwiseShaderName)) {\n context.createPipeline(\n this.elementwiseShaderName,\n this.elementwiseShaderBinary,\n 4\n );\n }\n\n await context.run({\n pipelineName: this.elementwiseShaderName,\n tensors: [inputA, inputB, outputTensor],\n meta: {\n elements: [{ value: inputA.length, type: \"uint32\" }],\n },\n workGroups: {\n x: Math.ceil(Math.min(outputTensor.length, 4096) / 64),\n y: 1,\n z: 1,\n },\n });\n\n return [outputTensor];\n }\n\n private async runBroadcast(\n context: WebDNNWebGPUContext,\n inputA: WebGPUTensor,\n inputB: WebGPUTensor\n ) {\n const { dims: outShape, allStrides: inAllStrides } = broadcastMulti([\n inputA.dims,\n inputB.dims,\n ]),\n outputTensor = context.emptyTensor(outShape, \"float32\"),\n outNDim = outputTensor.ndim,\n metaElements: WebGPUMetaBufferContentElement[] = [\n { value: outputTensor.length, type: \"uint32\" },\n ];\n for (let dim = 0; dim < outNDim; dim++) {\n metaElements.push({ value: outShape[dim], type: \"uint32\" });\n }\n for (let dim = 0; dim < outNDim; dim++) {\n metaElements.push({ value: inAllStrides[0][dim], type: \"uint32\" });\n }\n for (let dim = 0; dim < outNDim; dim++) {\n metaElements.push({ value: inAllStrides[1][dim], type: \"uint32\" });\n }\n\n if (!context.hasPipeline(this.broadcastShaderNames[outNDim])) {\n context.createPipeline(\n this.broadcastShaderNames[outNDim],\n this.broadcastShaderBinaries[outNDim],\n 4\n );\n }\n\n await context.run({\n pipelineName: this.broadcastShaderNames[outNDim],\n tensors: [inputA, inputB, outputTensor],\n meta: {\n elements: metaElements,\n },\n workGroups: {\n x: Math.ceil(Math.min(outputTensor.length, 4096) / 64),\n y: 1,\n z: 1,\n },\n });\n\n return [outputTensor];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n // Add, Sub, Mul, Div, Pow: opset under 7 requires explicit broadcast flag\n {\n opType: \"Add\",\n backend: \"webgpu\",\n opsetMin: 7,\n factory: () =>\n new WebGPUBinary7(\n \"binary_elementwise_add\",\n webgpuShaders.binary_elementwise_add,\n [\n \"binary_broadcast_add_0d\",\n \"binary_broadcast_add_1d\",\n \"binary_broadcast_add_2d\",\n \"binary_broadcast_add_3d\",\n \"binary_broadcast_add_4d\",\n ],\n [\n webgpuShaders.binary_broadcast_add_0d,\n webgpuShaders.binary_broadcast_add_1d,\n webgpuShaders.binary_broadcast_add_2d,\n webgpuShaders.binary_broadcast_add_3d,\n webgpuShaders.binary_broadcast_add_4d,\n ]\n ),\n },\n ];\n}\n" }, { "alpha_fraction": 0.6555184125900269, "alphanum_fraction": 0.6555184125900269, "avg_line_length": 28.3137264251709, "blob_id": "f50bd7f218bc3153d9f73ff0f069dfc40c6d90c6", "content_id": "b48a3671b2dbd601c282882c4c41cad739359ba6", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1495, "license_type": "permissive", "max_line_length": 87, "num_lines": 51, "path": "/src/descriptor_runner/backend/cpu/cpuContextImpl.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { CPUTensor } from \"../..\";\nimport { WebDNNCPUContext } from \"../../interface/backend/cpu/cpuContext\";\nimport { DataArrayTypes, DataType } from \"../../interface/core/constants\";\nimport { Tensor } from \"../../interface/core/tensor\";\nimport { CPUTensorImpl } from \"./cpuTensorImpl\";\n\nexport class WebDNNCPUContextImpl implements WebDNNCPUContext {\n backend = \"cpu\" as const;\n\n // eslint-disable-next-line @typescript-eslint/no-empty-function\n 
async initialize(): Promise<void> {}\n\n isCPUTensor(tensor: Tensor): tensor is CPUTensor {\n return tensor.backend === this.backend;\n }\n\n assertsCPUTensor(tensor: Tensor): asserts tensor is CPUTensor {\n if (tensor.backend !== this.backend) {\n throw new Error(\n `Tensor backend ${this.backend} is expected, but ${tensor.backend} is given.`\n );\n }\n }\n\n assertsCPUTensorArray(tensors: Tensor[]): asserts tensors is CPUTensor[] {\n for (const tensor of tensors) {\n if (tensor.backend !== this.backend) {\n throw new Error(\n `Tensor backend ${this.backend} is expected, but ${tensor.backend} is given.`\n );\n }\n }\n }\n\n emptyTensor(\n dims: ReadonlyArray<number>,\n dataType?: DataType,\n data?: DataArrayTypes\n ): CPUTensor {\n return new CPUTensorImpl(dims, dataType, data);\n }\n\n async moveTensor(tensor: Tensor): Promise<CPUTensor> {\n const dst = new CPUTensorImpl(\n tensor.dims,\n tensor.dataType,\n await tensor.getData()\n );\n return dst;\n }\n}\n" }, { "alpha_fraction": 0.5780670642852783, "alphanum_fraction": 0.5964580178260803, "avg_line_length": 28.759008407592773, "blob_id": "63545eb576a9c3810cad0ea689c1d59469b9142c", "content_id": "d6cedf876a802a2d65ef304d8b2a7f08294f0317", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 13753, "license_type": "permissive", "max_line_length": 89, "num_lines": 444, "path": "/src/graph_transpiler/webdnn/pass_matmul_transpose_webgl2.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "# Use RGBA channel in WebGL2\n\nfrom typing import Dict, Iterable, List, Optional\nimport numpy as np\nimport onnx\nfrom webdnn.optimization_pass_result_webgl import OptimizationPassResultWebGL\nfrom webdnn.optimization_pass import OptimizationPass, OptimizationPassResult\nfrom webdnn.onnx_util import tensor_proto_to_numpy, get_attr_int\nfrom webdnn.operator_shader_webgl import OperatorShaderWebGL\n\nSHADER_CODE = \"\"\"import {\n shaderGenHeader,\n shaderGenOutput,\n shaderGenTensorNDGetUniformItem,\n shaderGenTensorOutputCoordsWithReturn,\n shaderGenTensorOutputUniform,\n shaderGenTensorOutputUniformItem,\n} from \"../../shaderHelper\";\nimport {\n WebDNNWebGLContext,\n WebGLUniformItem,\n} from \"../../../../interface/backend/webgl/webglContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { WebGLTensor } from \"../../../../interface/backend/webgl/webglTensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport { calcStrides } from \"../../../operatorUtil\";\nimport { OperatorImpl } from \"../../../operatorImpl\";\n\n// Version 13\nclass MatMulNT141 extends OperatorImpl {\n constructor() {\n super(\"webgl\");\n }\n\n protected calcShapeNT141(\n dimsA: ReadonlyArray<number>,\n dimsB: ReadonlyArray<number>\n ) {\n /*\n *Matmulの出力shape、入力stride計算\n *行列Bが転置状態かつ4chで入ってくる場合\n *matmul((a,b,m,k), (a,b,n,k)) => (a,b,m,n)\n *\n *a, bの部分は2個に限らず0~無限個の次元がつけられる。\n *2行列で各次元のサイズは一致が必要。\n *broadcastingあり。次元数が少ない側には先頭にサイズ1の次元が付与。\n *そのうえでサイズ1とそれ以外のサイズがある場合にそれ以外のサイズに合わせbroadcast\n *\n *一方の入力が1次元の場合の特例。\n *(k), (a,b,n,k) => (a,b,n)\n *(k)を(a,b,1,k)にbroadcastしたうえで計算して、(a,b,1,n)を得て、1の軸を消して(a,b,n)\n *\n *(a,b,m,k), (k) => (a,b,m)\n *(k)を(a,b,1,k)にbroadcastしたうえで計算して、(a,b,m,1)を得て、1の軸を消して(a,b,m)\n *\n *両方1次元だと、単純な内積で(1,1)を得て1の軸2つが消え、0次元のスカラー値。\n */\n\n // 出力の次元数(1次元の場合の特例適用前)\n const totalNDims = Math.max(dimsA.length, dimsB.length, 2),\n expandedDimsA = dimsA.slice();\n if (expandedDimsA.length === 0) {\n throw new Error();\n } 
else if (expandedDimsA.length === 1) {\n expandedDimsA.unshift(1);\n }\n while (expandedDimsA.length < totalNDims) {\n expandedDimsA.unshift(1);\n }\n const expandedDimsB = dimsB.slice();\n if (expandedDimsB.length === 0) {\n throw new Error();\n } else if (expandedDimsB.length === 1) {\n expandedDimsB.unshift(1);\n }\n while (expandedDimsB.length < totalNDims) {\n expandedDimsB.unshift(1);\n }\n\n const resultDims = [\n expandedDimsA[expandedDimsA.length - 2],\n expandedDimsB[expandedDimsB.length - 2],\n ],\n innerProductLength = expandedDimsA[expandedDimsA.length - 1];\n if (innerProductLength !== expandedDimsB[expandedDimsB.length - 1]) {\n throw new Error();\n }\n const stridesA = calcStrides(expandedDimsA),\n stridesB = calcStrides(expandedDimsB);\n for (let i = expandedDimsA.length - 3; i >= 0; i--) {\n const resultDim = Math.max(expandedDimsA[i], expandedDimsB[i]);\n // Broadcastされた次元はstrideは0 (出力サイズ1の次元でも0にしてOK)\n if (expandedDimsA[i] === 1) {\n stridesA[i] = 0;\n }\n if (expandedDimsB[i] === 1) {\n stridesB[i] = 0;\n }\n resultDims.unshift(resultDim);\n }\n // B is 4ch\n for (let i = 0; i < stridesB.length; i++) {\n stridesB[i] /= 4;\n }\n\n const resultDimsAfterSqueeze = resultDims.slice();\n if (dimsA.length === 1) {\n resultDimsAfterSqueeze.splice(resultDimsAfterSqueeze.length - 2, 1);\n }\n if (dimsB.length === 1) {\n resultDimsAfterSqueeze.splice(resultDimsAfterSqueeze.length - 1, 1);\n }\n\n return {\n resultDims,\n resultDimsAfterSqueeze,\n stridesA,\n stridesB,\n innerProductLength,\n };\n }\n\n async run(context: WebDNNWebGLContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsWebGLTensorArray(inputs);\n const inputA = inputs[0],\n inputB = inputs[1];\n if (!context.webgl2) {\n throw new Error(\"This operator can only run on WebGL2\");\n }\n if (inputA.dataType !== \"float32\" || inputB.dataType !== \"float32\") {\n throw new Error(\"only float32 is supported\");\n }\n if (inputA.dimPerPixel !== 1 || inputB.dimPerPixel !== 4) {\n throw new Error();\n }\n const {\n resultDims,\n resultDimsAfterSqueeze,\n stridesA,\n stridesB,\n innerProductLength,\n } = this.calcShapeNT141(inputA.dims, inputB.dims),\n output = context.emptyTensor(resultDimsAfterSqueeze, \"float32\");\n if (resultDims.length === 2) {\n await this.calcDim2(\n context,\n inputA,\n inputB,\n output,\n resultDims,\n stridesA,\n stridesB,\n innerProductLength\n );\n } else if (resultDims.length === 3) {\n await this.calcDim3(\n context,\n inputA,\n inputB,\n output,\n resultDims,\n stridesA,\n stridesB,\n innerProductLength\n );\n } else {\n // TODO: 4次元以上のサポート\n throw new Error();\n }\n\n return [output];\n }\n\n private async calcDim2(\n context: WebDNNWebGLContext,\n dA: WebGLTensor,\n dB: WebGLTensor,\n dC: WebGLTensor,\n resultDims: number[],\n stridesA: ReadonlyArray<number>,\n stridesB: ReadonlyArray<number>,\n innerProductLength: number\n ) {\n const kernelSource = `${shaderGenHeader(context.webgl2)}\n\n#define innerProductLengthDiv4 ${innerProductLength / 4}\n${shaderGenTensorOutputUniform(resultDims.length)}\n\nuniform sampler2D tex_input_a;\nuniform int tex_input_a_stride_0;\nuniform int tex_input_a_stride_1;\n\nivec2 get_coord_a(int d0) {\n int flat_index = d0 * tex_input_a_stride_0;\n int texture_w = textureSize(tex_input_a, 0).x;\n int y = flat_index / texture_w;\n int x = flat_index - y * texture_w;\n return ivec2(x, y);\n}\n\nuniform sampler2D tex_input_b;\nuniform int tex_input_b_stride_0;\nuniform int tex_input_b_stride_1;\n\nivec2 get_coord_b(int d0) {\n int flat_index = d0 * 
tex_input_b_stride_0;\n int texture_w = textureSize(tex_input_b, 0).x;\n int y = flat_index / texture_w;\n int x = flat_index - y * texture_w;\n return ivec2(x, y);\n}\n\nvoid main() {\n ${shaderGenTensorOutputCoordsWithReturn(resultDims.length)}\n float s = 0.0;\n ivec2 c_a = get_coord_a(tex_output_0);\n ivec2 c_b = get_coord_b(tex_output_1);\n int texture_w_a = textureSize(tex_input_a, 0).x;\n int texture_w_b = textureSize(tex_input_b, 0).x;\n for (int ip = 0; ip < innerProductLengthDiv4; ip++) {\n vec4 vec_a;\n vec_a.r = texelFetch(tex_input_a, c_a, 0).r;\n c_a.x += 1;\n if (c_a.x >= texture_w_a) {\n c_a = ivec2(c_a.x - texture_w_a, c_a.y + 1);\n }\n vec_a.g = texelFetch(tex_input_a, c_a, 0).r;\n c_a.x += 1;\n if (c_a.x >= texture_w_a) {\n c_a = ivec2(c_a.x - texture_w_a, c_a.y + 1);\n }\n vec_a.b = texelFetch(tex_input_a, c_a, 0).r;\n c_a.x += 1;\n if (c_a.x >= texture_w_a) {\n c_a = ivec2(c_a.x - texture_w_a, c_a.y + 1);\n }\n vec_a.a = texelFetch(tex_input_a, c_a, 0).r;\n c_a.x += 1;\n if (c_a.x >= texture_w_a) {\n c_a = ivec2(c_a.x - texture_w_a, c_a.y + 1);\n }\n vec4 vec_b = texelFetch(tex_input_b, c_b, 0);\n s += dot(vec_a, vec_b);\n c_b.x += 1;\n if (c_b.x >= texture_w_b) {\n c_b = ivec2(c_b.x - texture_w_b, c_b.y + 1);\n }\n }\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n}\n`,\n kernelName = `matmulnt141_2_${innerProductLength}`;\n context.addKernel(kernelName, kernelSource);\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_a\",\n stridesA,\n dA,\n context.webgl2\n ),\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_b\",\n stridesB,\n dB,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem(resultDims, dC, context.webgl2),\n ];\n await context.runKernel(\n kernelName,\n [\n { tensor: dA, name: \"tex_input_a\" },\n { tensor: dB, name: \"tex_input_b\" },\n ],\n dC,\n uniforms\n );\n }\n\n private async calcDim3(\n context: WebDNNWebGLContext,\n dA: WebGLTensor,\n dB: WebGLTensor,\n dC: WebGLTensor,\n resultDims: number[],\n stridesA: ReadonlyArray<number>,\n stridesB: ReadonlyArray<number>,\n innerProductLength: number\n ) {\n const kernelSource = `${shaderGenHeader(context.webgl2)}\n\n#define innerProductLengthDiv4 ${innerProductLength / 4}\n${shaderGenTensorOutputUniform(resultDims.length)}\n\nuniform sampler2D tex_input_a;\nuniform int tex_input_a_stride_0;\nuniform int tex_input_a_stride_1;\nuniform int tex_input_a_stride_2;\n\nivec2 get_coord_a(int d0, int d1) {\n int flat_index = d0 * tex_input_a_stride_0 + d1 * tex_input_a_stride_1;\n int texture_w = textureSize(tex_input_a, 0).x;\n int y = flat_index / texture_w;\n int x = flat_index - y * texture_w;\n return ivec2(x, y);\n}\n\nuniform sampler2D tex_input_b;\nuniform int tex_input_b_stride_0;\nuniform int tex_input_b_stride_1;\nuniform int tex_input_b_stride_2;\n\nivec2 get_coord_b(int d0, int d1) {\n int flat_index = d0 * tex_input_b_stride_0 + d1 * tex_input_b_stride_1;\n int texture_w = textureSize(tex_input_b, 0).x;\n int y = flat_index / texture_w;\n int x = flat_index - y * texture_w;\n return ivec2(x, y);\n}\n\nvoid main() {\n ${shaderGenTensorOutputCoordsWithReturn(resultDims.length)}\n float s = 0.0;\n ivec2 c_a = get_coord_a(tex_output_0, tex_output_1);\n ivec2 c_b = get_coord_b(tex_output_0, tex_output_2);\n int texture_w_a = textureSize(tex_input_a, 0).x;\n int texture_w_b = textureSize(tex_input_b, 0).x;\n for (int ip = 0; ip < innerProductLengthDiv4; ip++) {\n vec4 vec_a;\n vec_a.r = texelFetch(tex_input_a, c_a, 0).r;\n c_a.x += 1;\n if (c_a.x 
>= texture_w_a) {\n c_a = ivec2(c_a.x - texture_w_a, c_a.y + 1);\n }\n vec_a.g = texelFetch(tex_input_a, c_a, 0).r;\n c_a.x += 1;\n if (c_a.x >= texture_w_a) {\n c_a = ivec2(c_a.x - texture_w_a, c_a.y + 1);\n }\n vec_a.b = texelFetch(tex_input_a, c_a, 0).r;\n c_a.x += 1;\n if (c_a.x >= texture_w_a) {\n c_a = ivec2(c_a.x - texture_w_a, c_a.y + 1);\n }\n vec_a.a = texelFetch(tex_input_a, c_a, 0).r;\n c_a.x += 1;\n if (c_a.x >= texture_w_a) {\n c_a = ivec2(c_a.x - texture_w_a, c_a.y + 1);\n }\n vec4 vec_b = texelFetch(tex_input_b, c_b, 0);\n s += dot(vec_a, vec_b);\n c_b.x += 1;\n if (c_b.x >= texture_w_b) {\n c_b = ivec2(c_b.x - texture_w_b, c_b.y + 1);\n }\n }\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n}\n`,\n kernelName = `matmulnt141_3_${innerProductLength}`;\n context.addKernel(kernelName, kernelSource);\n\n if (stridesA[2] > dA.textureWidth || stridesB[2] > dB.textureWidth) {\n throw new Error(\"MatMul: kernel assumption does not hold\");\n }\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_a\",\n stridesA,\n dA,\n context.webgl2\n ),\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_b\",\n stridesB,\n dB,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem(resultDims, dC, context.webgl2),\n ];\n await context.runKernel(\n kernelName,\n [\n { tensor: dA, name: \"tex_input_a\" },\n { tensor: dB, name: \"tex_input_b\" },\n ],\n dC,\n uniforms\n );\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"MatMulNT141\",\n backend: \"webgl\",\n opsetMin: 1,\n factory: () => new MatMulNT141(),\n },\n ];\n}\n\n\"\"\"\n\nclass PassMatMulTransposeWebGL2(OptimizationPass):\n def optimize(self, model: onnx.ModelProto) -> Optional[OptimizationPassResult]:\n graph = model.graph\n changed = False\n result = OptimizationPassResultWebGL()\n for node in graph.node:\n if node.op_type == \"MatMul\":\n rhs_name = node.input[1]\n initializers = graph.initializer\n optimizable = False\n rhs_array = None\n rhs_initializer = None\n for initializer in initializers:\n if initializer.name == rhs_name:\n rhs_array = tensor_proto_to_numpy(initializer)\n rhs_array_shape = rhs_array.shape\n if len(rhs_array_shape) < 2 or rhs_array_shape[-2] % 4 != 0:\n continue\n optimizable = True\n rhs_initializer = initializer\n break\n if not optimizable:\n continue\n initializers.remove(rhs_initializer)\n changed = True\n # optimize it to MatMulNT141\n node.op_type = \"MatMulNT141\"\n # add hint to use RGBA texture for weight\n result.tensor_move_options[rhs_name] = {\"dimPerPixel\": 4}\n # move inner-product axis to last\n transposed_rhs_array = np.moveaxis(rhs_array, -2, -1)\n result.initializers[rhs_name] = transposed_rhs_array\n result.operator_shaders[\"matmulnt141\"] = OperatorShaderWebGL(SHADER_CODE)\n # TODO: check weight is not used by other operator\n return result if changed else None\n" }, { "alpha_fraction": 0.5634799599647522, "alphanum_fraction": 0.588836133480072, "avg_line_length": 26.155717849731445, "blob_id": "09d02832969f5fbb43eeb3c7b3363ccead63dcab", "content_id": "6a7425f5cc67736126530addb8841472e7d0f6ab", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 11195, "license_type": "permissive", "max_line_length": 117, "num_lines": 411, "path": "/src/descriptor_runner/backend/webgl/webglTensorImpl.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { DataArrayTypes, DataType } from 
\"../../interface/core/constants\";\nimport {\n shaderGenHeader,\n shaderGenTensorNDGet,\n shaderGenTensorNDGetUniformItem,\n shaderGenTensorOutputCoordsWithReturn,\n shaderGenTensorOutputUniform,\n shaderGenTensorOutputUniformItem,\n} from \"../../operators/webgl/shaderHelper\";\nimport { TensorImpl } from \"../../core/tensorImpl\";\nimport { WebDNNWebGLContextImpl, WebGLSharedTexture } from \"./webglContextImpl\";\nimport { WebGLTensor } from \"../../interface/backend/webgl/webglTensor\";\nimport { WebGLUniformItem } from \"../../interface/backend/webgl/webglContext\";\nimport {\n packToFloat16Array,\n packToFloat32Array,\n unpackFromFloat16Array,\n unpackFromFloat32Array,\n} from \"./pack\";\n\nexport class WebGLTensorImpl extends TensorImpl implements WebGLTensor {\n textureWidth: number;\n\n textureHeight: number;\n\n sharedTexture: WebGLSharedTexture;\n\n private isBoundToDrawFrameBuffer = false;\n\n private readTextureUnitIndices: number[] = [];\n\n constructor(\n private context: WebDNNWebGLContextImpl,\n dims: ReadonlyArray<number>,\n dataType: DataType = \"float32\",\n public readonly dimPerPixel: 1 | 4 = 1,\n textureShape?: ReadonlyArray<number>,\n sharedTexture?: WebGLSharedTexture\n ) {\n super(dims, dataType, \"webgl\");\n if (dataType !== \"float32\") {\n throw new Error(\"WebGLTensor only supports float32\");\n }\n const pixels = Math.ceil(this.length / dimPerPixel);\n // This makes computing slightly slow. why?\n // this.textureWidth = Math.pow(\n // 2,\n // Math.ceil(Math.log2(Math.min(pixels, this.context.maxTextureSize)))\n // );\n if (textureShape) {\n this.textureHeight = textureShape[0];\n this.textureWidth = textureShape[1];\n } else {\n this.textureWidth = this.context.maxTextureSize;\n this.textureHeight = Math.ceil(pixels / this.textureWidth);\n }\n if (\n this.textureHeight > this.context.maxTextureSize ||\n this.textureWidth > this.context.maxTextureSize\n ) {\n throw new Error(\n `Cannot allocate texture of size ${this.length} in this environment. Please split large tensor in the model.`\n );\n }\n if (sharedTexture) {\n this.sharedTexture = sharedTexture;\n } else {\n this.sharedTexture = new WebGLSharedTexture(\n this.context,\n this.textureWidth,\n this.textureHeight,\n this.dimPerPixel\n );\n }\n }\n\n getTexture(): WebGLTexture {\n return this.sharedTexture.texture;\n }\n\n alias(dims: ReadonlyArray<number>): WebGLTensorImpl {\n this.sharedTexture.incrRef();\n return new WebGLTensorImpl(\n this.context,\n dims,\n this.dataType,\n this.dimPerPixel,\n [this.textureHeight, this.textureWidth],\n this.sharedTexture\n );\n }\n\n async getData(): Promise<DataArrayTypes> {\n const { gl } = this.context;\n let data: Float32Array;\n\n if (\n this.context.isWebGL2(gl) &&\n this.context.canOnlyReadRGBA &&\n this.dimPerPixel === 1\n ) {\n // RGBAにパックしてから読み取る必要がある\n const packed = await this.packToRGBA();\n data = (await packed.getData()) as Float32Array;\n packed.dispose();\n return data;\n }\n\n this.bindToDrawTexture();\n if (this.context.isWebGL2(gl)) {\n if (this.context.supportsTexture32bit) {\n const buf = new Float32Array(\n this.textureHeight * this.textureWidth * this.dimPerPixel\n );\n gl.readPixels(\n 0,\n 0,\n this.textureWidth,\n this.textureHeight,\n this.dimPerPixel === 1 ? 
gl.RED : gl.RGBA,\n gl.FLOAT,\n buf\n );\n data = unpackFromFloat32Array(buf, this.length);\n } else {\n // 16bit\n const buf = new Uint16Array(\n this.textureHeight * this.textureWidth * this.dimPerPixel\n );\n gl.readPixels(\n 0,\n 0,\n this.textureWidth,\n this.textureHeight,\n this.dimPerPixel === 1 ? gl.RED : gl.RGBA,\n gl.HALF_FLOAT,\n buf\n );\n data = unpackFromFloat16Array(buf, this.length);\n }\n } else {\n const buf = new Uint8Array(this.textureHeight * this.textureWidth * 4);\n gl.readPixels(\n 0,\n 0,\n this.textureWidth,\n this.textureHeight,\n gl.RGBA,\n gl.UNSIGNED_BYTE,\n buf\n );\n data = this.unpackColor(buf);\n }\n this.unbindFromDrawTexture();\n return data;\n }\n\n private unpackColor(buf: Uint8Array): Float32Array {\n // unpack 8bit texture according to shaderHelper\n const unpacked = new Float32Array(this.length);\n for (let i = 0; i < this.length; i++) {\n const b0 = buf[i * 4];\n const b1 = buf[i * 4 + 1];\n const b2 = buf[i * 4 + 2];\n const b3 = buf[i * 4 + 3];\n let val = 0.0;\n if (b0 > 0) {\n let sign: number, exponent: number;\n if (b0 >= 128) {\n sign = 1.0;\n exponent = b0 - 192;\n } else {\n sign = -1.0;\n exponent = b0 - 64;\n }\n const scaled = b1 / 255 + b2 / (255 * 255) + b3 / (255 * 255 * 255);\n val = scaled * Math.pow(2, exponent) * sign;\n }\n unpacked[i] = val;\n }\n return unpacked;\n }\n\n async setData(data: DataArrayTypes): Promise<void> {\n const { gl } = this.context;\n this.bindToReadTexture(9);\n if (this.context.isWebGL2(gl)) {\n if (this.context.supportsTexture32bit) {\n const buf = packToFloat32Array(\n data,\n this.textureWidth * this.textureHeight * this.dimPerPixel\n );\n gl.texSubImage2D(\n gl.TEXTURE_2D,\n 0,\n 0,\n 0,\n this.textureWidth,\n this.textureHeight,\n this.dimPerPixel === 1 ? gl.RED : gl.RGBA,\n gl.FLOAT,\n buf\n );\n } else {\n const buf = packToFloat16Array(\n data,\n this.textureWidth * this.textureHeight * this.dimPerPixel\n );\n gl.texSubImage2D(\n gl.TEXTURE_2D,\n 0,\n 0,\n 0,\n this.textureWidth,\n this.textureHeight,\n this.dimPerPixel === 1 ? gl.RED : gl.RGBA,\n gl.HALF_FLOAT,\n buf\n );\n }\n } else {\n const buf = this.packColor(data);\n gl.texSubImage2D(\n gl.TEXTURE_2D,\n 0,\n 0,\n 0,\n this.textureWidth,\n this.textureHeight,\n gl.RGBA,\n gl.UNSIGNED_BYTE,\n buf\n );\n }\n\n this.unbindFromReadTexture();\n }\n\n private packColor(data: DataArrayTypes): Uint8Array {\n const packed = new Uint8Array(this.textureWidth * this.textureHeight * 4);\n for (let i = 0; i < this.length; i++) {\n const val = data[i];\n let b0 = 0,\n b1 = 0,\n b2 = 0,\n b3 = 0;\n if (val !== 0.0) {\n const sign = val > 0.0 ? 192 : 64;\n const absval = Math.abs(val);\n const exponent = Math.ceil(Math.log2(absval) + 0.0001);\n const scaled = absval * Math.pow(2, -exponent);\n let s1 = scaled;\n let s2 = scaled * 255;\n s2 -= Math.trunc(s2);\n s1 -= s2 / 255;\n let s3 = scaled * (255 * 255);\n s3 -= Math.trunc(s3);\n s2 -= s3 / 255;\n b0 = sign + exponent;\n b1 = Math.min(Math.max(Math.ceil((s1 - 0.5 / 255) * 255), 0), 255);\n b2 = Math.min(Math.max(Math.ceil((s2 - 0.5 / 255) * 255), 0), 255);\n b3 = Math.min(Math.max(Math.ceil((s3 - 0.5 / 255) * 255), 0), 255);\n }\n packed[i * 4] = b0;\n packed[i * 4 + 1] = b1;\n packed[i * 4 + 2] = b2;\n packed[i * 4 + 3] = b3;\n }\n return packed;\n }\n\n dispose(): void {\n this.sharedTexture.dispose();\n }\n\n bindToReadTexture(unit: number): void {\n if (this.isBoundToDrawFrameBuffer)\n throw Error(\n \"This buffer is already registered as draw buffer. 
\" +\n \"You may forgot to unbind the binding while previous operations.\"\n );\n\n const { gl } = this.context;\n\n gl.activeTexture(gl.TEXTURE0 + unit);\n gl.bindTexture(gl.TEXTURE_2D, this.getTexture());\n\n this.readTextureUnitIndices.push(unit);\n }\n\n unbindFromReadTexture(): void {\n const { gl } = this.context;\n\n for (const unit of this.readTextureUnitIndices) {\n gl.activeTexture(gl.TEXTURE0 + unit);\n gl.bindTexture(gl.TEXTURE_2D, null);\n }\n\n this.readTextureUnitIndices = [];\n }\n\n bindToDrawTexture(): void {\n if (this.readTextureUnitIndices.length > 0)\n throw Error(\n \"This buffer is already registered as read buffer. \" +\n \"You cannot bind a texture as both read and draw texture buffer at same time.\"\n );\n if (this.isBoundToDrawFrameBuffer)\n throw Error(\n \"This buffer is already registered as draw buffer. \" +\n \"You may forgot to unbind the binding while previous operations.\"\n );\n\n const { gl } = this.context;\n gl.viewport(0, 0, this.textureWidth, this.textureHeight);\n gl.scissor(0, 0, this.textureWidth, this.textureHeight);\n\n gl.framebufferTexture2D(\n gl.FRAMEBUFFER,\n gl.COLOR_ATTACHMENT0,\n gl.TEXTURE_2D,\n this.getTexture(),\n 0\n );\n\n this.isBoundToDrawFrameBuffer = true;\n }\n\n unbindFromDrawTexture(): void {\n if (!this.isBoundToDrawFrameBuffer) return;\n\n const { gl } = this.context;\n\n gl.framebufferTexture2D(\n gl.FRAMEBUFFER,\n gl.COLOR_ATTACHMENT0,\n gl.TEXTURE_2D,\n null,\n 0\n );\n\n this.isBoundToDrawFrameBuffer = false;\n }\n\n private async packToRGBA(): Promise<WebGLTensorImpl> {\n const outputTensor = new WebGLTensorImpl(\n this.context,\n this.dims,\n \"float32\",\n 4\n ),\n inputPixels = this.length,\n outputPixels = Math.ceil(outputTensor.length / 4),\n kernelName = \"RToRGBA\";\n if (!this.context.hasKernel(kernelName)) {\n const kernelSource = `${shaderGenHeader(this.context.webgl2)}\n \n${shaderGenTensorOutputUniform(1)}\n${shaderGenTensorNDGet(\"tex_input\", 1, this.context.webgl2)}\nuniform int input_pixels;\n\nvoid main() {\n ${shaderGenTensorOutputCoordsWithReturn(1)}\n vec4 result = vec4(0.0, 0.0, 0.0, 0.0);\n int pos = tex_output_0 * 4;\n if (pos < input_pixels) {\n result.r = get_tex_input(pos);\n }\n pos++;\n if (pos < input_pixels) {\n result.g = get_tex_input(pos);\n }\n pos++;\n if (pos < input_pixels) {\n result.b = get_tex_input(pos);\n }\n pos++;\n if (pos < input_pixels) {\n result.a = get_tex_input(pos);\n }\n fragColor = result;\n return;\n}\n `;\n this.context.addKernel(kernelName, kernelSource);\n }\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input\",\n [1],\n [this.textureHeight, this.textureWidth],\n this.context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem(\n [outputPixels],\n [outputTensor.textureHeight, outputTensor.textureWidth],\n this.context.webgl2\n ),\n { name: \"input_pixels\", type: \"int\", value: inputPixels },\n ];\n\n await this.context.runKernel(\n kernelName,\n [{ tensor: this, name: \"tex_input\" }],\n outputTensor,\n uniforms\n );\n return outputTensor;\n }\n}\n" }, { "alpha_fraction": 0.7242798209190369, "alphanum_fraction": 0.7325102686882019, "avg_line_length": 26, "blob_id": "de9faff3d9b0f67f51f0cba939052f133b63afdc", "content_id": "640f710708f8691de47d90d03680deb7b5306ea8", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 243, "license_type": "permissive", "max_line_length": 50, "num_lines": 9, "path": 
"/src/descriptor_runner/interface/backend/webgl/webglTensor.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { Tensor } from \"../../core/tensor\";\n\nexport interface WebGLTensor extends Tensor {\n readonly textureWidth: number;\n readonly textureHeight: number;\n readonly dimPerPixel: 1 | 4;\n\n alias(dims: ReadonlyArray<number>): WebGLTensor;\n}\n" }, { "alpha_fraction": 0.6276021599769592, "alphanum_fraction": 0.6329992413520813, "avg_line_length": 27.822221755981445, "blob_id": "547baf0d16a709ceeb7955d626662106ba2577b5", "content_id": "5989d691660981ec187685f03c4949512c43fd19", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1309, "license_type": "permissive", "max_line_length": 72, "num_lines": 45, "path": "/src/descriptor_runner/operators/base/pad11.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { OperatorImpl } from \"../operatorImpl\";\nimport { Tensor } from \"../../interface/core/tensor\";\nimport { onnx } from \"onnx-proto\";\nimport { getAttrString } from \"../operatorUtil\";\nimport { Backend } from \"../../interface/core/constants\";\nimport { CPUTensor } from \"../../interface/backend/cpu/cpuTensor\";\n\ntype PadMode = \"constant\" | \"reflect\" | \"edge\";\n\n/*\n * Opset 11\n * opset 2は互換性なし\n */\nexport abstract class Pad11 extends OperatorImpl {\n mode!: PadMode;\n\n initialize(attribute: onnx.IAttributeProto[]): void {\n super.initialize(attribute);\n this.mode = getAttrString(attribute, \"mode\", \"constant\") as PadMode;\n }\n\n getTensorBackendRequirement(\n nInputs: number,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n nOutputs: number\n ): (Backend | null)[] {\n if (nInputs === 2) {\n return [this.backend, \"cpu\"];\n } else {\n return [this.backend, \"cpu\", \"cpu\"];\n }\n }\n\n protected calcShape(\n input: Tensor,\n padTensor: CPUTensor\n ): { outputShape: number[]; pads: number[] } {\n const outputShape: number[] = [];\n const pads: number[] = Array.from(padTensor.data);\n for (let i = 0; i < input.ndim; i++) {\n outputShape.push(input.dims[i] + pads[i] + pads[i + input.ndim]);\n }\n return { outputShape, pads };\n }\n}\n" }, { "alpha_fraction": 0.580185055732727, "alphanum_fraction": 0.6071704030036926, "avg_line_length": 27.822221755981445, "blob_id": "e6f1a7855d225ac6a3f5aa51795231d0eb4a61cb", "content_id": "9621c6b84d76e2a95174d754316dcc3c49bc7241", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 2616, "license_type": "permissive", "max_line_length": 90, "num_lines": 90, "path": "/src/descriptor_runner/operators/webgl/rawcomputation/averagepool.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import {\n WebDNNWebGLContext,\n WebGLUniformItem,\n} from \"../../../interface/backend/webgl/webglContext\";\nimport { WebGLTensor } from \"../../../interface/backend/webgl/webglTensor\";\nimport {\n shaderGenHeader,\n shaderGenOutput,\n shaderGenTensorNDGet,\n shaderGenTensorNDGetUniformItem,\n shaderGenTensorOutputCoordsWithReturn,\n shaderGenTensorOutputUniform,\n shaderGenTensorOutputUniformItem,\n} from \"../shaderHelper\";\n\nexport async function averagepool(\n context: WebDNNWebGLContext,\n dX: WebGLTensor,\n dI: WebGLTensor,\n countIncludePad: boolean,\n batch: number,\n kernelShape: number[],\n pads: number[],\n strides: number[],\n inShape: number[],\n outShape: number[],\n ch: 
number\n): Promise<void> {\n // Loop counts must be compile-time constants\n const kernelName = `averagepool_${kernelShape[0]}_${kernelShape[1]}_${countIncludePad}`;\n if (!context.hasKernel(kernelName)) {\n const kernelSource = `${shaderGenHeader(context.webgl2)}\n\n #define K0 ${kernelShape[0]}\n #define K1 ${kernelShape[1]}\n uniform int CH;\n uniform int S0;\n uniform int S1;\n uniform int P0;\n uniform int P1;\n uniform int IS0;\n uniform int IS1;\n ${shaderGenTensorOutputUniform(4)}\n \n ${shaderGenTensorNDGet(\"tex_input\", 4, context.webgl2)}\n \n void main() {\n ${shaderGenTensorOutputCoordsWithReturn(4)}\n float s = 0.0;\n ${countIncludePad ? \"const float c = float(K0 * K1);\" : \"float c = 0.0;\"}\n for (int k0 = 0; k0 < K0; k0++) {\n for (int k1 = 0; k1 < K1; k1++) {\n int in0 = tex_output_2 * S0 - P0 + k0;\n int in1 = tex_output_3 * S1 - P1 + k1;\n if (in0 >= 0 && in0 < IS0 && in1 >= 0 && in1 < IS1) {\n s += get_tex_input(tex_output_0, tex_output_1, in0, in1);\n ${countIncludePad ? \"\" : \"c++;\"}\n }\n }\n }\n ${shaderGenOutput(\"s / c\", context.webgl2)}\n return;\n }\n `;\n context.addKernel(kernelName, kernelSource);\n }\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input\",\n dX.strides,\n dX,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem(dI.dims, dI, context.webgl2),\n { name: \"CH\", type: \"int\", value: ch },\n { name: \"S0\", type: \"int\", value: strides[0] },\n { name: \"S1\", type: \"int\", value: strides[1] },\n { name: \"P0\", type: \"int\", value: pads[0] },\n { name: \"P1\", type: \"int\", value: pads[1] },\n { name: \"IS0\", type: \"int\", value: inShape[0] },\n { name: \"IS1\", type: \"int\", value: inShape[1] },\n ];\n await context.runKernel(\n kernelName,\n [{ tensor: dX, name: \"tex_input\" }],\n dI,\n uniforms\n );\n}\n" }, { 
"alpha_fraction": 0.5570336580276489, "alphanum_fraction": 0.571050226688385, "avg_line_length": 25.365671157836914, "blob_id": "fd3cc37ac1b0ce11334e136bb4f7122e96968055", "content_id": "76e6ca4286ac3bcbfd27ed4119eeb482461aace8", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 3573, "license_type": "permissive", "max_line_length": 79, "num_lines": 134, "path": "/src/descriptor_runner/operators/webgl/operators/standard/maxpool.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import {\n WebDNNWebGLContext,\n WebGLUniformItem,\n} from \"../../../../interface/backend/webgl/webglContext\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { MaxPool } from \"../../../base/maxpool\";\nimport {\n shaderGenHeader,\n shaderGenOutput,\n shaderGenTensorNDGet,\n shaderGenTensorNDGetUniformItem,\n shaderGenTensorOutputCoordsWithReturn,\n shaderGenTensorOutputUniform,\n shaderGenTensorOutputUniformItem,\n} from \"../../shaderHelper\";\n\n// Version 11\nexport class WebGLMaxPool extends MaxPool {\n constructor() {\n super(\"webgl\");\n }\n\n async run(\n context: WebDNNWebGLContext,\n inputs: Tensor[],\n nOutputs: number\n ): Promise<Tensor[]> {\n context.assertsWebGLTensorArray(inputs);\n const inputX = inputs[0];\n if (nOutputs !== 1) {\n // TODO: support outputting Indices\n throw new Error(\"MaxPool: output indices is not yet supported\");\n }\n // TODO: support inputs other than 2D\n if (inputX.ndim !== 4) {\n throw new Error(\"MaxPool other than 2D is not yet supported\");\n }\n if (inputX.dimPerPixel !== 1) {\n throw new Error();\n }\n\n const {\n batch,\n dilations,\n kernelShape,\n pads,\n strides,\n inShape,\n outShape,\n ch,\n } = this.calcShape(inputX.dims),\n output = context.emptyTensor(\n [batch, ch, outShape[0], outShape[1]],\n \"float32\"\n ),\n // Loop counts must be compile-time constants\n kernelName = `maxpool_${kernelShape[0]}_${kernelShape[1]}`,\n kernelSource = `${shaderGenHeader(context.webgl2)}\n\n#define K0 ${kernelShape[0]}\n#define K1 ${kernelShape[1]}\nuniform int CH;\nuniform int S0;\nuniform int S1;\nuniform int P0;\nuniform int P1;\nuniform int D0;\nuniform int D1;\nuniform int IS0;\nuniform int IS1;\n${shaderGenTensorOutputUniform(4)}\n\n${shaderGenTensorNDGet(\"tex_input\", 4, context.webgl2)}\n\nvoid main() {\n ${shaderGenTensorOutputCoordsWithReturn(4)}\n float s = -65536.0;\n for (int k0 = 0; k0 < K0; k0++) {\n for (int k1 = 0; k1 < K1; k1++) {\n int in0 = tex_output_2 * S0 - P0 + k0 * D0;\n int in1 = tex_output_3 * S1 - P1 + k1 * D1;\n if (in0 >= 0 && in0 < IS0 && in1 >= 0 && in1 < IS1) {\n float v = get_tex_input(tex_output_0, tex_output_1, in0, in1);\n if (v > s) {\n s = v;\n }\n }\n }\n }\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n}\n`;\n context.addKernel(kernelName, kernelSource);\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input\",\n inputX.strides,\n inputX,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem(output.dims, output, context.webgl2),\n { name: \"CH\", type: \"int\", value: ch },\n { name: \"S0\", type: \"int\", value: strides[0] },\n { name: \"S1\", type: \"int\", value: strides[1] },\n { name: \"P0\", type: \"int\", value: pads[0] },\n { name: \"P1\", type: \"int\", value: pads[1] },\n { name: \"D0\", type: \"int\", value: dilations[0] },\n { name: \"D1\", type: \"int\", value: dilations[1] },\n { name: \"IS0\", type: \"int\", value: inShape[0] },\n 
{ name: \"IS1\", type: \"int\", value: inShape[1] },\n ];\n await context.runKernel(\n kernelName,\n [{ tensor: inputX, name: \"tex_input\" }],\n output,\n uniforms\n );\n return [output];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"MaxPool\",\n backend: \"webgl\",\n opsetMin: 1,\n factory: () => new WebGLMaxPool(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.5928718447685242, "alphanum_fraction": 0.6072652339935303, "avg_line_length": 26.528301239013672, "blob_id": "f973a62dfa49bde816fbda2788b791bb6542dc9e", "content_id": "4a434681064488785d8dd3f301d1a8cbd98ae7c0", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1467, "license_type": "permissive", "max_line_length": 80, "num_lines": 53, "path": "/src/descriptor_runner/operators/cpu/operators/standard/averagepool.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { averagepool } from \"../../rawcomputation/averagepool\";\nimport { AveragePool } from \"../../../base/averagepool\";\nimport { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\n// Version 1, 7, 10, 11+\nclass CpuAveragePool extends AveragePool {\n constructor() {\n super(\"cpu\");\n }\n\n async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsCPUTensorArray(inputs);\n const inputX = inputs[0];\n // TODO: support inputs other than 2D\n if (inputX.ndim !== 4) {\n throw new Error(\"AveragePool other than 2D is not yet supported\");\n }\n const { batch, kernelShape, pads, strides, inShape, outShape, ch } =\n this.calcShape(inputX.dims),\n outputData = new Float32Array(batch * outShape[0] * outShape[1] * ch);\n averagepool(\n inputX.data as Float32Array,\n outputData,\n this.countIncludePad,\n batch,\n kernelShape,\n pads,\n strides,\n inShape,\n outShape,\n ch\n );\n const output = context.emptyTensor(\n [batch, ch, outShape[0], outShape[1]],\n \"float32\",\n outputData\n );\n return [output];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"AveragePool\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new CpuAveragePool(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.5577736496925354, "alphanum_fraction": 0.6210999488830566, "avg_line_length": 36.4455451965332, "blob_id": "9df9ed3579af2ce0b9cebdb7c4973f4627ad694d", "content_id": "35a0d8b4d6e74d44cf86555c7161aede744afab8", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7608, "license_type": "permissive", "max_line_length": 125, "num_lines": 202, "path": "/example/benchmark/make_models.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import argparse\nimport json\nimport os\nimport shutil\nimport subprocess\nimport sys\nimport numpy as np\nimport torch\nimport torch.onnx\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torchvision.models as models\nfrom webdnn.tensor_export import serialize_tensors\n\nsys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), \"models\"))\n\ntorch.manual_seed(0)\n\nOUTPUT_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"runner\", \"model\")\nEXTERNAL_MODEL_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"runner\", \"external_model\")\nRUN_OPTIMIZE = False\nTARGET_MODELS = 
None\nBACKENDS = None\n\ndef dump_expected(directory, arrays_dict):\n serialize_tensors(directory + \"/expected.bin\", arrays_dict)\n\nname_all = []\n\n\ndef scalar(value):\n return torch.Tensor([value]).squeeze()\n\n\ndef rand_scalar():\n return torch.rand(1).squeeze()\n\n\ndef randn_scalar():\n return torch.randn(1).squeeze()\n\ndef optimize_if_requested(output_dir):\n onnx_path = f\"{output_dir}/model.onnx\"\n if RUN_OPTIMIZE:\n optimize_args = [\"python\", \"-m\", \"webdnn.optimize_model\", onnx_path, os.path.join(output_dir, \"optimized\")]\n if BACKENDS is not None:\n optimize_args.append(\"--backends\")\n optimize_args.append(BACKENDS)\n subprocess.check_call(optimize_args)\n\n\ndef dump(name, model, input_shapes):\n name_all.append(name)\n if TARGET_MODELS is not None and name not in TARGET_MODELS:\n return\n output_dir = f\"{OUTPUT_DIR}/{name}\"\n os.makedirs(output_dir, exist_ok=True)\n inputs = []\n model.eval()\n for shape in input_shapes:\n if isinstance(shape, torch.Tensor):\n # To feed a specific input, pass a torch.Tensor itself\n inputs.append(shape)\n else:\n if len(shape) == 0:\n inputs.append(randn_scalar()) # scalar\n else:\n inputs.append(torch.randn(*shape))\n input_names = [f\"input_{i}\" for i in range(len(inputs))]\n onnx_path = f\"{output_dir}/model.onnx\"\n with torch.no_grad():\n example_output = model(*inputs)\n if isinstance(example_output, tuple):\n output_names = [f\"output_{i}\" for i in range(len(example_output))]\n else:\n output_names = [\"output_0\"]\n torch.onnx.export(model, tuple(inputs), onnx_path,\n verbose=True,\n input_names=input_names,\n output_names=output_names, opset_version=10)\n dumps = {}\n for tensor, name in zip(inputs, input_names):\n dumps[name] = tensor.numpy()\n if isinstance(example_output, tuple):\n for i, eo in enumerate(example_output):\n dumps[f\"output_{i}\"] = eo.numpy()\n else:\n dumps[\"output_0\"] = example_output.numpy()\n dump_expected(output_dir, dumps)\n optimize_if_requested(output_dir)\n\ndef use_detr():\n name = \"detr\"\n name_all.append(name)\n if TARGET_MODELS is not None and name not in TARGET_MODELS:\n return\n from detr import dump_detr\n output_dir = f\"{OUTPUT_DIR}/{name}\"\n dump_detr(output_dir)\n optimize_if_requested(output_dir)\n\ndef use_external_model(name, src_dir):\n name_all.append(name)\n if TARGET_MODELS is not None and name not in TARGET_MODELS:\n return\n output_dir = f\"{OUTPUT_DIR}/{name}\"\n os.makedirs(output_dir, exist_ok=True)\n for filename in [\"model.onnx\", \"expected.bin\"]:\n src_path = os.path.join(src_dir, filename)\n if os.path.exists(src_path):\n shutil.copy(src_path, os.path.join(output_dir, filename))\n expected_numpy_path = os.path.join(src_dir, \"expected.npz\")\n if os.path.exists(expected_numpy_path):\n expected_numpy_arrays = dict(np.load(expected_numpy_path))\n dump_expected(output_dir, expected_numpy_arrays)\n optimize_if_requested(output_dir)\n\ndef use_external_models():\n dirs = os.listdir(EXTERNAL_MODEL_DIR)\n for name in dirs:\n src_dir = os.path.join(EXTERNAL_MODEL_DIR, name)\n if os.path.exists(os.path.join(src_dir, \"model.onnx\")):\n use_external_model(name, src_dir)\n\ndef output_list():\n with open(f\"{OUTPUT_DIR}/cases.json\", \"w\") as f:\n json.dump(list(sorted(name_all)), f)\n\n\ndef main():\n global RUN_OPTIMIZE, TARGET_MODELS, BACKENDS\n parser = argparse.ArgumentParser()\n parser.add_argument(\"--optimize\", action=\"store_true\", help=\"specify this to make optimized model (takes time)\")\n parser.add_argument(\"--only\", help=\"output only specific models (separated by 
comma)\")\n parser.add_argument(\"--backends\")\n args = parser.parse_args()\n RUN_OPTIMIZE = args.optimize\n if args.only:\n TARGET_MODELS = args.only.split(\",\")\n if args.backends is not None:\n BACKENDS = args.backends\n dump(\"resnet18\", models.resnet18(pretrained=True), [(1, 3, 224, 224)])\n dump(\"resnet50\", models.resnet50(pretrained=True), [(1, 3, 224, 224)])\n dump(\"conv-64-64-3-1-1\", nn.Conv2d(64, 64, 3, stride=1, padding=1, bias=False), [(1, 64, 56, 56)])\n dump(\"conv-256-64-1-1-0\", nn.Conv2d(256, 64, 1, stride=1, padding=0, bias=False), [(1, 256, 56, 56)])\n dump(\"conv-128-128-3-1-1\", nn.Conv2d(128, 128, 3, stride=1, padding=1, bias=False), [(1, 128, 28, 28)])\n dump(\"conv-512-128-1-1-0\", nn.Conv2d(512, 128, 1, stride=1, padding=0, bias=False), [(1, 512, 28, 28)])\n dump(\"conv-256-256-3-1-1\", nn.Conv2d(256, 256, 3, stride=1, padding=1, bias=False), [(1, 256, 14, 14)])\n dump(\"conv-1024-256-1-1-0\", nn.Conv2d(1024, 256, 1, stride=1, padding=0, bias=False), [(1, 1024, 14, 14)])\n dump(\"convtranspose-512-256-3-2-0\", nn.ConvTranspose2d(512, 256, 3, stride=2, padding=0, bias=False), [(1, 512, 64, 64)])\n dump(\"convtranspose-32-16-3-2-0\", nn.ConvTranspose2d(32, 16, 3, stride=2, padding=0, bias=False), [(1, 32, 64, 64)])\n dump(\"gemm-1024-2048-4096\", Gemm(n=2048,k=4096), [(1024, 4096)])\n dump(\"gemm-1048576-32-288\", Gemm(n=32,k=288), [(1048576, 288)])\n dump(\"matmul-850x1x256-256x2048\", MatMul(), [(850, 1, 256), (256, 2048)])\n dump(\"matmul-850x1x2048-2048x256\", MatMul(), [(850, 1, 2048), (2048, 256)])\n dump(\"matmul-850x1x2048-c2048x256\", MatMulConstR((2048, 256)), [(850, 1, 2048)])\n dump(\"matmul-8x850x32-8x32x850\", MatMul(), [(8, 850, 32), (8, 32, 850)])\n dump(\"matmul-8x850x850-8x850x32\", MatMul(), [(8, 850, 850), (8, 850, 32)])\n dump(\"softmax-8-850-850\", torch.nn.Softmax(dim=2), [(8, 850, 850)])\n dump(\"softmax-8-100-850\", torch.nn.Softmax(dim=2), [(8, 100, 850)])\n dump(\"softmax-8-100-100\", torch.nn.Softmax(dim=2), [(8, 100, 100)])\n use_detr()\n use_external_models()\n output_list()\n\nclass MatMul(nn.Module):\n def __init__(self):\n super().__init__()\n\n def forward(self, x, y):\n return torch.matmul(x, y)\n\nclass MatMulConstR(nn.Module):\n def __init__(self, rhs_shape):\n super().__init__()\n self.rhs = nn.Parameter(torch.Tensor(\n np.random.normal(size=rhs_shape)))\n \n def forward(self, lhs):\n return torch.matmul(lhs, self.rhs)\n\nclass Permute(nn.Module):\n def __init__(self, order):\n super().__init__()\n self.order = order\n\n def forward(self, x):\n # Transpose runs on backend where tensor is, so moving the tensor to non-cpu backend by using Add operator\n x = x + 1.0\n return x.permute(*self.order)\n\nclass Gemm(nn.Module):\n def __init__(self, n, k):\n super().__init__()\n # input, output\n self.fc = nn.Linear(k, n, bias=True)\n\n def forward(self, x):\n return self.fc(x)\n\nif __name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.6264150738716125, "alphanum_fraction": 0.6371068954467773, "avg_line_length": 31.4489803314209, "blob_id": "ef5768acc7ad40b93f115d4dae1893afad03a545", "content_id": "b74c3f3a9252a7bcd4cad2ddfdbd119dced0d02a", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1590, "license_type": "permissive", "max_line_length": 80, "num_lines": 49, "path": "/src/descriptor_runner/operators/cpu/operators/standard/clip.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { DataArrayConstructor } from 
\"../../../../interface/core/constants\";\nimport { OperatorImpl } from \"../../../operatorImpl\";\nimport { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport { onnx } from \"onnx-proto\";\nimport { getAttrFloat } from \"../../../operatorUtil\";\n\nclass CPUClip extends OperatorImpl {\n clipMax!: number;\n clipMin!: number;\n\n constructor() {\n super(\"cpu\");\n }\n\n initialize(attribute: onnx.IAttributeProto[]): void {\n super.initialize(attribute);\n this.clipMax = getAttrFloat(attribute, \"max\", 65536);\n this.clipMin = getAttrFloat(attribute, \"min\", -65536);\n }\n\n async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsCPUTensorArray(inputs);\n const input = inputs[0];\n if (![\"float32\"].includes(input.dataType)) {\n throw new Error(`Unary: DataType ${input.dataType} not supported`);\n }\n const newData = new DataArrayConstructor[input.dataType](input.data.length);\n const { clipMax, clipMin } = this;\n for (let i = 0; i < newData.length; i++) {\n newData[i] = Math.min(clipMax, Math.max(input.data[i], clipMin));\n }\n const output = context.emptyTensor(input.dims, input.dataType, newData);\n return [output];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Clip\",\n backend: \"cpu\",\n opsetMin: 1,\n opsetMax: 11,\n factory: () => new CPUClip(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.6420168280601501, "alphanum_fraction": 0.6492997407913208, "avg_line_length": 31.454545974731445, "blob_id": "7ee16c8d23ba9ac08a93409ab1192ecdad0de7d6", "content_id": "1e16f39b945bcf35021c4d8071f0e7117246916d", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1795, "license_type": "permissive", "max_line_length": 80, "num_lines": 55, "path": "/src/descriptor_runner/operators/cpu/operators/standard/cast.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { onnx } from \"onnx-proto\";\nimport {\n DataArrayConstructor,\n DataType,\n} from \"../../../../interface/core/constants\";\nimport { OperatorImpl } from \"../../../operatorImpl\";\nimport { getAttrInt } from \"../../../operatorUtil\";\nimport { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\n// Opset 6+ (opset 1 requires \"to\" is string)\nclass Cast extends OperatorImpl {\n to!: onnx.TensorProto.DataType;\n\n constructor() {\n super(\"cpu\");\n }\n\n initialize(attribute: onnx.IAttributeProto[]): void {\n super.initialize(attribute);\n this.to = getAttrInt(attribute, \"to\", onnx.TensorProto.DataType.FLOAT);\n }\n\n async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n // TODO: コピー回避\n context.assertsCPUTensorArray(inputs);\n const input = inputs[0];\n let outputDataType: DataType;\n switch (this.to) {\n case onnx.TensorProto.DataType.FLOAT:\n outputDataType = \"float32\";\n break;\n case onnx.TensorProto.DataType.UINT8: // TODO: clip value\n case onnx.TensorProto.DataType.INT32:\n case onnx.TensorProto.DataType.INT64:\n outputDataType = \"int32\";\n break;\n default:\n throw new Error(\n `Cast: converting to DataType ${this.to} is not yet supported`\n );\n }\n const newData = new 
DataArrayConstructor[outputDataType](input.data.length);\n newData.set(input.data);\n const output = context.emptyTensor(input.dims, outputDataType, newData);\n return [output];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n { opType: \"Cast\", backend: \"cpu\", opsetMin: 6, factory: () => new Cast() },\n ];\n}\n" }, { "alpha_fraction": 0.6265822649002075, "alphanum_fraction": 0.6329113841056824, "avg_line_length": 23.30769157409668, "blob_id": "e182314828abb88a86940b8e93a62a0f83f5ba3a", "content_id": "c8baa0de929130d153094b8faf5cd636ad6e5ec9", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 316, "license_type": "permissive", "max_line_length": 92, "num_lines": 13, "path": "/src/shader/wasm/src/kernels/standard/dynamic_unarys.cpp", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "#include <algorithm>\n#include <cmath>\n#include \"../../common/kernel.hpp\"\n#include \"../../common/unary.hpp\"\n\nextern \"C\"\n{\n void WEBDNN_KERNEL kernel_leakyrelu(const float *src, float *dst, int length, float alpha)\n {\n webdnn_unary(src, dst, length, [alpha](float s) { return s < 0.0f ? s * alpha : s; });\n }\n\n}\n" }, { "alpha_fraction": 0.5111830234527588, "alphanum_fraction": 0.571050226688385, "avg_line_length": 31.317934036254883, "blob_id": "c6aa6e101fb22ddbc3b1d126549821e733eff5bc", "content_id": "4adb1f9dc1caef21a2164a532897bd8cb32d72b8", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 24016, "license_type": "permissive", "max_line_length": 177, "num_lines": 736, "path": "/test/model_test/make_models.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import argparse\nimport json\nimport os\nimport subprocess\nimport numpy as np\nimport torch\nimport torch.onnx\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torchvision.models as models\nimport onnx\nfrom onnx import helper\nfrom onnx import AttributeProto, TensorProto, GraphProto\nimport onnxruntime\nfrom webdnn.tensor_export import serialize_tensors\n\ntorch.manual_seed(0)\n\nOUTPUT_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"runner\", \"model\")\nRUN_OPTIMIZE = False\n\nclass ReLU(nn.Module):\n def __init__(self):\n super().__init__()\n\n def forward(self, x):\n return F.relu(x)\n\n\nclass ReLUExp(nn.Module):\n def __init__(self):\n super().__init__()\n\n def forward(self, x):\n return torch.exp(F.relu(x))\n\n\nclass Transpose(nn.Module):\n def __init__(self):\n super().__init__()\n\n def forward(self, x):\n return torch.transpose(x, 0, 1)\n\n\nclass MatMul(nn.Module):\n def __init__(self):\n super().__init__()\n self.fc = nn.Linear(128, 64, bias=False)\n\n def forward(self, x):\n return self.fc(x)\n\n\nclass MatMul2(nn.Module):\n def __init__(self):\n super().__init__()\n\n def forward(self, x, y):\n return torch.matmul(x, y)\n\n\nclass Gemm(nn.Module):\n def __init__(self):\n super().__init__()\n self.fc = nn.Linear(128, 64, bias=True)\n\n def forward(self, x):\n return self.fc(x)\n\n\nclass Conv(nn.Module):\n def __init__(self):\n super().__init__()\n self.conv = nn.Conv2d(3, 64, 7, 2, 3, bias=False)\n\n def forward(self, x):\n return self.conv(x)\n\n\nclass BN(nn.Module):\n def __init__(self):\n super().__init__()\n self.bn = nn.BatchNorm2d(3)\n\n def forward(self, x):\n return self.bn(x)\n\n\nclass MaxPool(nn.Module):\n def __init__(self):\n super().__init__()\n self.pool = 
nn.MaxPool2d(\n kernel_size=3, stride=2, padding=0, ceil_mode=True)\n\n def forward(self, x):\n return self.pool(x)\n\n\nclass AveragePool(nn.Module):\n def __init__(self):\n super().__init__()\n # count_include_pad=True にすると、pytorch->onnxでは単独のPadオペレータが生成される\n self.pool = nn.AvgPool2d(\n kernel_size=3, stride=1, padding=0, ceil_mode=True, count_include_pad=False)\n\n def forward(self, x):\n return self.pool(x)\n\n\nclass Concat3(nn.Module):\n def __init__(self):\n super().__init__()\n\n def forward(self, c0, c1, c2):\n return torch.cat([c0, c1, c2], dim=1)\n\n\nclass Concat4(nn.Module):\n def __init__(self):\n super().__init__()\n\n def forward(self, c0, c1, c2, c3):\n return torch.cat([c0, c1, c2, c3], dim=1)\n\n\nclass Reshape(nn.Module):\n def __init__(self):\n super().__init__()\n\n def forward(self, x):\n # Flattenが使われてしまいReshapeのテストとしては使えない\n return x.view(1, -1)\n\n\nclass Add(nn.Module):\n def forward(self, x, y):\n return x + y\n\n\nclass Sub(nn.Module):\n def forward(self, x, y):\n return x - y\n\n\nclass Mul(nn.Module):\n def forward(self, x, y):\n return x * y\n\n\nclass Div(nn.Module):\n def forward(self, x, y):\n return x / y\n\n\nclass Pow(nn.Module):\n def forward(self, x, y):\n return torch.pow(x, y)\n\n\nclass ResNetPartial(nn.Module):\n def __init__(self) -> None:\n super().__init__()\n resnet = models.resnet18(pretrained=True)\n self.conv1 = resnet.conv1\n self.bn1 = resnet.bn1\n\n def forward(self, x):\n x = self.conv1(x)\n x = self.bn1(x)\n return x\n\n\nclass Flatten2(nn.Module):\n def forward(self, x):\n # [a,b,c,d] -> [a,b,c*d]\n return x.flatten(2)\n\n\nclass Slice2d(nn.Module):\n def forward(self, x):\n return x[10:, :]\n\n\nclass Slice2d2(nn.Module):\n def forward(self, x):\n return x[10::4, -100:-5:3]\n\n\nclass Permute(nn.Module):\n def forward(self, x):\n # Transpose runs on backend where tensor is, so moving the tensor to non-cpu backend by using Add operator\n x = x + 1.0\n return x.permute(2, 0, 1)\n\n\nclass Unsqueeze(nn.Module):\n def forward(self, x):\n return x.unsqueeze(2).unsqueeze(-1)\n\n\nclass Squeeze1(nn.Module):\n def forward(self, x):\n return x.squeeze()\n\n\nclass Squeeze2(nn.Module):\n def forward(self, x):\n return x.squeeze(4)\n\n\nclass Cast(nn.Module):\n def forward(self, x):\n return x.type(torch.int32)\n\n\nclass Sqrt(nn.Module):\n def forward(self, x):\n return torch.sqrt(x)\n\n\nclass Sigmoid(nn.Module):\n def forward(self, x):\n return torch.sigmoid(x)\n\n\nclass Abs(nn.Module):\n def forward(self, x):\n return torch.abs(x)\n\n\nclass Acos(nn.Module):\n def forward(self, x):\n return torch.acos(x)\n\n\nclass Acosh(nn.Module):\n def forward(self, x):\n return torch.acosh(x)\n\n\nclass Asin(nn.Module):\n def forward(self, x):\n return torch.asin(x)\n\n\nclass Asinh(nn.Module):\n def forward(self, x):\n return torch.asinh(x)\n\n\nclass Atan(nn.Module):\n def forward(self, x):\n return torch.atan(x)\n\n\nclass Atanh(nn.Module):\n def forward(self, x):\n return torch.atanh(x)\n\n\nclass Cos(nn.Module):\n def forward(self, x):\n return torch.cos(x)\n\n\nclass Cosh(nn.Module):\n def forward(self, x):\n return torch.cosh(x)\n\n\nclass Exp(nn.Module):\n def forward(self, x):\n return torch.exp(x)\n\n\nclass Log(nn.Module):\n def forward(self, x):\n return torch.log(x)\n\n\nclass Round(nn.Module):\n def forward(self, x):\n return torch.round(x)\n\n\nclass Sign(nn.Module):\n def forward(self, x):\n return torch.sign(x)\n\n\nclass Sin(nn.Module):\n def forward(self, x):\n return torch.sin(x)\n\n\nclass Sinh(nn.Module):\n def forward(self, x):\n 
return torch.sinh(x)\n\n\nclass Tan(nn.Module):\n def forward(self, x):\n return torch.tan(x)\n\n\nclass HardSwish(nn.Module):\n def forward(self, x):\n return torch.nn.Hardswish()(x)\n\n\nclass Neg(nn.Module):\n def forward(self, x):\n return torch.neg(x)\n\n\nclass Softplus(nn.Module):\n def forward(self, x):\n return torch.nn.Softplus()(x)\n\n\nclass Softsign(nn.Module):\n def forward(self, x):\n return torch.nn.Softsign()(x)\n\n\nclass Reciprocal(nn.Module):\n def forward(self, x):\n return torch.reciprocal(x)\n\n\nclass Tanh(nn.Module):\n def forward(self, x):\n return torch.tanh(x)\n\n\nclass Ceil(nn.Module):\n def forward(self, x):\n return torch.ceil(x)\n\n\nclass Floor(nn.Module):\n def forward(self, x):\n return torch.floor(x)\n\n\nclass Gather0D(nn.Module):\n def forward(self, x):\n # 1D -> scalar\n return x[3]\n\n\nclass Gather1D(nn.Module):\n def forward(self, x):\n return x[[2, 4, 5]]\n\n\nclass Pad(nn.Module):\n def __init__(self, pad, mode=\"constant\", constant=0.0):\n super().__init__()\n self.pad = pad\n self.mode = mode\n self.constant = constant\n\n def forward(self, x):\n x = F.relu(x) # Padは入力テンソルがCPUならGPUに移動せず実行する仕様としているため、まずGPUに移動させる\n return F.pad(x, pad=self.pad, mode=self.mode, value=self.constant)\n\n\nclass ReduceMax(nn.Module):\n def forward(self, x):\n return torch.max(x, -1, keepdim=True)[0]\n\n\nclass ReduceMean(nn.Module):\n def forward(self, x):\n return torch.mean(x, -1, keepdim=True)\n\n\nclass ReduceMin(nn.Module):\n def forward(self, x):\n return torch.min(x, -1, keepdim=True)[0]\n\n\nclass ReduceProd(nn.Module):\n def forward(self, x):\n return torch.prod(x, -1, keepdim=True)\n\n\nclass ReduceSum(nn.Module):\n def forward(self, x):\n return torch.sum(x, -1, keepdim=True)\n\n\nclass ReduceSum2(nn.Module):\n def forward(self, x):\n return torch.sum(x, (1, 3), keepdim=True)\n\n\nclass ReduceSum3(nn.Module):\n def forward(self, x):\n return torch.sum(x, (1, 3), keepdim=False)\n\n\nclass ReduceSum4(nn.Module):\n def forward(self, x):\n return torch.sum(x)\n\n\nclass Split(nn.Module):\n def __init__(self, split_size_or_sections, dim) -> None:\n super().__init__()\n self.split_size_or_sections = split_size_or_sections\n self.dim = dim\n\n def forward(self, x):\n return torch.split(x, self.split_size_or_sections, dim=self.dim)\n\n\nclass InstanceNorm(nn.Module):\n def __init__(self, num_features, eps) -> None:\n super().__init__()\n self.instance_norm = torch.nn.InstanceNorm2d(num_features, eps, affine=True)\n self.instance_norm.bias.data = torch.rand(*self.instance_norm.bias.data.shape)\n self.instance_norm.weight.data = torch.rand(*self.instance_norm.weight.data.shape)\n\n def forward(self, x):\n return self.instance_norm(x)\n\n\ndef dump_expected(directory, arrays_dict):\n casted_arrays_dict = {}\n for k, array in arrays_dict.items():\n if array.dtype == np.float64:\n c_array = array.astype(np.float32)\n elif array.dtype == np.int64:\n c_array = array.astype(np.int32)\n elif array.dtype == np.uint64:\n c_array = array.astype(np.uint32)\n else:\n c_array = array\n casted_arrays_dict[k] = c_array\n serialize_tensors(directory + \"/expected.bin\", casted_arrays_dict)\n\nname_all = []\n\n\ndef scalar(value):\n return torch.Tensor([value]).squeeze()\n\n\ndef rand_scalar():\n return torch.rand(1).squeeze()\n\n\ndef randn_scalar():\n return torch.randn(1).squeeze()\n\nDATA_TYPE_TO_NUMPY = {\n 1: np.float32,\n 2: np.uint8,\n 3: np.int8,\n 4: np.uint16,\n 5: np.int16,\n 6: np.int32,\n 7: np.int64,\n 9: np.bool,\n 10: np.float16,\n 11: np.float64,\n 12: 
np.uint32,\n 13: np.uint64,\n}\n\ndef _data_type_from_numpy(np_dtype) -> int:\n # dict like {np.float32: 1} cannot be used due to key equality check\n for k, v in DATA_TYPE_TO_NUMPY.items():\n if v == np_dtype:\n return k\n raise ValueError\n\ndef array_to_tensor_value_info(array, name):\n dtype = _data_type_from_numpy(array.dtype)\n return helper.make_tensor_value_info(name, dtype, list(array.shape))\n\ndef dump_direct_onnx(name, op_type, input_arrays, output_arrays, attributes={}, opset_version=10, large=False):\n name_all.append({\"name\": name, \"large\": large})\n output_dir = f\"{OUTPUT_DIR}/{name}\"\n os.makedirs(output_dir, exist_ok=True)\n input_tvs = []\n input_names = []\n for i, array in enumerate(input_arrays):\n name = f\"input_{i}\"\n input_names.append(name)\n input_tvs.append(array_to_tensor_value_info(array, name))\n output_tvs = []\n output_names = []\n for i, array in enumerate(output_arrays):\n name = f\"output_{i}\"\n output_names.append(name)\n output_tvs.append(array_to_tensor_value_info(array, name))\n node_def = helper.make_node(\n op_type, # name\n input_names,\n output_names,\n **attributes, # attributes\n )\n graph_def = helper.make_graph(\n [node_def], # nodes\n 'test-model', # name\n input_tvs, # inputs\n output_tvs, # outputs\n )\n model_def = helper.make_model(graph_def, producer_name='onnx-example')\n model_def.opset_import[0].version = opset_version\n onnx.checker.check_model(model_def)\n onnx_path = f\"{output_dir}/model.onnx\"\n onnx.save_model(model_def, onnx_path)\n\n # make test case expected value\n session = onnxruntime.InferenceSession(onnx_path)\n dumps = {f\"input_{i}\": array for i, array in enumerate(input_arrays)}\n outputs = session.run(output_names, dumps)\n for i in range(len(outputs)):\n dumps[f\"output_{i}\"] = outputs[i]\n dump_expected(output_dir, dumps)\n if RUN_OPTIMIZE:\n subprocess.check_call([\"python\", \"-m\", \"webdnn.optimize_model\", onnx_path, os.path.join(output_dir, \"optimized\")])\n\n\ndef dump(name, model, input_shapes, opset_version=10, large=False):\n name_all.append({\"name\": name, \"large\": large})\n output_dir = f\"{OUTPUT_DIR}/{name}\"\n os.makedirs(output_dir, exist_ok=True)\n inputs = []\n model.eval()\n for shape in input_shapes:\n if isinstance(shape, torch.Tensor):\n # 特定の入力をしたい場合はtorch.Tensor自体を与える\n inputs.append(shape)\n else:\n if len(shape) == 0:\n inputs.append(randn_scalar()) # スカラー\n else:\n inputs.append(torch.randn(*shape))\n input_names = [f\"input_{i}\" for i in range(len(inputs))]\n onnx_path = f\"{output_dir}/model.onnx\"\n with torch.no_grad():\n example_output = model(*inputs)\n if isinstance(example_output, tuple):\n output_names = [f\"output_{i}\" for i in range(len(example_output))]\n else:\n output_names = [\"output_0\"]\n torch.onnx.export(model, tuple(inputs), onnx_path,\n verbose=True,\n input_names=input_names,\n output_names=output_names, opset_version=opset_version)\n dumps = {}\n for tensor, name in zip(inputs, input_names):\n dumps[name] = tensor.numpy()\n if isinstance(example_output, tuple):\n for i, eo in enumerate(example_output):\n dumps[f\"output_{i}\"] = eo.numpy()\n else:\n dumps[\"output_0\"] = example_output.numpy()\n dump_expected(output_dir, dumps)\n if RUN_OPTIMIZE:\n subprocess.check_call([\"python\", \"-m\", \"webdnn.optimize_model\", onnx_path, os.path.join(output_dir, \"optimized\")])\n\n\ndef output_list():\n with open(f\"{OUTPUT_DIR}/cases.json\", \"w\") as f:\n json.dump(name_all, f)\n\n\ndef main():\n global RUN_OPTIMIZE\n parser = argparse.ArgumentParser()\n 
parser.add_argument(\"--optimize\", action=\"store_true\", help=\"specify this to make optimized model (takes time)\")\n args = parser.parse_args()\n RUN_OPTIMIZE = args.optimize\n dump_direct_onnx(\"max\", \"Max\", [np.random.rand(3, 4).astype(np.float32), np.random.rand(1, 4).astype(np.float32)], [np.random.rand(3, 4).astype(np.float32)])\n dump_direct_onnx(\"mean\", \"Mean\", [np.random.rand(3, 4).astype(np.float32), np.random.rand(1, 4).astype(np.float32)], [np.random.rand(3, 4).astype(np.float32)])\n dump_direct_onnx(\"tile\", \"Tile\", [np.random.rand(3, 4, 5, 6).astype(np.float32), np.array([2, 3, 4, 5], dtype=np.int64)], [np.zeros((3*2, 4*3, 5*4, 6*5), dtype=np.float32)])\n dump(\"relu\", ReLU(), [(3, 4)])\n dump(\"relu2\", ReLU(), [(100, 20, 30, 400)], large=True)\n dump(\"reluexp\", ReLUExp(), [(3, 4)])\n dump(\"sqrt\", Sqrt(), [torch.rand(3, 4)])\n dump(\"sqrtscalar\", Sqrt(), [rand_scalar()])\n dump(\"sigmoid\", Sigmoid(), [(3, 4)])\n dump(\"abs\", Abs(), [(3, 4)])\n dump(\"acos\", Acos(), [torch.rand(3, 4)])\n # dump(\"acosh\", Acosh(), [torch.rand(3, 4) + 2]) # unsupported ONNX export\n dump(\"asin\", Asin(), [torch.rand(3, 4)])\n # dump(\"asinh\", Asinh(), [torch.rand(3, 4)]) # unsupported ONNX export\n dump(\"atan\", Atan(), [torch.rand(3, 4)])\n # dump(\"atanh\", Atanh(), [torch.rand(3, 4) * 0.5]) # unsupported ONNX export\n dump(\"ceil\", Ceil(), [(3, 4)])\n dump(\"cos\", Cos(), [(3, 4)])\n # dump(\"cosh\", Cosh(), [(3, 4)]) # unsupported ONNX export\n dump(\"exp\", Exp(), [(3, 4)])\n dump(\"floor\", Floor(), [(3, 4)])\n # dump(\"hardswish\", HardSwish(), [(3, 4)], opset_version=14) # ONNX opset 14\n dump(\"log\", Log(), [torch.rand(3, 4) + 1])\n dump(\"neg\", Neg(), [(3, 4)])\n # dump(\"reciprocal\", Reciprocal(), [(3, 4)]) # does not output Reciprocal operator\n dump(\"round\", Round(), [torch.randn(3, 4) * 10], opset_version=11)\n dump(\"sign\", Sign(), [torch.randn(3, 4) * 10])#TODO 0\n dump(\"sin\", Sin(), [(3, 4)])\n # dump(\"sinh\", Sinh(), [(3, 4)]) # unsupported ONNX export\n dump(\"softplus\", Softplus(), [(3, 4)])\n dump(\"softsign\", Softsign(), [(3, 4)])\n dump(\"tan\", Tan(), [(3, 4)])\n dump(\"tanh\", Tanh(), [(3, 4)])\n dump(\"elu\", nn.ELU(alpha=2.0), [(3, 4, 5, 6)])\n # dump(\"hardsigmoid\", nn.Hardsigmoid(), [(3, 4, 5, 6)]) # unsupported ONNX export\n dump(\"leakyrelu\", nn.LeakyReLU(negative_slope=0.5), [(3, 4, 5, 6)])\n dump(\"selu\", nn.SELU(), [(3, 4, 5, 6)])\n dump(\"gemm\", Gemm(), [(3, 128)])\n dump(\"matmul\", MatMul(), [(3, 128)])\n dump(\"matmul2\", MatMul2(), [(1, 3, 128), (4, 128, 1)])\n # (in_channels, out_channels, kernel_size, stride=1, padding=0, dilation=1, groups=1, bias=True, padding_mode='zeros')\n #dump(\"conv1\", nn.Conv2d(1, 1, 1, 1, 0, bias=False), [(1, 1, 3, 3)])\n dump(\"conv1\", nn.Conv2d(2, 4, 3, 1, 0, bias=False), [(1, 2, 8, 8)])\n dump(\"conv2\", nn.Conv2d(3, 4, 7, 2, 3, bias=False), [(2, 3, 8, 16)])\n dump(\"conv3\", nn.Conv2d(4, 8, 7, 2, 3,\n groups=2, bias=False), [(3, 4, 8, 16)])\n dump(\"conv4\", nn.Conv2d(4, 8, 3, 2, 3, dilation=2,\n groups=2, bias=False), [(3, 4, 8, 16)])\n dump(\"conv5\", nn.Conv2d(4, 8, 3, 2, 3,\n dilation=2, groups=2), [(3, 4, 8, 16)])\n # go through each path in ConvReshapeWebGL\n # cinkhkw % 4 == 0, outc_per_group % 4 == 0\n dump(\"conv6\", nn.Conv2d(16, 32, 3, 1, 1, bias=False), [(2, 16, 28, 28)])\n # cinkhkw % 4 == 0, outc_per_group % 4 != 0\n dump(\"conv7\", nn.Conv2d(16, 33, 3, 1, 1, bias=False), [(2, 16, 28, 28)])\n # cinkhkw % 4 == 0, group * batch * outh * outw > 16384\n dump(\"conv8\", 
nn.Conv2d(2048, 512, 3, 1, 1, groups=512, bias=False), [(1, 2048, 7, 7)])\n # cinkhkw % 4 != 0\n dump(\"conv9\", nn.Conv2d(64, 64, 3, 1, 1, groups=64, bias=False), [(1, 64, 7, 7)])\n # cinkhkw % 4 != 0, group * batch * outh * outw > 16384\n dump(\"conv10\", nn.Conv2d(512, 512, 3, 1, 1, groups=512, bias=False), [(1, 512, 7, 7)])\n # very large im2col (IM2COL_NUMEL_LIMIT)\n dump(\"conv11\", nn.Conv2d(1, 1, 4, 1, 1, bias=True), [(32, 1, 1027, 1027)], large=True)\n # in_channels, out_channels, kernel_size\n dump(\"convtranspose1\", nn.ConvTranspose2d(16, 32, 3, stride=1, padding=0, output_padding=0, groups=1, dilation=1, bias=False), [(1, 16, 7, 7)])\n dump(\"convtranspose2\", nn.ConvTranspose2d(16, 32, 3, stride=2, padding=0, output_padding=0, groups=1, dilation=1, bias=True), [(2, 16, 7, 9)])\n dump(\"convtranspose3\", nn.ConvTranspose2d(16, 32, 3, stride=2, padding=0, output_padding=0, groups=2, dilation=1, bias=False), [(2, 16, 7, 9)])\n dump(\"convtranspose4\", nn.ConvTranspose2d(16, 32, 3, stride=2, padding=1, output_padding=1, groups=1, dilation=1, bias=False), [(2, 16, 7, 9)])\n dump(\"convtranspose5\", nn.ConvTranspose2d(16, 32, 3, stride=2, padding=1, output_padding=1, groups=1, dilation=2, bias=False), [(2, 16, 7, 9)])\n dump(\"maxpool1\", nn.MaxPool2d(kernel_size=3,\n stride=2, padding=0), [(2, 3, 10, 12)])\n dump(\"maxpool2\", nn.MaxPool2d(kernel_size=3, stride=2,\n padding=0, ceil_mode=True), [(2, 3, 10, 12)])\n # count_include_pad=Trueのとき、Padオペレータが出力される\n #dump(\"avgpool1\", nn.AvgPool2d(kernel_size=3, stride=2, padding=0), [(2, 3, 10, 12)])\n #dump(\"avgpool2\", nn.AvgPool2d(kernel_size=3, stride=2, padding=0, ceil_mode=True), [(2, 3, 10, 12)])\n dump(\"avgpool3\", nn.AvgPool2d(kernel_size=3, stride=2, padding=0,\n ceil_mode=True, count_include_pad=False), [(2, 3, 10, 12)])\n dump(\"globalavgpool\", nn.AdaptiveAvgPool2d((1, 1)), [(2, 3, 10, 12)])\n dump(\"pad1\", Pad((1, 2)), [(2, )], opset_version=11)\n dump(\"pad2\", Pad((1, 2, 3, 4)), [(2, 3)], opset_version=11)\n dump(\"pad3\", Pad((1, 2, 3, 4, 5, 6)), [(2, 3, 4)], opset_version=11)\n dump(\"pad4\", Pad((1, 2, 3, 4, 5, 6, 7, 8)), [(2, 3, 4, 5)], opset_version=11)\n dump(\"pad5\", Pad((1, 2, 3, 4, 5, 6, 7, 8, 9, 10)), [(2, 3, 4, 5, 6)], opset_version=11)\n dump(\"pad6\", Pad((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)), [(2, 3, 4, 5, 6, 7)], opset_version=11)\n dump(\"padc2\", Pad((1, 2, 3, 4), constant=-1.0), [(2, 3)], opset_version=11)\n dump(\"padc3\", Pad((1, 2, 3, 4, 5, 6), constant=-1.0), [(2, 3, 4)], opset_version=11)\n dump(\"padc4\", Pad((1, 2, 3, 4, 5, 6, 7, 8), constant=-1.0), [(2, 3, 4, 5)], opset_version=11)\n dump(\"padc5\", Pad((1, 2, 3, 4, 5, 6, 7, 8, 9, 10), constant=-1.0), [(2, 3, 4, 5, 6)], opset_version=11)\n # only padding 2, 3, 4 dim for tensor with dimension 3,4,5 is supported\n dump(\"padr3\", Pad((2, 1), mode=\"reflect\"), [(2, 3, 4)], opset_version=11)\n dump(\"padr4\", Pad((4, 3, 2, 1), mode=\"reflect\"), [(2, 3, 4, 5)], opset_version=11)\n dump(\"padr5\", Pad((6, 5, 4, 3, 2, 1), mode=\"reflect\"), [(3, 4, 5, 6, 7)], opset_version=11)\n dump(\"pade3\", Pad((2, 1), mode=\"replicate\"), [(2, 3, 4)], opset_version=11)\n dump(\"pade4\", Pad((4, 3, 2, 1), mode=\"replicate\"), [(2, 3, 4, 5)], opset_version=11)\n dump(\"pade5\", Pad((6, 5, 4, 3, 2, 1), mode=\"replicate\"), [(3, 4, 5, 6, 7)], opset_version=11)\n # (2, 3, 10, 12) -< (2, 3*10*12)\n dump(\"flatten\", nn.Flatten(), [(2, 3, 10, 12)])\n dump(\"add1\", Add(), [(2, 3, 10, 12), (2, 3, 10, 12)])\n dump(\"add2\", Add(), [(2, 3, 10, 12), tuple()])\n 
dump(\"add3\", Add(), [(2, 3, 10, 12), (1, 3, 1, 12)])\n dump(\"add4\", Add(), [tuple(), tuple()])\n dump(\"sub1\", Sub(), [(2, 3, 10, 12), (2, 3, 10, 12)])\n dump(\"sub2\", Sub(), [(2, 3, 10, 12), tuple()])\n dump(\"sub3\", Sub(), [(2, 3, 10, 12), (1, 3, 1, 12)])\n dump(\"sub4\", Sub(), [tuple(), tuple()])\n dump(\"mul1\", Mul(), [(2, 3, 10, 12), (2, 3, 10, 12)])\n dump(\"mul2\", Mul(), [(2, 3, 10, 12), tuple()])\n dump(\"mul3\", Mul(), [(2, 3, 10, 12), (1, 3, 1, 12)])\n dump(\"mul4\", Mul(), [tuple(), tuple()])\n dump(\"div1\", Div(), [(2, 3, 10, 12), (2, 3, 10, 12)])\n dump(\"div2\", Div(), [(2, 3, 10, 12), tuple()])\n dump(\"div3\", Div(), [(2, 3, 10, 12), (1, 3, 1, 12)])\n dump(\"div4\", Div(), [tuple(), tuple()])\n dump(\"pow1\", Pow(), [torch.rand(2, 3, 10, 12), torch.rand(2, 3, 10, 12)])\n dump(\"pow2\", Pow(), [torch.rand(2, 3, 10, 12), rand_scalar()])\n dump(\"pow3\", Pow(), [torch.rand(2, 3, 10, 12), torch.rand(1, 3, 1, 12)])\n dump(\"pow4\", Pow(), [rand_scalar(), rand_scalar()])\n dump(\"pow5\", Pow(), [torch.rand(2, 3, 10, 12), scalar(2.0)])\n # dump(\"maxpool3\", nn.MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=2), [(2, 3, 10, 12)])\n # dump(\"resnet18\", models.resnet18(pretrained=True), [(1, 3, 224, 224)])\n # dump(\"resnet18-conv0\", nn.Conv2d(3, 64, 7, 2, 3), [(1, 3, 224, 224)])\n # dump(\"resnet18-maxpool\", nn.MaxPool2d(kernel_size=3, stride=2, padding=1), [(1, 64, 64, 64)])\n # dump(\"resnet18-conv1\", nn.Conv2d(128, 256, 3, 2, 1), [(1, 128, 32, 32)])\n # dump(\"resnet18-partial\", ResNetPartial(), [(1, 3, 224, 224)])\n # dump(\"resnet50\", models.resnet50(pretrained=True), [(1, 3, 224, 224)])\n dump(\"flatten2\", Flatten2(), [(2, 3, 4, 5)])\n dump(\"slice2d\", Slice2d(), [(100, 200)])\n dump(\"slice2d2\", Slice2d2(), [(100, 200)])\n # dump(\"concat3\", Concat3(), [(2, 3, 224, 224), (2, 8, 224, 224), (2, 1, 224, 224)])\n # dump(\"concat4\", Concat4(), [(2, 3, 224, 224), (2, 8, 224, 224), (2, 1, 224, 224), (2, 9, 224, 224)])\n dump(\"transpose\", Permute(), [(3, 4, 5)])\n dump(\"squeeze1\", Squeeze1(), [(3, 4, 1, 5, 1, 6)])\n dump(\"squeeze2\", Squeeze2(), [(3, 4, 1, 5, 1, 6)])\n dump(\"unsqueeze\", Unsqueeze(), [(3, 4, 5, 6)])\n dump(\"cast\", Cast(), [(3, 4)])\n # dump(\"gather0d\", Gather0D(), [(10,)])\n # #dump(\"gather1d\", Gather1D(), [(10,)])\n dump(\"reducemax\", ReduceMax(), [(3, 4, 5, 6)])\n dump(\"reducemean\", ReduceMean(), [(3, 4, 5, 6)])\n dump(\"reducemin\", ReduceMin(), [(3, 4, 5, 6)])\n dump(\"reduceprod\", ReduceProd(), [(3, 4, 5, 6)])\n dump(\"reducesum\", ReduceSum(), [(3, 4, 5, 6)])\n dump(\"reducesum2\", ReduceSum2(), [(3, 4, 5, 6)])\n dump(\"reducesum3\", ReduceSum3(), [(3, 4, 5, 6)])\n dump(\"reducesum4\", ReduceSum4(), [(3, 4, 5, 6)])\n dump(\"softmax\", torch.nn.Softmax(dim=-1), [(3, 4, 5, 6)])\n dump(\"split1\", Split([2, 3, 5, 7, 60-2-3-5-7], -1), [(3, 4, 5, 60)])\n dump(\"split2\", Split([2, 3, 5, 7, 40-2-3-5-7], 1), [(3, 40, 5, 6)])\n dump(\"split3\", Split(4, 0), [(30, 4, 5, 6)])\n dump(\"instancenorm1\", InstanceNorm(4, 0.01), [(3, 4, 5, 6)])\n output_list()\n\n\nif __name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.593406617641449, "alphanum_fraction": 0.598901093006134, "avg_line_length": 21.75, "blob_id": "9732bc4aa5acd704c8244bab449ffa6260facb7e", "content_id": "51dfc99b416f057a9c931bb4c7b2b328207aa69d", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 182, "license_type": "permissive", "max_line_length": 71, "num_lines": 8, "path": 
"/src/shader/wasm/src/common/unary.hpp", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "template <typename T, class tFunction>\nstatic void webdnn_unary(const T *src, T *dst, int length, tFunction f)\n{\n for (int i = 0; i < length; i++)\n {\n dst[i] = f(src[i]);\n }\n}\n" }, { "alpha_fraction": 0.4304690361022949, "alphanum_fraction": 0.4685553014278412, "avg_line_length": 30.391143798828125, "blob_id": "aea63871ab6dd18ff6d64ad7a832dd1a60cd69f2", "content_id": "3dd734f06f61ad1e20d96ee694a65bb85c2ece8a", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 8507, "license_type": "permissive", "max_line_length": 84, "num_lines": 271, "path": "/src/descriptor_runner/operators/cpu/operators/standard/slice.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { DataArrayTypes } from \"../../../../interface/core/constants\";\nimport { OperatorImpl } from \"../../../operatorImpl\";\nimport { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\nclass Slice10 extends OperatorImpl {\n constructor() {\n super(\"cpu\");\n }\n\n async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsCPUTensorArray(inputs);\n const data = inputs[0],\n starts = inputs[1],\n ends = inputs[2],\n axesTensor = inputs[3];\n let steps = inputs[4];\n let axes: number[];\n if (axesTensor) {\n axes = Array.from(axesTensor.data);\n for (let i = 0; i < axes.length; i++) {\n if (axes[i] < 0) {\n axes[i] += data.ndim;\n }\n }\n } else {\n axes = [];\n for (let i = 0; i < data.ndim; i++) {\n axes.push(i);\n }\n }\n // Currently, only common usage is supported\n if (!steps) {\n steps = context.emptyTensor([axes.length], \"int32\");\n steps.data.fill(1);\n }\n const ranges = data.dims.map((d) => [0, d, 1, d]); // Start, stop, step, srcsize\n for (let i = 0; i < axes.length; i++) {\n ranges[axes[i]] = [\n starts.data[i],\n ends.data[i],\n steps.data[i],\n data.dims[axes[i]],\n ];\n }\n const rangesWithSize = ranges.map(([start, stop, step, srcsize]) => {\n if (start < 0) {\n start += srcsize;\n }\n start = Math.max(Math.min(start, srcsize - 1), 0);\n if (stop < 0) {\n stop += srcsize;\n }\n stop = Math.max(Math.min(stop, srcsize), -1);\n const dstsize = Math.max(Math.ceil((stop - start) / step), 0);\n return [start, stop, step, srcsize, dstsize];\n }),\n output = context.emptyTensor(\n rangesWithSize.map(([, , , , dstsize]) => dstsize),\n data.dataType\n );\n let func;\n switch (data.ndim) {\n case 1:\n func = this.copy1d;\n break;\n case 2:\n func = this.copy2d;\n break;\n case 3:\n func = this.copy3d;\n break;\n case 4:\n func = this.copy4d;\n break;\n case 5:\n func = this.copy5d;\n break;\n case 6:\n func = this.copy6d;\n break;\n default:\n throw new Error(\n `Slice: input dimension ${data.ndim} > 6 is not yet supported`\n );\n }\n func(data.data, output.data, rangesWithSize, data.strides, output.strides);\n return [output];\n }\n\n copy1d(\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n rangesWithSize: number[][],\n srcStrides: ReadonlyArray<number>,\n dstStrides: ReadonlyArray<number>\n ) {\n for (let x = 0; x < rangesWithSize[0][4]; x++) {\n dO[x * dstStrides[0]] =\n dI[(rangesWithSize[0][0] + x * rangesWithSize[0][2]) * srcStrides[0]];\n }\n }\n\n copy2d(\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n 
rangesWithSize: number[][],\n srcStrides: ReadonlyArray<number>,\n dstStrides: ReadonlyArray<number>\n ) {\n for (let y = 0; y < rangesWithSize[0][4]; y++) {\n for (let x = 0; x < rangesWithSize[1][4]; x++) {\n dO[y * dstStrides[0] + x * dstStrides[1]] =\n dI[\n (rangesWithSize[0][0] + y * rangesWithSize[0][2]) * srcStrides[0] +\n (rangesWithSize[1][0] + x * rangesWithSize[1][2]) * srcStrides[1]\n ];\n }\n }\n }\n\n copy3d(\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n rangesWithSize: number[][],\n srcStrides: ReadonlyArray<number>,\n dstStrides: ReadonlyArray<number>\n ) {\n for (let d0 = 0; d0 < rangesWithSize[0][4]; d0++) {\n for (let d1 = 0; d1 < rangesWithSize[1][4]; d1++) {\n for (let d2 = 0; d2 < rangesWithSize[2][4]; d2++) {\n dO[d0 * dstStrides[0] + d1 * dstStrides[1] + d2 * dstStrides[2]] =\n dI[\n (rangesWithSize[0][0] + d0 * rangesWithSize[0][2]) *\n srcStrides[0] +\n (rangesWithSize[1][0] + d1 * rangesWithSize[1][2]) *\n srcStrides[1] +\n (rangesWithSize[2][0] + d2 * rangesWithSize[2][2]) *\n srcStrides[2]\n ];\n }\n }\n }\n }\n\n copy4d(\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n rangesWithSize: number[][],\n srcStrides: ReadonlyArray<number>,\n dstStrides: ReadonlyArray<number>\n ) {\n for (let d0 = 0; d0 < rangesWithSize[0][4]; d0++) {\n for (let d1 = 0; d1 < rangesWithSize[1][4]; d1++) {\n for (let d2 = 0; d2 < rangesWithSize[2][4]; d2++) {\n for (let d3 = 0; d3 < rangesWithSize[3][4]; d3++) {\n dO[\n d0 * dstStrides[0] +\n d1 * dstStrides[1] +\n d2 * dstStrides[2] +\n d3 * dstStrides[3]\n ] =\n dI[\n (rangesWithSize[0][0] + d0 * rangesWithSize[0][2]) *\n srcStrides[0] +\n (rangesWithSize[1][0] + d1 * rangesWithSize[1][2]) *\n srcStrides[1] +\n (rangesWithSize[2][0] + d2 * rangesWithSize[2][2]) *\n srcStrides[2] +\n (rangesWithSize[3][0] + d3 * rangesWithSize[3][2]) *\n srcStrides[3]\n ];\n }\n }\n }\n }\n }\n\n copy5d(\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n rangesWithSize: number[][],\n srcStrides: ReadonlyArray<number>,\n dstStrides: ReadonlyArray<number>\n ) {\n for (let d0 = 0; d0 < rangesWithSize[0][4]; d0++) {\n for (let d1 = 0; d1 < rangesWithSize[1][4]; d1++) {\n for (let d2 = 0; d2 < rangesWithSize[2][4]; d2++) {\n for (let d3 = 0; d3 < rangesWithSize[3][4]; d3++) {\n for (let d4 = 0; d4 < rangesWithSize[4][4]; d4++) {\n dO[\n d0 * dstStrides[0] +\n d1 * dstStrides[1] +\n d2 * dstStrides[2] +\n d3 * dstStrides[3] +\n d4 * dstStrides[4]\n ] =\n dI[\n (rangesWithSize[0][0] + d0 * rangesWithSize[0][2]) *\n srcStrides[0] +\n (rangesWithSize[1][0] + d1 * rangesWithSize[1][2]) *\n srcStrides[1] +\n (rangesWithSize[2][0] + d2 * rangesWithSize[2][2]) *\n srcStrides[2] +\n (rangesWithSize[3][0] + d3 * rangesWithSize[3][2]) *\n srcStrides[3] +\n (rangesWithSize[4][0] + d4 * rangesWithSize[4][2]) *\n srcStrides[4]\n ];\n }\n }\n }\n }\n }\n }\n\n copy6d(\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n rangesWithSize: number[][],\n srcStrides: ReadonlyArray<number>,\n dstStrides: ReadonlyArray<number>\n ) {\n for (let d0 = 0; d0 < rangesWithSize[0][4]; d0++) {\n for (let d1 = 0; d1 < rangesWithSize[1][4]; d1++) {\n for (let d2 = 0; d2 < rangesWithSize[2][4]; d2++) {\n for (let d3 = 0; d3 < rangesWithSize[3][4]; d3++) {\n for (let d4 = 0; d4 < rangesWithSize[4][4]; d4++) {\n for (let d5 = 0; d5 < rangesWithSize[5][4]; d5++) {\n dO[\n d0 * dstStrides[0] +\n d1 * dstStrides[1] +\n d2 * dstStrides[2] +\n d3 * dstStrides[3] +\n d4 * dstStrides[4] +\n d5 * dstStrides[5]\n ] =\n dI[\n (rangesWithSize[0][0] + d0 * rangesWithSize[0][2]) *\n srcStrides[0] +\n 
(rangesWithSize[1][0] + d1 * rangesWithSize[1][2]) *\n srcStrides[1] +\n (rangesWithSize[2][0] + d2 * rangesWithSize[2][2]) *\n srcStrides[2] +\n (rangesWithSize[3][0] + d3 * rangesWithSize[3][2]) *\n srcStrides[3] +\n (rangesWithSize[4][0] + d4 * rangesWithSize[4][2]) *\n srcStrides[4] +\n (rangesWithSize[5][0] + d5 * rangesWithSize[5][2]) *\n srcStrides[5]\n ];\n }\n }\n }\n }\n }\n }\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Slice\",\n backend: \"cpu\",\n opsetMin: 10,\n factory: () => new Slice10(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.5934441089630127, "alphanum_fraction": 0.5966309905052185, "avg_line_length": 31.540740966796875, "blob_id": "9d98d256f2c06a88a51e3cb8aa5772d0b9c47f68", "content_id": "6ee863b5a1dea6b23372851141f0466125b2f044", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 4419, "license_type": "permissive", "max_line_length": 93, "num_lines": 135, "path": "/test/model_test/runner/test.js", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "function wait() {\n return new Promise((resolve) => {\n setTimeout(resolve, 1);\n });\n}\n\nasync function runTest(optimized) {\n try {\n let caseDirs = [];\n const resultDom = document.getElementById(\"result\");\n const previousResultList = document.getElementById(\"resultList\");\n if (previousResultList) {\n resultDom.removeChild(previousResultList);\n }\n const resultList = document.createElement(\"ol\");\n resultDom.appendChild(resultList);\n\n // URLに?case=xxx があればケースxxxだけを実行\n const runLarge = document.getElementsByName(\"large\")[0].checked;\n const usp = new URLSearchParams(location.search);\n const selectedCase = usp.get(\"case\");\n if (selectedCase) {\n caseDirs.push(`model/${selectedCase}/`);\n } else {\n const listJSON = await (await fetch(\"model/cases.json\")).json();\n for (const {name, large} of listJSON) {\n if (!large || (large && runLarge)) {\n caseDirs.push(`model/${name}/`);\n }\n }\n }\n const checkboxes = document.getElementsByName(\"backend\");\n const backendOrders = [[\"cpu\"]];\n for (const checkbox of checkboxes) {\n if (checkbox.checked) {\n backendOrders.push([checkbox.value, \"cpu\"]);\n }\n }\n let allOk = true;\n const allResults = {};\n for (const caseDir of caseDirs) {\n for (const backendOrder of backendOrders) {\n console.log(\"test\", caseDir, backendOrder);\n const msg = await runTestOne(caseDir, backendOrder, optimized);\n const ok = !msg;\n allOk &= ok;\n allResults[caseDir] = ok;\n resultList.innerHTML += `<li><span class=\"${\n ok ? \"result-ok\" : \"result-fail\"\n }\">${ok ? \"OK\" : \"Fail\"}, ${caseDir}, ${backendOrder[0]}</span> <span>${\n msg ? msg : \"\"\n }</span></li>`;\n await wait();\n }\n }\n console.log(\"done all test\");\n if (allOk) {\n console.log(\"all ok\");\n resultList.innerHTML += `<li><span class=\"result-ok\">Done. All cases OK.</span></li>`;\n } else {\n console.error(\"failed\", allResults);\n resultList.innerHTML += `<li><span class=\"result-fail\">Some cases failed.</span></li>`;\n }\n } catch (error) {\n console.error(error);\n alert(error.message);\n }\n}\n\nasync function runTestOne(directory, backendOrder, optimized) {\n const runner = await WebDNN.load(\n optimized ? 
`${directory}optimized/` : directory,\n { backendOrder, optimized }\n );\n const expectedTensors = await runner\n .getTensorLoader(directory + \"expected.bin\")\n .loadAll();\n const inputTensors = runner\n .getInputNames()\n .map((iname) => expectedTensors.get(iname));\n console.time(`Run ${directory}`);\n const outputTensors = await runner.run(inputTensors);\n console.timeEnd(`Run ${directory}`);\n const isClose = (expected, actual, name) => {\n if (expected.dims.length !== actual.dims.length) {\n return `${name}: expected.dims(${expected.dims}) !== actual.dims(${actual.dims})`;\n }\n if (expected.dims.some((nd, i) => nd !== actual.dims[i])) {\n return `${name}: expected.dims(${expected.dims}) !== actual.dims(${actual.dims})`;\n }\n\n if (expected.data.length !== actual.data.length) {\n return `${name}: data length mismatch`;\n }\n\n for (let i = 0; i < expected.data.length; i++) {\n const e = expected.data[i];\n const a = actual.data[i];\n // considering a is NaN\n if (!(Math.abs(e - a) <= Math.abs(e) * 1e-2 + 1e-3)) {\n // let kvs = \"\";\n // for (let j = 0; j < Math.min(2048, expected.data.length); j++) {\n // kvs += `${j}=${actual.data[j]},`;\n // }\n // return `${kvs}`;\n return `${name}: index ${i}, expected ${e} !== actual ${a}`;\n }\n }\n\n return null;\n };\n\n const outputNames = runner.getOutputNames();\n let errorMessage = null;\n for (let i = 0; i < outputNames.length; i++) {\n const oname = outputNames[i];\n errorMessage = isClose(expectedTensors.get(oname), outputTensors[i], oname);\n if (errorMessage) {\n break;\n }\n }\n\n return errorMessage;\n}\n\nwindow.addEventListener(\"DOMContentLoaded\", async () => {\n const usp = new URLSearchParams(location.search);\n const backends = (usp.get(\"backend\") || \"\").split(\",\");\n const checkboxes = document.getElementsByName(\"backend\");\n for (const checkbox of checkboxes) {\n if (backends.includes(checkbox.value)) {\n checkbox.checked = true;\n }\n }\n});\n" }, { "alpha_fraction": 0.704402506351471, "alphanum_fraction": 0.704402506351471, "avg_line_length": 30.799999237060547, "blob_id": "c1d7c37710851c0a2ec1eb525657584b70b397b3", "content_id": "05548f0bcf5a8cb75132fcade965adf5a4b0be9b", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 318, "license_type": "permissive", "max_line_length": 54, "num_lines": 10, "path": "/src/descriptor_runner/interface/backend/cpu/cpuTensor.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { DataArrayTypes } from \"../../core/constants\";\nimport { Tensor } from \"../../core/tensor\";\n\nexport interface CPUTensor extends Tensor {\n data: DataArrayTypes;\n getDataSync(): DataArrayTypes;\n getValue(idxs: number[]): number;\n setValue(value: number, idxs: number[]): void;\n useExternalBuffer: boolean;\n}\n" }, { "alpha_fraction": 0.7314356565475464, "alphanum_fraction": 0.7363861203193665, "avg_line_length": 31.31999969482422, "blob_id": "0c6717c196b4edb22073ac84a7f876a87e3e5758", "content_id": "1b5e67823b3be48fe09ace383b32363699f6da1f", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1616, "license_type": "permissive", "max_line_length": 76, "num_lines": 50, "path": "/src/descriptor_runner/interface/backend/webgpu/webgpuContext.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { BackendContext } from \"../../core/backendContext\";\nimport { DataType } from 
\"../../core/constants\";\nimport { Tensor } from \"../../core/tensor\";\nimport { WebDNNCPUContext } from \"../cpu/cpuContext\";\nimport { WebGPUTensor } from \"./webgpuTensor\";\n\n// for future use\n// eslint-disable-next-line @typescript-eslint/no-empty-interface\nexport interface WebDNNWebGPUContextOption {}\n\ntype WorkGroupDim = \"x\" | \"y\" | \"z\";\n\nexport interface WebGPUMetaBufferContentElement {\n value: number;\n type: \"int32\" | \"uint32\" | \"float32\";\n}\n\nexport interface WebGPUMetaBufferContent {\n elements: WebGPUMetaBufferContentElement[];\n}\n\nexport interface WebGPURunnerRequest {\n pipelineName: string;\n tensors: WebGPUTensor[];\n meta: WebGPUMetaBufferContent | null;\n workGroups: { [key in WorkGroupDim]: number };\n}\n\nexport interface WebDNNWebGPUContext extends BackendContext {\n backend: \"webgpu\";\n cpuContext: WebDNNCPUContext;\n initialize(): Promise<void>;\n isWebGLTensor(tensor: Tensor): tensor is WebGPUTensor;\n assertsWebGPUTensor(tensor: Tensor): asserts tensor is WebGPUTensor;\n assertsWebGPUTensorArray(\n tensors: Tensor[]\n ): asserts tensors is WebGPUTensor[];\n\n emptyTensor(\n dims: ReadonlyArray<number>,\n dataType?: DataType,\n forWriteFromCPU?: boolean,\n forReadToCPU?: boolean\n ): WebGPUTensor;\n // eslint-disable-next-line @typescript-eslint/ban-types\n moveTensor(tensor: Tensor, option: {}): Promise<WebGPUTensor>;\n hasPipeline(name: string): boolean;\n createPipeline(name: string, shader: Uint32Array, nBuffers: number): void;\n run(request: WebGPURunnerRequest): Promise<void>;\n}\n" }, { "alpha_fraction": 0.6481481194496155, "alphanum_fraction": 0.6481481194496155, "avg_line_length": 26, "blob_id": "ccd4226da4ca9d8de8b4c9d568b352d7a5467c40", "content_id": "c71ccea4364d2c60a5af93c71b19647ad1042939", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 216, "license_type": "permissive", "max_line_length": 49, "num_lines": 8, "path": "/src/graph_transpiler/webdnn/operator_shader_webgl.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "from webdnn.operator_shader import OperatorShader\n\nclass OperatorShaderWebGL(OperatorShader):\n ts_code: str\n\n def __init__(self, ts_code: str) -> None:\n super().__init__()\n self.ts_code = ts_code\n" }, { "alpha_fraction": 0.5941505432128906, "alphanum_fraction": 0.6066076755523682, "avg_line_length": 24.006771087646484, "blob_id": "2b70df8c3a9702eeca71a8fc2bdfcab2806abe79", "content_id": "9412ffffb43a394c8b81b868b8231896316442b9", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 11146, "license_type": "permissive", "max_line_length": 79, "num_lines": 443, "path": "/src/descriptor_runner/operators/webgl/operators/standard/softmax.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { onnx } from \"onnx-proto\";\nimport { OperatorImpl } from \"../../../operatorImpl\";\nimport { getAttrInt } from \"../../../operatorUtil\";\nimport {\n shaderGenHeader,\n shaderGenOutput,\n shaderGenOutputVec4,\n shaderGenTensorNDGet,\n shaderGenTensorNDGetUniformItem,\n shaderGenTensorNDGetVec4,\n shaderGenTensorOutputCoordsWithReturn,\n shaderGenTensorOutputUniform,\n shaderGenTensorOutputUniformItem,\n} from \"../../shaderHelper\";\nimport {\n WebDNNWebGLContext,\n WebGLUniformItem,\n} from \"../../../../interface/backend/webgl/webglContext\";\nimport { Tensor } from 
\"../../../../interface/core/tensor\";\nimport { WebGLTensor } from \"../../../../interface/backend/webgl/webglTensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\n// Opset 1\nexport class Softmax extends OperatorImpl {\n axis!: number;\n\n constructor() {\n super(\"webgl\");\n }\n\n initialize(attribute: onnx.IAttributeProto[]): void {\n // TODO: cpuと共通\n super.initialize(attribute);\n this.axis = getAttrInt(attribute, \"axis\", -1);\n }\n\n async run(context: WebDNNWebGLContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsWebGLTensorArray(inputs);\n const input = inputs[0];\n let { axis } = this;\n if (axis < 0) {\n axis += input.ndim;\n }\n if (axis !== input.ndim - 1) {\n throw new Error(\n \"Softmax: currently only reducing final axis is supported\"\n );\n }\n // 最終軸のreductionに特化した実装\n const reductionLength = input.dims[axis],\n outerLength = input.length / reductionLength;\n if (context.webgl2) {\n // 最大値計算\n const maxSumExpTensor = context.emptyTensor(\n [outerLength * 4],\n \"float32\",\n { dimPerPixel: 4 }\n );\n await this.calcMax2(\n context,\n outerLength,\n reductionLength,\n input,\n maxSumExpTensor\n );\n\n // 結果計算\n const output = context.emptyTensor(input.dims, input.dataType);\n await this.calcOutput2(\n context,\n outerLength,\n reductionLength,\n input,\n maxSumExpTensor,\n output\n );\n maxSumExpTensor.dispose();\n return [output];\n } else {\n // 最大値計算\n const maxTensor = context.emptyTensor([outerLength]);\n await this.calcMax(\n context,\n outerLength,\n reductionLength,\n input,\n maxTensor\n );\n\n // Sum(exp)計算\n const sumExpTensor = context.emptyTensor([outerLength]);\n await this.calcSumExp(\n context,\n outerLength,\n reductionLength,\n input,\n maxTensor,\n sumExpTensor\n );\n // 結果計算\n const output = context.emptyTensor(input.dims, input.dataType);\n await this.calcOutput(\n context,\n outerLength,\n reductionLength,\n input,\n maxTensor,\n sumExpTensor,\n output\n );\n maxTensor.dispose();\n sumExpTensor.dispose();\n return [output];\n }\n }\n\n private async calcMax2(\n context: WebDNNWebGLContext,\n outerLength: number,\n reductionLength: number,\n input: WebGLTensor,\n maxSumExpTensor: WebGLTensor\n ) {\n const kernelName = `softmax_max_${reductionLength}`,\n kernelSource = `${shaderGenHeader(context.webgl2)}\n\n#define reductionLength ${reductionLength}\n${shaderGenTensorOutputUniform(1)}\nuniform sampler2D tex_input;\nuniform int tex_input_stride_0;\nuniform int tex_input_stride_1;\n\nivec2 get_coord(int d0) {\n int flat_index = d0 * tex_input_stride_0;\n int texture_w = textureSize(tex_input, 0).x;\n int y = flat_index / texture_w;\n int x = flat_index - y * texture_w;\n return ivec2(x, y);\n}\n\nvoid main() {\n ${shaderGenTensorOutputCoordsWithReturn(1)}\n int texture_w = textureSize(tex_input, 0).x;\n ivec2 c_init = get_coord(tex_output_0);\n ivec2 c_i = c_init;\n float s_max = texelFetch(tex_input, c_i, 0).r;\n c_i.x += 1;\n if (c_i.x >= texture_w) {\n c_i = ivec2(0, c_i.y + 1);\n }\n for (int i = 1; i < reductionLength; i++) {\n float v = texelFetch(tex_input, c_i, 0).r;\n if (v > s_max) {\n s_max = v;\n }\n c_i.x += 1;\n if (c_i.x >= texture_w) {\n c_i = ivec2(0, c_i.y + 1);\n }\n }\n c_i = c_init;\n float s_sum_exp = 0.0;\n for (int i = 0; i < reductionLength; i++) {\n float v = texelFetch(tex_input, c_i, 0).r;\n s_sum_exp += exp(v - s_max);\n c_i.x += 1;\n if (c_i.x >= texture_w) {\n c_i = ivec2(0, c_i.y + 1);\n }\n }\n s_sum_exp = 1.0 / s_sum_exp;\n\n vec4 s = vec4(s_max, s_sum_exp, 0.0, 0.0);\n 
${shaderGenOutputVec4(\"s\", context.webgl2)}\n return;\n}\n`;\n context.addKernel(kernelName, kernelSource);\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input\",\n [reductionLength, 1],\n input,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem(\n [outerLength],\n maxSumExpTensor,\n context.webgl2\n ),\n ];\n await context.runKernel(\n kernelName,\n [{ tensor: input, name: \"tex_input\" }],\n maxSumExpTensor,\n uniforms\n );\n }\n\n private async calcOutput2(\n context: WebDNNWebGLContext,\n outerLength: number,\n reductionLength: number,\n input: WebGLTensor,\n maxSumExpTensor: WebGLTensor,\n output: WebGLTensor\n ) {\n const kernelName = `softmax_output`,\n kernelSource = `${shaderGenHeader(context.webgl2)}\n\n${shaderGenTensorOutputUniform(2)}\n\n${shaderGenTensorNDGet(\"tex_input\", 2, context.webgl2)}\n${shaderGenTensorNDGetVec4(\"tex_max_sum_exp\", 1, context.webgl2)}\n\nvoid main() {\n ${shaderGenTensorOutputCoordsWithReturn(2)}\n vec4 m = get_vec4_tex_max_sum_exp(tex_output_0);\n float v = get_tex_input(tex_output_0, tex_output_1);\n float s = exp(v - m.r) * m.g;\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n}\n`;\n context.addKernel(kernelName, kernelSource);\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input\",\n [reductionLength, 1],\n input,\n context.webgl2\n ),\n ...shaderGenTensorNDGetUniformItem(\n \"tex_max_sum_exp\",\n [1],\n maxSumExpTensor,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem(\n [outerLength, reductionLength],\n output,\n context.webgl2\n ),\n ];\n await context.runKernel(\n kernelName,\n [\n { tensor: input, name: \"tex_input\" },\n { tensor: maxSumExpTensor, name: \"tex_max_sum_exp\" },\n ],\n output,\n uniforms\n );\n }\n\n private async calcMax(\n context: WebDNNWebGLContext,\n outerLength: number,\n reductionLength: number,\n input: WebGLTensor,\n maxTensor: WebGLTensor\n ) {\n const kernelName = `softmax_max_${reductionLength}`,\n kernelSource = `${shaderGenHeader(context.webgl2)}\n\n#define reductionLength ${reductionLength}\n${shaderGenTensorOutputUniform(1)}\n\n${shaderGenTensorNDGet(\"tex_input\", 2, context.webgl2)}\n\nvoid main() {\n ${shaderGenTensorOutputCoordsWithReturn(1)}\n float s = get_tex_input(tex_output_0, 0);\n for (int i = 1; i < reductionLength; i++) {\n float v = get_tex_input(tex_output_0, i);\n if (v > s) {\n s = v;\n }\n }\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n}\n`;\n context.addKernel(kernelName, kernelSource);\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input\",\n [reductionLength, 1],\n input,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem(\n [outerLength],\n maxTensor,\n context.webgl2\n ),\n ];\n await context.runKernel(\n kernelName,\n [{ tensor: input, name: \"tex_input\" }],\n maxTensor,\n uniforms\n );\n }\n\n private async calcSumExp(\n context: WebDNNWebGLContext,\n outerLength: number,\n reductionLength: number,\n input: WebGLTensor,\n maxTensor: WebGLTensor,\n sumExpTensor: WebGLTensor\n ) {\n const kernelName = `softmax_sumexp_${reductionLength}`,\n kernelSource = `${shaderGenHeader(context.webgl2)}\n\n#define reductionLength ${reductionLength}\n${shaderGenTensorOutputUniform(1)}\n\n${shaderGenTensorNDGet(\"tex_input\", 2, context.webgl2)}\n${shaderGenTensorNDGet(\"tex_max\", 1, context.webgl2)}\n\nvoid main() {\n ${shaderGenTensorOutputCoordsWithReturn(1)}\n float s = 0.0;\n float m = get_tex_max(tex_output_0);\n for (int i = 0; 
i < reductionLength; i++) {\n float v = get_tex_input(tex_output_0, i);\n s += exp(v - m);\n }\n s = 1.0 / s;\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n}\n`;\n context.addKernel(kernelName, kernelSource);\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input\",\n [reductionLength, 1],\n input,\n context.webgl2\n ),\n ...shaderGenTensorNDGetUniformItem(\n \"tex_max\",\n [1],\n maxTensor,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem(\n [outerLength],\n sumExpTensor,\n context.webgl2\n ),\n ];\n await context.runKernel(\n kernelName,\n [\n { tensor: input, name: \"tex_input\" },\n { tensor: maxTensor, name: \"tex_max\" },\n ],\n sumExpTensor,\n uniforms\n );\n }\n\n private async calcOutput(\n context: WebDNNWebGLContext,\n outerLength: number,\n reductionLength: number,\n input: WebGLTensor,\n maxTensor: WebGLTensor,\n sumExpTensor: WebGLTensor,\n output: WebGLTensor\n ) {\n const kernelName = `softmax_output`,\n kernelSource = `${shaderGenHeader(context.webgl2)}\n\n${shaderGenTensorOutputUniform(2)}\n\n${shaderGenTensorNDGet(\"tex_input\", 2, context.webgl2)}\n${shaderGenTensorNDGet(\"tex_max\", 1, context.webgl2)}\n${shaderGenTensorNDGet(\"tex_sumexp\", 1, context.webgl2)}\n\nvoid main() {\n ${shaderGenTensorOutputCoordsWithReturn(2)}\n float m = get_tex_max(tex_output_0);\n float se = get_tex_sumexp(tex_output_0);\n float v = get_tex_input(tex_output_0, tex_output_1);\n float s = exp(v - m) * se;\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n}\n`;\n context.addKernel(kernelName, kernelSource);\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input\",\n [reductionLength, 1],\n input,\n context.webgl2\n ),\n ...shaderGenTensorNDGetUniformItem(\n \"tex_max\",\n [1],\n maxTensor,\n context.webgl2\n ),\n ...shaderGenTensorNDGetUniformItem(\n \"tex_sumexp\",\n [1],\n sumExpTensor,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem(\n [outerLength, reductionLength],\n output,\n context.webgl2\n ),\n ];\n await context.runKernel(\n kernelName,\n [\n { tensor: input, name: \"tex_input\" },\n { tensor: maxTensor, name: \"tex_max\" },\n { tensor: sumExpTensor, name: \"tex_sumexp\" },\n ],\n output,\n uniforms\n );\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Softmax\",\n backend: \"webgl\",\n opsetMin: 1,\n factory: () => new Softmax(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.6566666960716248, "alphanum_fraction": 0.6650000214576721, "avg_line_length": 32.33333206176758, "blob_id": "31b682153d5a5d47b9827db362d29d4a2f3569b9", "content_id": "668329d9c2836bdf530d5367b20def7a87eaafcb", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 600, "license_type": "permissive", "max_line_length": 63, "num_lines": 18, "path": "/src/descriptor_runner/operators/base/flatten.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { OperatorImpl } from \"../operatorImpl\";\nimport { Tensor } from \"../../interface/core/tensor\";\nimport { arrayProd, getAttrInt } from \"../operatorUtil\";\nimport { onnx } from \"onnx-proto\";\n\nexport abstract class Flatten extends OperatorImpl {\n initialize(attribute: onnx.IAttributeProto[]): void {\n super.initialize(attribute);\n const axis = getAttrInt(attribute, \"axis\", 1);\n if (axis !== 1) {\n throw new Error(`Flatten: only axis === 1 is supported`);\n }\n }\n\n protected calcShape(input: Tensor): number[] 
{\n    return [input.dims[0], arrayProd(input.dims.slice(1))];\n  }\n}\n" }, { "alpha_fraction": 0.686274528503418, "alphanum_fraction": 0.6873065233230591, "avg_line_length": 28.5, "blob_id": "b218a32a8adf53e4685d2bb3047ba72ee417f555", "content_id": "6439c16a0b8c823d331838c89062736209ef8675", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1938, "license_type": "permissive", "max_line_length": 119, "num_lines": 68, "path": "/src/descriptor_runner/image/image_source.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "/**\n * @module webdnn/image\n */\n\n/** Don't Remove This comment block */\n\n/**\n * Load the image at the specified URL\n *\n * @param {string} url the image url\n * @returns {Promise<HTMLImageElement>} image element\n */\nexport async function loadImageByUrl(url: string): Promise<HTMLImageElement> {\n  const image = document.createElement(\"img\");\n\n  return new Promise((resolve, reject) => {\n    image.onload = resolve;\n    image.onerror = reject;\n    image.src = url;\n  }).then(() => image);\n}\n\n/* istanbul ignore next */\n/**\n * Load the image file selected in an `<input type=\"file\">` element.\n *\n * @param {HTMLInputElement} input the `<input type=\"file\">` element\n * @returns {Promise<HTMLImageElement>} image element\n */\nexport async function loadImageFromFileInput(\n  input: HTMLInputElement\n): Promise<HTMLImageElement> {\n  const { files } = input;\n  if (!files || files.length == 0) throw new Error(\"No file is selected\");\n\n  const url = URL.createObjectURL(files[0]);\n\n  return loadImageByUrl(url);\n}\n\n/* istanbul ignore next */\n/**\n * Load an image selected in the file picker dialog\n *\n * Currently, the web specification does not cover the case where the dialog is canceled and no file is selected. 
In this case,\n * the returned promise will never be resolved.\n *\n * @returns {Promise<HTMLImageElement>} image element\n * @protected\n */\nexport async function loadImageByDialog(): Promise<HTMLImageElement> {\n const input = document.createElement(\"input\");\n input.style.display = \"none\";\n input.type = \"file\";\n input.accept = \"image/*\";\n // Avoid GC for iOS Safari\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n (window as any)._webdnn_image_input = input;\n\n return new Promise<HTMLImageElement>((resolve) => {\n input.onchange = () => {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n delete (window as any)._webdnn_image_input;\n resolve(loadImageFromFileInput(input));\n };\n input.click();\n });\n}\n" }, { "alpha_fraction": 0.6060903668403625, "alphanum_fraction": 0.6100196242332458, "avg_line_length": 28.08571434020996, "blob_id": "c519ed39cbab92bebcf4510b686745aea3000b4a", "content_id": "0c491249020c3f84f1a66f2cb7b0e5d60cbded6f", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1018, "license_type": "permissive", "max_line_length": 83, "num_lines": 35, "path": "/src/descriptor_runner/operators/wasm/operators/standard/flatten.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport { WebDNNWasmContext } from \"../../../../interface/backend/wasm/wasmContext\";\nimport { Flatten } from \"../../../base/flatten\";\n\nclass WasmFlatten extends Flatten {\n constructor() {\n super(\"wasm\");\n }\n\n async run(context: WebDNNWasmContext, inputs: Tensor[]): Promise<Tensor[]> {\n // TODO: avoid copy\n const input = inputs[0];\n context.assertsWasmTensor(input);\n const computedShape = this.calcShape(input),\n output = context.emptyTensor(computedShape, input.dataType);\n context.runKernel(\"kernel_copy\", [\n { type: \"tensor\", value: input },\n { type: \"tensor\", value: output },\n { type: \"int32\", value: output.length },\n ]);\n return [output];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Flatten\",\n backend: \"wasm\",\n opsetMin: 1,\n factory: () => new WasmFlatten(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.48658961057662964, "alphanum_fraction": 0.5064526796340942, "avg_line_length": 24.8289852142334, "blob_id": "591b7ea221022d5041cc357f6c34758c863d7a09", "content_id": "cfeb2f7bb3b697128d7639ba573610bcd3a7eb92", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 8919, "license_type": "permissive", "max_line_length": 142, "num_lines": 345, "path": "/src/descriptor_runner/operators/cpu/operators/standard/conv.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { CPUTensor } from \"../../../..\";\nimport { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { arrayProd } from \"../../../../util\";\nimport { Conv } from \"../../../base/conv\";\n\n// if number of elements of im2col output exceeds it, split the task by batch dimension.\nconst IM2COL_NUMEL_LIMIT = 511 * 1024 * 1024;\n\nclass CpuConv extends Conv {\n constructor() {\n super(\"cpu\");\n }\n\n runSplitBatch(\n context: 
WebDNNCPUContext,\n inputXFull: CPUTensor,\n inputW: CPUTensor,\n inputB?: CPUTensor\n ): Tensor[] {\n if (this.group > 1) {\n throw new Error(\"Conv: batch splitting with group > 1 is not supported\");\n }\n\n const {\n batch: allBatch,\n dilations,\n group,\n kernelShape,\n pads,\n strides,\n inShape,\n outShape,\n chIn,\n chInPerGroup,\n chOut,\n chOutPerGroup,\n } = this.calcShape(inputXFull.dims, inputW.dims);\n const im2colNumelPerBatch =\n group *\n outShape[0] *\n outShape[1] *\n chInPerGroup *\n kernelShape[0] *\n kernelShape[1];\n const iterBatch = Math.floor(IM2COL_NUMEL_LIMIT / im2colNumelPerBatch);\n const yShape = [allBatch, chOut, outShape[0], outShape[1]];\n if (iterBatch <= 0) {\n throw new Error(\n `Conv: the size of buffer needed to process single batch exceeds limit. Input shape: ${inputXFull.dims}, weight shape: ${inputW.dims}`\n );\n }\n const output = context.emptyTensor(yShape);\n for (let i = 0; i < allBatch; i += iterBatch) {\n const batch = Math.min(iterBatch, allBatch - i);\n const iterXShape = inputXFull.dims.slice();\n iterXShape[0] = batch;\n const xSizePerBatch = arrayProd(iterXShape.slice(1));\n const iterXData = new Float32Array(\n inputXFull.data.buffer,\n inputXFull.data.byteOffset +\n i * xSizePerBatch * Float32Array.BYTES_PER_ELEMENT,\n batch * xSizePerBatch\n );\n const inputX = context.emptyTensor(iterXShape, \"float32\", iterXData);\n const im2colData = new Float32Array(\n group *\n batch *\n outShape[0] *\n outShape[1] *\n chInPerGroup *\n kernelShape[0] *\n kernelShape[1]\n ),\n matmulData = new Float32Array(\n group * batch * outShape[0] * outShape[1] * chOutPerGroup\n ),\n transposeData = new Float32Array(\n output.data.buffer,\n output.data.byteOffset +\n i *\n chOut *\n outShape[0] *\n outShape[1] *\n Float32Array.BYTES_PER_ELEMENT,\n batch * chOut * outShape[0] * outShape[1]\n );\n this.im2col(\n inputX.data as Float32Array,\n im2colData,\n batch,\n dilations,\n group,\n kernelShape,\n pads,\n strides,\n inShape,\n outShape,\n chIn,\n chInPerGroup\n );\n this.matmul(\n im2colData,\n inputW.data as Float32Array,\n matmulData,\n group,\n batch * outShape[0] * outShape[1],\n chInPerGroup * kernelShape[0] * kernelShape[1],\n chOutPerGroup\n );\n this.transpose(\n matmulData,\n transposeData,\n group,\n batch,\n outShape[0] * outShape[1],\n chOutPerGroup\n );\n }\n\n if (inputB) {\n this.bias(\n inputB.data as Float32Array,\n output.data as Float32Array,\n allBatch,\n chOut,\n outShape[0] * outShape[1]\n );\n }\n return [output];\n }\n\n async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsCPUTensorArray(inputs);\n const inputX = inputs[0],\n inputW = inputs[1],\n inputB = inputs[2];\n // TODO: 2D以外対応\n if (inputX.ndim !== 4) {\n throw new Error(\"Conv other than 2D is not yet supported\");\n }\n const {\n batch,\n dilations,\n group,\n kernelShape,\n pads,\n strides,\n inShape,\n outShape,\n chIn,\n chInPerGroup,\n chOut,\n chOutPerGroup,\n } = this.calcShape(inputX.dims, inputW.dims);\n const im2colNumel =\n group *\n batch *\n outShape[0] *\n outShape[1] *\n chInPerGroup *\n kernelShape[0] *\n kernelShape[1];\n if (im2colNumel > IM2COL_NUMEL_LIMIT) {\n return this.runSplitBatch(context, inputX, inputW, inputB);\n }\n const im2colData = new Float32Array(im2colNumel),\n matmulData = new Float32Array(\n group * batch * outShape[0] * outShape[1] * chOutPerGroup\n ),\n transposeData = new Float32Array(\n batch * chOut * outShape[0] * outShape[1]\n );\n this.im2col(\n inputX.data as Float32Array,\n 
im2colData,\n batch,\n dilations,\n group,\n kernelShape,\n pads,\n strides,\n inShape,\n outShape,\n chIn,\n chInPerGroup\n );\n this.matmul(\n im2colData,\n inputW.data as Float32Array,\n matmulData,\n group,\n batch * outShape[0] * outShape[1],\n chInPerGroup * kernelShape[0] * kernelShape[1],\n chOutPerGroup\n );\n this.transpose(\n matmulData,\n transposeData,\n group,\n batch,\n outShape[0] * outShape[1],\n chOutPerGroup\n );\n if (inputB) {\n this.bias(\n inputB.data as Float32Array,\n transposeData,\n batch,\n chOut,\n outShape[0] * outShape[1]\n );\n }\n const output = context.emptyTensor(\n [batch, chOut, outShape[0], outShape[1]],\n \"float32\",\n transposeData\n );\n return [output];\n }\n\n private im2col(\n dX: Float32Array,\n dI: Float32Array,\n batch: number,\n dilations: number[],\n group: number,\n kernelShape: number[],\n pads: number[],\n strides: number[],\n inShape: number[],\n outShape: number[],\n chIn: number,\n chInPerGroup: number\n ): void {\n let idx = 0;\n for (let g = 0; g < group; g++) {\n for (let b = 0; b < batch; b++) {\n for (let oy = 0; oy < outShape[0]; oy++) {\n for (let ox = 0; ox < outShape[1]; ox++) {\n for (let ci = 0; ci < chInPerGroup; ci++) {\n for (let ky = 0; ky < kernelShape[0]; ky++) {\n for (let kx = 0; kx < kernelShape[1]; kx++) {\n let v = 0;\n const iny = oy * strides[0] - pads[0] + ky * dilations[0],\n inx = ox * strides[1] - pads[1] + kx * dilations[1];\n if (\n iny >= 0 &&\n iny < inShape[0] &&\n inx >= 0 &&\n inx < inShape[1]\n ) {\n v =\n dX[\n ((b * chIn + g * chInPerGroup + ci) * inShape[0] +\n iny) *\n inShape[1] +\n inx\n ];\n }\n dI[idx++] = v;\n }\n }\n }\n }\n }\n }\n }\n }\n\n private matmul(\n dI: Float32Array,\n dW: Float32Array,\n dT: Float32Array,\n group: number,\n bout: number,\n cinkhkw: number,\n chOutPerGroup: number\n ) {\n // DI(group, bout, cinkhkw) * dW(group, coutpergroup, cinkhkw) -> dT(group, bout, coutpergroup)\n for (let g = 0; g < group; g++) {\n for (let y = 0; y < bout; y++) {\n for (let x = 0; x < chOutPerGroup; x++) {\n let s = 0;\n for (let ip = 0; ip < cinkhkw; ip++) {\n s +=\n dI[(g * bout + y) * cinkhkw + ip] *\n dW[(g * chOutPerGroup + x) * cinkhkw + ip];\n }\n dT[(g * bout + y) * chOutPerGroup + x] = s;\n }\n }\n }\n }\n\n private transpose(\n dT: Float32Array,\n dO: Float32Array,\n group: number,\n batch: number,\n outarea: number,\n chOutPerGroup: number\n ) {\n // DT(group, batch, outh, outw, choutpergroup) -> dO(batch, group, choutpergroup, outh, outw)\n let idx = 0;\n for (let b = 0; b < batch; b++) {\n for (let g = 0; g < group; g++) {\n for (let c = 0; c < chOutPerGroup; c++) {\n for (let x = 0; x < outarea; x++) {\n dO[idx++] = dT[((g * batch + b) * outarea + x) * chOutPerGroup + c];\n }\n }\n }\n }\n }\n\n private bias(\n dB: Float32Array,\n dO: Float32Array,\n batch: number,\n chOut: number,\n outarea: number\n ) {\n let idx = 0;\n for (let b = 0; b < batch; b++) {\n for (let c = 0; c < chOut; c++) {\n for (let x = 0; x < outarea; x++) {\n dO[idx++] += dB[c];\n }\n }\n }\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Conv\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new CpuConv(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.6264441609382629, "alphanum_fraction": 0.6302952766418457, "avg_line_length": 25.86206817626953, "blob_id": "94342574f11f10dc43e28b6b1960529aee5e3bf9", "content_id": "6e454b9aaaa8750b6889e352a86ef2ccf20e7cc1", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", 
"length_bytes": 779, "license_type": "permissive", "max_line_length": 84, "num_lines": 29, "path": "/src/descriptor_runner/core/inputProxy.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { DataType } from \"../interface/core/constants\";\nimport { arrayProd } from \"../operators/operatorUtil\";\n\nexport class InputProxy implements ArrayLike<number> {\n readonly length: number;\n\n [n: number]: number;\n\n readonly dims: ReadonlyArray<number>;\n\n constructor(dims: ReadonlyArray<number>, public readonly dataType: DataType) {\n this.dims = dims;\n const length = arrayProd(dims);\n this.length = length;\n for (let i = 0; i < length; i++) {\n this[i] = 0;\n }\n /*\n * For large length, error occurs (RangeError: Maximum call stack size exceeded)\n * Array.prototype.push.apply( this, new Array(length) );\n */\n }\n\n set(array: ArrayLike<number>): void {\n for (let i = 0; i < array.length; i++) {\n this[i] = array[i];\n }\n }\n}\n" }, { "alpha_fraction": 0.6421856880187988, "alphanum_fraction": 0.6433607339859009, "avg_line_length": 23.66666603088379, "blob_id": "e94d6e59509f47ae714335c5886e6efe2f1d5fd4", "content_id": "33d8cc40fc3b8e71101267b07b70e782efcb57af", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1762, "license_type": "permissive", "max_line_length": 75, "num_lines": 69, "path": "/src/descriptor_runner/core/operatorTable.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { Backend } from \"../interface/core/constants\";\nimport { Operator, OperatorEntry } from \"../interface/core/operator\";\n\nconst registeredOperators: {\n [opType: string]: OperatorEntry[];\n} = {};\n\nfunction registerOperator(operatorEntry: OperatorEntry) {\n if (!(operatorEntry.opType in registeredOperators)) {\n registeredOperators[operatorEntry.opType] = [];\n }\n registeredOperators[operatorEntry.opType].push(operatorEntry);\n}\n\nexport function registerOperators(operatorEntries: OperatorEntry[]): void {\n for (const entry of operatorEntries) {\n registerOperator(entry);\n }\n}\n\nexport function instantiateOperator(\n opType: string,\n opset: number,\n backendOrder: Backend[],\n currentTensorsBackends: Backend[][]\n): Operator | null {\n const entries = registeredOperators[opType];\n if (!entries) {\n return null;\n }\n\n let localBackendOrder = backendOrder;\n // 特殊なオペレータ\n switch (opType) {\n case \"Flatten\":\n case \"Pad\":\n case \"Reshape\":\n case \"Squeeze\":\n case \"Transpose\":\n case \"Unsqueeze\":\n // データ側テンソル(currentTensorsBackends[0])のあるオペレータ上で実行\n for (const backend of backendOrder) {\n if (currentTensorsBackends[0].includes(backend)) {\n localBackendOrder = [backend];\n }\n }\n break;\n case \"Shape\":\n // 常にCPU\n localBackendOrder = [\"cpu\"];\n break;\n }\n\n for (const backend of localBackendOrder) {\n for (const entry of entries) {\n if (entry.backend !== backend) {\n continue;\n }\n if (entry.opsetMin > opset) {\n continue;\n }\n if (entry.opsetMax != null && entry.opsetMax <= opset) {\n continue;\n }\n return entry.factory();\n }\n }\n return null;\n}\n" }, { "alpha_fraction": 0.609375, "alphanum_fraction": 0.6337890625, "avg_line_length": 29.567163467407227, "blob_id": "713e609f7539d93e5f331ae0ff888422b0878ead", "content_id": "9dc573c4fa553b3817b3dba25165a725b114d15d", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2048, "license_type": "permissive", 
"max_line_length": 129, "num_lines": 67, "path": "/example/resnet/export_pytorch_model.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import argparse\nimport json\nimport os\nimport subprocess\nimport urllib.request\nimport numpy as np\nimport torch\nimport torch.onnx\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torchvision.models as models\nfrom webdnn.tensor_export import serialize_tensors\n\ntorch.manual_seed(0)\n\n\ndef export_test_case(output_dir, model):\n example_input = torch.randn((1, 3, 224, 224))\n with torch.no_grad():\n example_output = model(example_input)\n serialize_tensors(os.path.join(output_dir, \"expected.bin\"), {\n \"input\": example_input.numpy(),\n \"output\": example_output.numpy(),\n })\n\nclass Model(nn.Module):\n def __init__(self):\n super().__init__()\n # resnet does not include softmax\n self.resnet = models.resnet50(pretrained=True)\n \n def forward(self, x):\n h = self.resnet(x)\n h = F.softmax(h, dim=-1)\n return h\n\ndef download_sample_image(path, url):\n if os.path.exists(path):\n return\n urllib.request.urlretrieve(url, path)\n\ndef main():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"--optimize\", action=\"store_true\", help=\"specify this to make optimized model (takes time)\")\n args = parser.parse_args()\n model = Model()\n\n output_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"output\")\n\n os.makedirs(output_dir, exist_ok=True)\n\n download_sample_image(os.path.join(output_dir, \"000000039769.jpg\"), 'http://images.cocodataset.org/val2017/000000039769.jpg')\n\n onnx_path = os.path.join(output_dir, \"model.onnx\")\n \n with torch.no_grad():\n torch.onnx.export(model, (torch.zeros((1, 3, 224, 224))), onnx_path,\n verbose=True,\n input_names=[\"input\"],\n output_names=[\"output\"], opset_version=10)\n \n export_test_case(output_dir, model)\n if args.optimize:\n subprocess.check_call([\"python\", \"-m\", \"webdnn.optimize_model\", onnx_path, os.path.join(output_dir, \"optimized\")])\n\nif __name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.36319729685783386, "alphanum_fraction": 0.4296063780784607, "avg_line_length": 29.674074172973633, "blob_id": "ae85c605fbd58757e9b5163bac459ae6f72396ef", "content_id": "2f3dcae400dd66bf3f57a8f6ee13c895271f9c30", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4141, "license_type": "permissive", "max_line_length": 109, "num_lines": 135, "path": "/src/shader/wasm/src/common/binary7.hpp", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "template <typename T, class tFunction>\nstatic void webdnn_binary7_d6(tFunction f, const T *srcl, const T *srcr, T *dst,\n int o0, int o1, int o2, int o3, int o4, int o5,\n int isl0, int isl1, int isl2, int isl3, int isl4, int isl5,\n int isr0, int isr1, int isr2, int isr3, int isr4, int isr5)\n{\n int dstidx = 0;\n for (int d0 = 0; d0 < o0; d0++)\n {\n for (int d1 = 0; d1 < o1; d1++)\n {\n for (int d2 = 0; d2 < o2; d2++)\n {\n for (int d3 = 0; d3 < o3; d3++)\n {\n for (int d4 = 0; d4 < o4; d4++)\n {\n for (int d5 = 0; d5 < o5; d5++)\n {\n dst[dstidx++] = f(srcl[d0 * isl0 + d1 * isl1 + d2 * isl2 + d3 * isl3 + d4 * isl4 + d5 * isl5],\n srcr[d0 * isr0 + d1 * isr1 + d2 * isr2 + d3 * isr3 + d4 * isr4 + d5 * isr5]);\n }\n }\n }\n }\n }\n }\n}\n\ntemplate <typename T, class tFunction>\nstatic void webdnn_binary7_d5(tFunction f, const T *srcl, const T *srcr, T *dst,\n int o0, int o1, int o2, int o3, int 
o4,\n int isl0, int isl1, int isl2, int isl3, int isl4,\n int isr0, int isr1, int isr2, int isr3, int isr4)\n{\n int dstidx = 0;\n for (int d0 = 0; d0 < o0; d0++)\n {\n for (int d1 = 0; d1 < o1; d1++)\n {\n for (int d2 = 0; d2 < o2; d2++)\n {\n for (int d3 = 0; d3 < o3; d3++)\n {\n for (int d4 = 0; d4 < o4; d4++)\n {\n dst[dstidx++] = f(srcl[d0 * isl0 + d1 * isl1 + d2 * isl2 + d3 * isl3 + d4 * isl4],\n srcr[d0 * isr0 + d1 * isr1 + d2 * isr2 + d3 * isr3 + d4 * isr4]);\n }\n }\n }\n }\n }\n}\n\ntemplate <typename T, class tFunction>\nstatic void webdnn_binary7_d4(tFunction f, const T *srcl, const T *srcr, T *dst,\n int o0, int o1, int o2, int o3,\n int isl0, int isl1, int isl2, int isl3,\n int isr0, int isr1, int isr2, int isr3)\n{\n int dstidx = 0;\n for (int d0 = 0; d0 < o0; d0++)\n {\n for (int d1 = 0; d1 < o1; d1++)\n {\n for (int d2 = 0; d2 < o2; d2++)\n {\n for (int d3 = 0; d3 < o3; d3++)\n {\n dst[dstidx++] = f(srcl[d0 * isl0 + d1 * isl1 + d2 * isl2 + d3 * isl3],\n srcr[d0 * isr0 + d1 * isr1 + d2 * isr2 + d3 * isr3]);\n }\n }\n }\n }\n}\n\ntemplate <typename T, class tFunction>\nstatic void webdnn_binary7_d3(tFunction f, const T *srcl, const T *srcr, T *dst,\n int o0, int o1, int o2,\n int isl0, int isl1, int isl2,\n int isr0, int isr1, int isr2)\n{\n int dstidx = 0;\n for (int d0 = 0; d0 < o0; d0++)\n {\n for (int d1 = 0; d1 < o1; d1++)\n {\n for (int d2 = 0; d2 < o2; d2++)\n {\n dst[dstidx++] = f(srcl[d0 * isl0 + d1 * isl1 + d2 * isl2],\n srcr[d0 * isr0 + d1 * isr1 + d2 * isr2]);\n }\n }\n }\n}\n\ntemplate <typename T, class tFunction>\nstatic void webdnn_binary7_d2(tFunction f, const T *srcl, const T *srcr, T *dst,\n int o0, int o1,\n int isl0, int isl1,\n int isr0, int isr1)\n{\n int dstidx = 0;\n for (int d0 = 0; d0 < o0; d0++)\n {\n for (int d1 = 0; d1 < o1; d1++)\n {\n dst[dstidx++] = f(srcl[d0 * isl0 + d1 * isl1],\n srcr[d0 * isr0 + d1 * isr1]);\n }\n }\n}\n\ntemplate <typename T, class tFunction>\nstatic void webdnn_binary7_d1(tFunction f, const T *srcl, const T *srcr, T *dst,\n int o0,\n int isl0,\n int isr0)\n{\n int dstidx = 0;\n for (int d0 = 0; d0 < o0; d0++)\n {\n dst[dstidx++] = f(srcl[d0 * isl0],\n srcr[d0 * isr0]);\n }\n}\n\ntemplate <typename T, class tFunction>\nstatic void webdnn_binary7_d0(tFunction f, const T *srcl, const T *srcr, T *dst)\n{\n dst[0] = f(srcl[0],\n srcr[0]);\n}\n" }, { "alpha_fraction": 0.6254886388778687, "alphanum_fraction": 0.6348710060119629, "avg_line_length": 28.06818199157715, "blob_id": "0ab5a0be42b1f5e53b2375f6f7371dd9d53a0f36", "content_id": "a28a0e38e41d3004639a59b850fb243ae9b2112b", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1305, "license_type": "permissive", "max_line_length": 86, "num_lines": 44, "path": "/src/descriptor_runner/operators/webgl/operators/standard/cast.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { onnx } from \"onnx-proto\";\nimport { OperatorImpl } from \"../../../operatorImpl\";\nimport { WebDNNWebGLContext } from \"../../../../interface/backend/webgl/webglContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { getAttrInt } from \"../../../operatorUtil\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\nclass Cast extends OperatorImpl {\n to!: onnx.TensorProto.DataType;\n\n constructor() {\n super(\"webgl\");\n }\n\n initialize(attribute: onnx.IAttributeProto[]): void {\n super.initialize(attribute);\n this.to = getAttrInt(attribute, 
\"to\", onnx.TensorProto.DataType.FLOAT);\n }\n\n async run(context: WebDNNWebGLContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsWebGLTensorArray(inputs);\n const input = inputs[0];\n // 現状、trivialなfloat32->float32のキャストのみ通す\n if (input.dataType !== \"float32\") {\n throw new Error(`Cast: input must be float32`);\n }\n if (this.to !== onnx.TensorProto.DataType.FLOAT) {\n throw new Error(`Cast: output must be float32`);\n }\n\n return [input.alias(input.dims)];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Cast\",\n backend: \"webgl\",\n opsetMin: 1,\n factory: () => new Cast(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.5204636454582214, "alphanum_fraction": 0.5373761653900146, "avg_line_length": 27.677793502807617, "blob_id": "6d734baf48e28a51c1ff4f2bdb34806373ebae8f", "content_id": "3aef96ad091a0d75a99910c99b3fce017569942b", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 42233, "license_type": "permissive", "max_line_length": 219, "num_lines": 1468, "path": "/src/graph_transpiler/webdnn/pass_conv_reshape_webgl.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "# Use RGBA channel in WebGL2\n\nfrom typing import Dict, Iterable, List, Optional\nimport onnx\nfrom webdnn.optimization_pass_result_webgl import OptimizationPassResultWebGL\nfrom webdnn.optimization_pass import OptimizationPass, OptimizationPassResult\nfrom webdnn.onnx_util import tensor_proto_to_numpy, get_attr_int\nfrom webdnn.operator_shader_webgl import OperatorShaderWebGL\n\nSHADER_CODE = \"\"\"import {\n WebDNNWebGLContext,\n WebGLUniformItem,\n} from \"../../../../interface/backend/webgl/webglContext\";\nimport { WebGLTensor } from \"../../../../interface/backend/webgl/webglTensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { arange } from \"../../../../util\";\nimport { Conv } from \"../../../base/conv\";\nimport {\n shaderGenHeader,\n shaderGenOutput,\n shaderGenOutputVec4,\n shaderGenTensorNDGet,\n shaderGenTensorNDGetUniformItem,\n shaderGenTensorNDGetVec4,\n shaderGenTensorOutputCoordsWithReturn,\n shaderGenTensorOutputUniform,\n shaderGenTensorOutputUniformItem,\n} from \"../../shaderHelper\";\n\nconst IM2COL_SPLIT_NUMEL = 4194304;\n\nclass WebGLConvReshapeWebGL extends Conv {\n constructor() {\n super(\"webgl\");\n }\n\n async run(context: WebDNNWebGLContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsWebGLTensorArray(inputs);\n const inputX = inputs[0],\n inputW = inputs[1],\n inputB = inputs[2];\n // TODO: 2D以外対応\n if (inputX.ndim !== 4) {\n throw new Error(\"Conv other than 2D is not yet supported\");\n }\n const {\n batch,\n dilations,\n group,\n kernelShape,\n pads,\n strides,\n inShape,\n outShape,\n chIn,\n chInPerGroup,\n chOut,\n chOutPerGroup,\n } = this.calcShape(inputX.dims, inputW.dims);\n if (inputX.dimPerPixel !== 1 || (inputB && inputB.dimPerPixel !== 1)) {\n throw new Error();\n }\n // Wのdimperpixelは分岐\n // 場合分け\n let matmulData: WebGLTensor;\n\n const im2colLengthPerOutRow =\n group *\n batch *\n outShape[1] *\n chInPerGroup *\n kernelShape[0] *\n kernelShape[1];\n const im2colLength = im2colLengthPerOutRow * outShape[0];\n const cinkhkw = chInPerGroup * kernelShape[0] * kernelShape[1];\n // const gbout = group * batch * outShape[0] * outShape[1];\n const chunkCount = Math.ceil(im2colLength / IM2COL_SPLIT_NUMEL);\n const 
defaultChunkSize = Math.ceil(outShape[0] / chunkCount);\n const chunkInfos: { offset: number; length: number }[] = [];\n const matmulOutputs: WebGLTensor[] = [];\n // split by outShape0 -> im2col -> matmul -> concat\n const rectangleCase = group * batch * defaultChunkSize * outShape[1] < context.maxTextureSize;\n for (let chunk = 0; chunk < chunkCount; chunk++) {\n const chunkOffset = chunk * defaultChunkSize;\n const chunkSize = Math.min(defaultChunkSize, outShape[0] - chunkOffset);\n chunkInfos.push({ offset: chunkOffset, length: chunkSize });\n const chunkGBout = group * batch * chunkSize * outShape[1];\n // W is reshaped to *, cinkhkw by optimizer\n if (rectangleCase) {\n if (context.webgl2 && cinkhkw % 4 === 0 && inputW.dimPerPixel === 4) {\n if (chOutPerGroup % 4 === 0) {\n // all 4ch\n\n const im2colData = context.emptyTensor(\n [\n group *\n batch *\n chunkSize *\n outShape[1] *\n chInPerGroup *\n kernelShape[0] *\n kernelShape[1],\n ],\n \"float32\",\n {\n dimPerPixel: 4,\n textureShape: [chunkGBout, cinkhkw / 4],\n }\n );\n await this.im2col4(\n context,\n inputX,\n im2colData,\n batch,\n dilations,\n group,\n kernelShape,\n pads,\n strides,\n inShape,\n outShape,\n chIn,\n chInPerGroup,\n chunkOffset,\n chunkSize\n );\n const matmulChunkData = context.emptyTensor(\n [chunkGBout * chOutPerGroup],\n \"float32\",\n {\n dimPerPixel: 4,\n textureShape: [chunkGBout, chOutPerGroup / 4],\n }\n );\n await this.matmul44(\n context,\n im2colData,\n inputW,\n matmulChunkData,\n group,\n batch * chunkSize * outShape[1],\n chInPerGroup * kernelShape[0] * kernelShape[1],\n chOutPerGroup\n );\n im2colData.dispose();\n matmulOutputs.push(matmulChunkData);\n } else {\n // input 4ch, output 1ch\n\n const im2colData = context.emptyTensor(\n [\n group *\n batch *\n chunkSize *\n outShape[1] *\n chInPerGroup *\n kernelShape[0] *\n kernelShape[1],\n ],\n \"float32\",\n {\n dimPerPixel: 4,\n textureShape: [chunkGBout, cinkhkw / 4],\n }\n );\n await this.im2col4(\n context,\n inputX,\n im2colData,\n batch,\n dilations,\n group,\n kernelShape,\n pads,\n strides,\n inShape,\n outShape,\n chIn,\n chInPerGroup,\n chunkOffset,\n chunkSize\n );\n const matmulChunkData = context.emptyTensor(\n [chunkGBout * chOutPerGroup],\n \"float32\",\n {\n dimPerPixel: 1,\n textureShape: [chunkGBout, chOutPerGroup],\n }\n );\n await this.matmul41(\n context,\n im2colData,\n inputW,\n matmulChunkData,\n group,\n batch * chunkSize * outShape[1],\n chInPerGroup * kernelShape[0] * kernelShape[1],\n chOutPerGroup\n );\n im2colData.dispose();\n matmulOutputs.push(matmulChunkData);\n }\n } else {\n // all 1ch\n\n const im2colData = context.emptyTensor(\n [\n group *\n batch *\n chunkSize *\n outShape[1] *\n chInPerGroup *\n kernelShape[0] *\n kernelShape[1],\n ],\n \"float32\",\n {\n dimPerPixel: 1,\n textureShape: [\n group * batch * chunkSize * outShape[1],\n chInPerGroup * kernelShape[0] * kernelShape[1],\n ],\n }\n );\n await this.im2col1(\n context,\n inputX,\n im2colData,\n batch,\n dilations,\n group,\n kernelShape,\n pads,\n strides,\n inShape,\n outShape,\n chIn,\n chInPerGroup,\n chOut,\n chOutPerGroup,\n chunkOffset,\n chunkSize\n );\n const matmulChunkData = context.emptyTensor(\n [group * batch * chunkSize * outShape[1] * chOutPerGroup],\n \"float32\",\n {\n dimPerPixel: 1,\n textureShape: [\n group * batch * chunkSize * outShape[1],\n chOutPerGroup,\n ],\n }\n );\n await this.matmul11(\n context,\n im2colData,\n inputW,\n matmulChunkData,\n group,\n batch * chunkSize * outShape[1],\n chInPerGroup * kernelShape[0] * 
kernelShape[1],\n chOutPerGroup\n );\n im2colData.dispose();\n matmulOutputs.push(matmulChunkData);\n }\n } else {\n if (inputW.dimPerPixel === 4) {\n // generic but W is 4ch\n\n const im2colData = context.emptyTensor([\n group *\n batch *\n chunkSize *\n outShape[1] *\n chInPerGroup *\n kernelShape[0] *\n kernelShape[1],\n ]);\n await this.im2col1(\n context,\n inputX,\n im2colData,\n batch,\n dilations,\n group,\n kernelShape,\n pads,\n strides,\n inShape,\n outShape,\n chIn,\n chInPerGroup,\n chOut,\n chOutPerGroup,\n chunkOffset,\n chunkSize\n );\n const matmulChunkData = context.emptyTensor([\n group * batch * chunkSize * outShape[1] * chOutPerGroup,\n ]);\n await this.matmulgw4(\n context,\n im2colData,\n inputW,\n matmulChunkData,\n group,\n batch * chunkSize * outShape[1],\n chInPerGroup * kernelShape[0] * kernelShape[1],\n chOutPerGroup\n );\n im2colData.dispose();\n matmulOutputs.push(matmulChunkData);\n } else {\n // generic all 1ch\n\n const im2colData = context.emptyTensor([\n group *\n batch *\n chunkSize *\n outShape[1] *\n chInPerGroup *\n kernelShape[0] *\n kernelShape[1],\n ]);\n await this.im2col1(\n context,\n inputX,\n im2colData,\n batch,\n dilations,\n group,\n kernelShape,\n pads,\n strides,\n inShape,\n outShape,\n chIn,\n chInPerGroup,\n chOut,\n chOutPerGroup,\n chunkOffset,\n chunkSize\n );\n const matmulChunkData = context.emptyTensor([\n group * batch * chunkSize * outShape[1] * chOutPerGroup,\n ]);\n await this.matmulg1(\n context,\n im2colData,\n inputW,\n matmulChunkData,\n group,\n batch * chunkSize * outShape[1],\n chInPerGroup * kernelShape[0] * kernelShape[1],\n chOutPerGroup\n );\n im2colData.dispose();\n matmulOutputs.push(matmulChunkData);\n }\n }\n }\n if (matmulOutputs.length === 1) {\n matmulData = matmulOutputs[0];\n } else {\n matmulData = context.emptyTensor(\n [group * batch * outShape[0] * outShape[1] * chOutPerGroup],\n \"float32\",\n {\n dimPerPixel: matmulOutputs[0].dimPerPixel,\n }\n );\n await this.concat(\n context,\n matmulOutputs,\n matmulData,\n group * batch,\n outShape[0],\n outShape[1] * chOutPerGroup,\n chunkInfos\n );\n matmulOutputs.forEach((mO) => mO.dispose());\n }\n\n const output = context.emptyTensor([\n batch,\n chOut,\n outShape[0],\n outShape[1],\n ]);\n if (inputB) {\n const transposeData = context.emptyTensor([\n batch * chOut * outShape[0] * outShape[1],\n ]);\n\n if (matmulData.dimPerPixel === 4) {\n await this.transpose4(\n context,\n matmulData,\n transposeData,\n group,\n batch,\n outShape[0] * outShape[1],\n chOutPerGroup\n );\n } else {\n await this.transpose1(\n context,\n matmulData,\n transposeData,\n group,\n batch,\n outShape[0] * outShape[1],\n chOutPerGroup\n );\n }\n matmulData.dispose();\n await this.bias(\n context,\n transposeData,\n inputB,\n output,\n batch,\n chOut,\n outShape[0] * outShape[1]\n );\n transposeData.dispose();\n } else {\n if (matmulData.dimPerPixel === 4) {\n await this.transpose4(\n context,\n matmulData,\n output,\n group,\n batch,\n outShape[0] * outShape[1],\n chOutPerGroup\n );\n } else {\n await this.transpose1(\n context,\n matmulData,\n output,\n group,\n batch,\n outShape[0] * outShape[1],\n chOutPerGroup\n );\n }\n matmulData.dispose();\n }\n return [output];\n }\n\n private async transpose1(\n context: WebDNNWebGLContext,\n dT: WebGLTensor,\n dO: WebGLTensor,\n group: number,\n batch: number,\n outarea: number,\n chOutPerGroup: number\n ) {\n // DT(group, batch, outh, outw, choutpergroup) -> dO(batch, group, choutpergroup, outh, outw)\n\n const kernelName = 
`convreshapewebgl_transpose1`;\n if (!context.hasKernel(kernelName)) {\n const kernelSource = `${shaderGenHeader(context.webgl2)}\n \n ${shaderGenTensorOutputUniform(1)}\n uniform int GROUP;\n uniform int BATCH;\n uniform int COPG;\n uniform int OUTAREA;\n \n ${shaderGenTensorNDGet(\"tex_input\", 1, context.webgl2)}\n \n void main() {\n ${shaderGenTensorOutputCoordsWithReturn(1)}\n int rem = tex_output_flat;\n int quo = rem / OUTAREA;\n int x = rem - quo * OUTAREA;\n rem = quo;\n quo = rem / COPG;\n int c = rem - quo * COPG;\n rem = quo;\n quo = rem / GROUP;\n int g = rem - quo * GROUP;\n int b = quo;\n \n float s = 0.0;\n s = get_tex_input(((g * BATCH + b) * OUTAREA + x) * COPG + c);\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n }\n `;\n context.addKernel(kernelName, kernelSource);\n }\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\"tex_input\", [1], dT, context.webgl2),\n ...shaderGenTensorOutputUniformItem([dO.length], dO, context.webgl2),\n { name: \"GROUP\", type: \"int\", value: group },\n { name: \"BATCH\", type: \"int\", value: batch },\n { name: \"COPG\", type: \"int\", value: chOutPerGroup },\n { name: \"OUTAREA\", type: \"int\", value: outarea },\n ];\n await context.runKernel(\n kernelName,\n [{ tensor: dT, name: \"tex_input\" }],\n dO,\n uniforms\n );\n }\n\n private async concat(\n context: WebDNNWebGLContext,\n dCs: WebGLTensor[],\n dO: WebGLTensor,\n outerLength: number,\n concatLength: number,\n innerLength: number,\n chunks: { offset: number; length: number }[]\n ): Promise<void> {\n const dPP4 = dCs.every((dC) => dC.dimPerPixel === 4);\n if (!dPP4 && !dCs.every((dC) => dC.dimPerPixel === 1)) {\n throw new Error(\n \"ConvReshapeWebGL: concat tensor's dimPerPixel is not unified\"\n );\n }\n const kernelName = `convreshapewebgl_concat_${chunks.length}_${dPP4}`;\n if (!context.hasKernel(kernelName)) {\n const getEach = arange(chunks.length)\n .map((i) =>\n dPP4\n ? shaderGenTensorNDGetVec4(`tex_input_${i}`, 3, context.webgl2)\n : shaderGenTensorNDGet(`tex_input_${i}`, 3, context.webgl2)\n )\n .join(\"\");\n const uniformChunks = arange(chunks.length)\n .map((i) => `uniform int CHUNK_OFS${i};`)\n .join(\"\");\n let takeCode = `\nif (tex_output_1 < CHUNK_OFS1) {\n s = get${\n dPP4 ? \"_vec4\" : \"\"\n }_tex_input_0(tex_output_0, tex_output_1, tex_output_2);\n}\n`;\n for (let i = 1; i < chunks.length - 1; i++) {\n takeCode += ` else if (tex_output_1 < CHUNK_OFS${i + 1}) {\n s = get${\n dPP4 ? \"_vec4\" : \"\"\n }_tex_input_${i}(tex_output_0, tex_output_1 - CHUNK_OFS${i}, tex_output_2);\n}\n`;\n }\n takeCode += `\nelse {\n s = get${dPP4 ? \"_vec4\" : \"\"}_tex_input_${\n chunks.length - 1\n }(tex_output_0, tex_output_1 - CHUNK_OFS${\n chunks.length - 1\n }, tex_output_2);\n}\n`;\n\n const kernelSource = `${shaderGenHeader(context.webgl2)}\n \n ${shaderGenTensorOutputUniform(3)}\n ${uniformChunks}\n \n ${getEach}\n \n void main() {\n ${shaderGenTensorOutputCoordsWithReturn(3)}\n ${dPP4 ? \"vec4 s = vec4(0.0, 0.0, 0.0, 0.0);\" : \"float s = 0.0;\"}\n\n ${takeCode}\n ${\n dPP4\n ? shaderGenOutputVec4(\"s\", context.webgl2)\n : shaderGenOutput(\"s\", context.webgl2)\n }\n return;\n }\n `;\n context.addKernel(kernelName, kernelSource);\n }\n\n const innerLengthPixel = dPP4 ? 
innerLength / 4 : innerLength;\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorOutputUniformItem(\n [outerLength, concatLength, innerLengthPixel],\n dO,\n context.webgl2\n ),\n ];\n for (let i = 0; i < chunks.length; i++) {\n uniforms.push(\n ...shaderGenTensorNDGetUniformItem(\n `tex_input_${i}`,\n [chunks[i].length * innerLengthPixel, innerLengthPixel, 1],\n dCs[i],\n context.webgl2\n )\n );\n uniforms.push({\n name: `CHUNK_OFS${i}`,\n value: chunks[i].offset,\n type: \"int\",\n });\n }\n await context.runKernel(\n kernelName,\n dCs.map((dC, i) => ({ tensor: dC, name: `tex_input_${i}` })),\n dO,\n uniforms\n );\n }\n\n private async transpose4(\n context: WebDNNWebGLContext,\n dT: WebGLTensor,\n dO: WebGLTensor,\n group: number,\n batch: number,\n outarea: number,\n chOutPerGroup: number\n ) {\n // DT(group, batch, outh, outw, choutpergroup) -> dO(batch, group, choutpergroup, outh, outw)\n\n const kernelName = `convreshapewebgl_transpose4`;\n if (!context.hasKernel(kernelName)) {\n const kernelSource = `${shaderGenHeader(context.webgl2)}\n \n ${shaderGenTensorOutputUniform(1)}\n uniform int GROUP;\n uniform int BATCH;\n uniform int COPG;\n uniform int OUTAREA;\n \n ${shaderGenTensorNDGetVec4(\"tex_input\", 1, context.webgl2)}\n \n void main() {\n ${shaderGenTensorOutputCoordsWithReturn(1)}\n int rem = tex_output_flat;\n int quo = rem / OUTAREA;\n int x = rem - quo * OUTAREA;\n rem = quo;\n quo = rem / COPG;\n int c = rem - quo * COPG;\n rem = quo;\n quo = rem / GROUP;\n int g = rem - quo * GROUP;\n int b = quo;\n\n int flat_index = ((g * BATCH + b) * OUTAREA + x) * COPG + c;\n int rgba_index = flat_index / 4;\n int color = flat_index - rgba_index * 4;\n \n float s = 0.0;\n switch (color) {\n case 0:\n s = get_vec4_tex_input(rgba_index).r;\n break;\n case 1:\n s = get_vec4_tex_input(rgba_index).g;\n break;\n case 2:\n s = get_vec4_tex_input(rgba_index).b;\n break;\n case 3:\n s = get_vec4_tex_input(rgba_index).a;\n break;\n }\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n }\n `;\n context.addKernel(kernelName, kernelSource);\n }\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\"tex_input\", [1], dT, context.webgl2),\n ...shaderGenTensorOutputUniformItem([dO.length], dO, context.webgl2),\n { name: \"GROUP\", type: \"int\", value: group },\n { name: \"BATCH\", type: \"int\", value: batch },\n { name: \"COPG\", type: \"int\", value: chOutPerGroup },\n { name: \"OUTAREA\", type: \"int\", value: outarea },\n ];\n await context.runKernel(\n kernelName,\n [{ tensor: dT, name: \"tex_input\" }],\n dO,\n uniforms\n );\n }\n private async bias(\n context: WebDNNWebGLContext,\n dI: WebGLTensor,\n dB: WebGLTensor,\n dO: WebGLTensor,\n batch: number,\n chOut: number,\n outarea: number\n ) {\n const kernelName = `convreshapewebgl_bias`;\n if (!context.hasKernel(kernelName)) {\n const kernelSource = `${shaderGenHeader(context.webgl2)}\n \n ${shaderGenTensorOutputUniform(1)}\n uniform int BATCH;\n uniform int COUT;\n uniform int OUTAREA;\n \n ${shaderGenTensorNDGet(\"tex_input_i\", 1, context.webgl2)}\n ${shaderGenTensorNDGet(\"tex_input_b\", 1, context.webgl2)}\n \n void main() {\n ${shaderGenTensorOutputCoordsWithReturn(1)}\n int rem = tex_output_flat;\n int quo = rem / OUTAREA;\n int x = rem - quo * OUTAREA;\n rem = quo;\n quo = rem / COUT;\n int c = rem - quo * COUT;\n int b = quo;\n \n float s = 0.0;\n s = get_tex_input_i(tex_output_flat) + get_tex_input_b(c);\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n }\n `;\n 
context.addKernel(kernelName, kernelSource);\n }\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_i\",\n [1],\n dI,\n context.webgl2\n ),\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_b\",\n [1],\n dB,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem([dO.length], dO, context.webgl2),\n { name: \"BATCH\", type: \"int\", value: batch },\n { name: \"COUT\", type: \"int\", value: chOut },\n { name: \"OUTAREA\", type: \"int\", value: outarea },\n ];\n await context.runKernel(\n kernelName,\n [\n { tensor: dI, name: \"tex_input_i\" },\n { tensor: dB, name: \"tex_input_b\" },\n ],\n dO,\n uniforms\n );\n }\n\n private async im2col4(\n context: WebDNNWebGLContext,\n dX: WebGLTensor,\n dI: WebGLTensor,\n batch: number,\n dilations: number[],\n group: number,\n kernelShape: number[],\n pads: number[],\n strides: number[],\n inShape: number[],\n outShape: number[],\n chIn: number,\n chInPerGroup: number,\n outShape0Offset: number,\n outShape0ChunkSize: number\n ) {\n const kernelName = `convreshapewebgl_im2col4_split`;\n if (!context.hasKernel(kernelName)) {\n const kernelSource = `${shaderGenHeader(context.webgl2)}\n \n ${shaderGenTensorOutputUniform(1)}\n uniform int GROUP;\n uniform int BATCH;\n uniform int O0;\n uniform int O1;\n uniform int CI;\n uniform int CIPG;\n uniform int K0;\n uniform int K1;\n uniform int S0;\n uniform int S1;\n uniform int P0;\n uniform int P1;\n uniform int D0;\n uniform int D1;\n uniform int IS0;\n uniform int IS1;\n uniform int O0OFS;\n uniform int O0CHUNK;\n \n ${shaderGenTensorNDGet(\"tex_input\", 1, context.webgl2)}\n \n float get_one_pixel(int tex_output_flat) {\n // decompose the flat output index; k1 is the innermost axis\n int rem = tex_output_flat;\n int quo = rem / K1;\n int k1 = rem - quo * K1;\n rem = quo;\n quo = rem / K0;\n int k0 = rem - quo * K0;\n rem = quo;\n quo = rem / CIPG;\n int ci = rem - quo * CIPG;\n rem = quo;\n quo = rem / O1;\n int o1 = rem - quo * O1;\n rem = quo;\n quo = rem / O0CHUNK;\n int o0 = rem - quo * O0CHUNK + O0OFS;\n rem = quo;\n quo = rem / BATCH;\n int b = rem - quo * BATCH;\n int g = quo;\n \n int in0 = o0 * S0 - P0 + k0 * D0;\n int in1 = o1 * S1 - P1 + k1 * D1;\n float s = 0.0;\n if (in0 >= 0 && in0 < IS0 && in1 >= 0 && in1 < IS1) {\n s = get_tex_input(((b * CI + g * CIPG + ci) * IS0 + in0) * IS1 + in1);\n }\n\n return s;\n }\n\n void main() {\n ${shaderGenTensorOutputCoordsWithReturn(1)}\n vec4 s = vec4(get_one_pixel(tex_output_flat * 4),\n get_one_pixel(tex_output_flat * 4 + 1),\n get_one_pixel(tex_output_flat * 4 + 2),\n get_one_pixel(tex_output_flat * 4 + 3));\n ${shaderGenOutputVec4(\"s\", context.webgl2)}\n return;\n }\n `;\n context.addKernel(kernelName, kernelSource);\n }\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\"tex_input\", [1], dX, context.webgl2),\n ...shaderGenTensorOutputUniformItem([dI.length / 4], dI, context.webgl2), // Div by RGBA\n { name: \"GROUP\", type: \"int\", value: group },\n { name: \"BATCH\", type: \"int\", value: batch },\n { name: \"O0\", type: \"int\", value: outShape[0] },\n { name: \"O1\", type: \"int\", value: outShape[1] },\n { name: \"CI\", type: \"int\", value: chIn },\n { name: \"CIPG\", type: \"int\", value: chInPerGroup },\n { name: \"K0\", type: \"int\", value: kernelShape[0] },\n { name: \"K1\", type: \"int\", value: kernelShape[1] },\n { name: \"S0\", type: \"int\", value: strides[0] },\n { name: \"S1\", type: \"int\", value: strides[1] },\n { name: \"P0\", type: \"int\", value: pads[0] },\n { 
name: \"D0\", type: \"int\", value: dilations[0] },\n { name: \"D1\", type: \"int\", value: dilations[1] },\n { name: \"IS0\", type: \"int\", value: inShape[0] },\n { name: \"IS1\", type: \"int\", value: inShape[1] },\n { name: \"O0OFS\", type: \"int\", value: outShape0Offset },\n { name: \"O0CHUNK\", type: \"int\", value: outShape0ChunkSize },\n ];\n await context.runKernel(\n kernelName,\n [{ tensor: dX, name: \"tex_input\" }],\n dI,\n uniforms\n );\n }\n\n private async matmul44(\n context: WebDNNWebGLContext,\n dI: WebGLTensor,\n dW: WebGLTensor,\n dT: WebGLTensor,\n group: number,\n bout: number,\n cinkhkw: number,\n chOutPerGroup: number\n ) {\n /*\n * DI(group, bout, cinkhkw) * dW(group, coutpergroup, cinkhkw) -> dT(group, bout, coutpergroup)\n * ループ回数は定数が必要\n */\n const kernelName = `convreshapewebgl_matmul44_${cinkhkw}`;\n if (!context.hasKernel(kernelName)) {\n const kernelSource = `${shaderGenHeader(context.webgl2)}\n \n ${shaderGenTensorOutputUniform(1)}\n #define cinkhkw ${cinkhkw}\n #define cinkhkwdiv4 ${cinkhkw / 4}\n uniform int GROUP;\n uniform int BOUT;\n uniform int COPG;\n \n ${shaderGenTensorNDGet(\"tex_input_w\", 1, context.webgl2)}\n ${shaderGenTensorNDGet(\"tex_input_i\", 1, context.webgl2)}\n \n void main() {\n highp float helper_gfcx = gl_FragCoord.x;\n highp float helper_gfcy = gl_FragCoord.y;\n int xdiv4 = int(helper_gfcx - 0.5);\n int gy = int(helper_gfcy - 0.5);\n int g = gy / BOUT;\n int y = gy - g * BOUT;\n \n vec4 packs = vec4(0.0, 0.0, 0.0, 0.0);\n int iofs = g * BOUT + y;\n int wofs0 = g * COPG + xdiv4 * 4;\n int wofs1 = wofs0 + 1;\n int wofs2 = wofs0 + 2;\n int wofs3 = wofs0 + 3;\n for (int ip = 0; ip < cinkhkwdiv4; ip++) {\n vec4 tfi = texelFetch(tex_input_i, ivec2(ip, iofs), 0);\n vec4 tfw0 = texelFetch(tex_input_w, ivec2(ip, wofs0), 0);\n vec4 tfw1 = texelFetch(tex_input_w, ivec2(ip, wofs1), 0);\n vec4 tfw2 = texelFetch(tex_input_w, ivec2(ip, wofs2), 0);\n vec4 tfw3 = texelFetch(tex_input_w, ivec2(ip, wofs3), 0);\n packs += vec4(dot(tfi, tfw0), dot(tfi, tfw1), dot(tfi, tfw2), dot(tfi, tfw3));\n }\n ${shaderGenOutputVec4(\"packs\", context.webgl2)}\n return;\n }\n `;\n context.addKernel(kernelName, kernelSource);\n }\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_w\",\n [1],\n dW,\n context.webgl2\n ),\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_i\",\n [1],\n dI,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem([dT.length / 4], dT, context.webgl2),\n { name: \"GROUP\", type: \"int\", value: group },\n { name: \"BOUT\", type: \"int\", value: bout },\n { name: \"COPG\", type: \"int\", value: chOutPerGroup },\n ];\n await context.runKernel(\n kernelName,\n [\n { tensor: dW, name: \"tex_input_w\" },\n { tensor: dI, name: \"tex_input_i\" },\n ],\n dT,\n uniforms\n );\n }\n\n private async matmul41(\n context: WebDNNWebGLContext,\n dI: WebGLTensor,\n dW: WebGLTensor,\n dT: WebGLTensor,\n group: number,\n bout: number,\n cinkhkw: number,\n chOutPerGroup: number\n ) {\n /*\n * DI(group, bout, cinkhkw) * dW(group, coutpergroup, cinkhkw) -> dT(group, bout, coutpergroup)\n * ループ回数は定数が必要\n */\n const kernelName = `convreshapewebgl_matmul41_${cinkhkw}`;\n if (!context.hasKernel(kernelName)) {\n const kernelSource = `${shaderGenHeader(context.webgl2)}\n \n ${shaderGenTensorOutputUniform(1)}\n #define cinkhkw ${cinkhkw}\n #define cinkhkwdiv4 ${cinkhkw / 4}\n uniform int GROUP;\n uniform int BOUT;\n uniform int COPG;\n \n ${shaderGenTensorNDGet(\"tex_input_w\", 1, context.webgl2)}\n 
${shaderGenTensorNDGet(\"tex_input_i\", 1, context.webgl2)}\n \n void main() {\n highp float helper_gfcx = gl_FragCoord.x;\n highp float helper_gfcy = gl_FragCoord.y;\n int x = int(helper_gfcx - 0.5);\n int gy = int(helper_gfcy - 0.5);\n int g = gy / BOUT;\n int y = gy - g * BOUT;\n \n float s = 0.0;\n int iofs = g * BOUT + y;\n int wofs = g * COPG + x;\n for (int ip = 0; ip < cinkhkwdiv4; ip++) {\n s += dot(texelFetch(tex_input_i, ivec2(ip, iofs), 0), texelFetch(tex_input_w, ivec2(ip, wofs), 0));\n }\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n }\n `;\n context.addKernel(kernelName, kernelSource);\n }\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_w\",\n [1],\n dW,\n context.webgl2\n ),\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_i\",\n [1],\n dI,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem([dT.length], dT, context.webgl2),\n { name: \"GROUP\", type: \"int\", value: group },\n { name: \"BOUT\", type: \"int\", value: bout },\n { name: \"COPG\", type: \"int\", value: chOutPerGroup },\n ];\n await context.runKernel(\n kernelName,\n [\n { tensor: dW, name: \"tex_input_w\" },\n { tensor: dI, name: \"tex_input_i\" },\n ],\n dT,\n uniforms\n );\n }\n\n private async im2col1(\n context: WebDNNWebGLContext,\n dX: WebGLTensor,\n dI: WebGLTensor,\n batch: number,\n dilations: number[],\n group: number,\n kernelShape: number[],\n pads: number[],\n strides: number[],\n inShape: number[],\n outShape: number[],\n chIn: number,\n chInPerGroup: number,\n chOut: number,\n chOutPerGroup: number,\n outShape0Offset: number,\n outShape0ChunkSize: number\n ) {\n const kernelName = `convreshapewebgl_im2col1_split`;\n if (!context.hasKernel(kernelName)) {\n const kernelSource = `${shaderGenHeader(context.webgl2)}\n \n ${shaderGenTensorOutputUniform(1)}\n uniform int GROUP;\n uniform int BATCH;\n uniform int O0;\n uniform int O1;\n uniform int CI;\n uniform int CIPG;\n uniform int K0;\n uniform int K1;\n uniform int S0;\n uniform int S1;\n uniform int P0;\n uniform int P1;\n uniform int D0;\n uniform int D1;\n uniform int IS0;\n uniform int IS1;\n uniform int O0OFS;\n uniform int O0CHUNK;\n \n ${shaderGenTensorNDGet(\"tex_input\", 1, context.webgl2)}\n \n void main() {\n ${shaderGenTensorOutputCoordsWithReturn(1)}\n int rem = tex_output_flat;\n int quo = rem / K0;\n int k1 = rem - quo * K1;\n rem = quo;\n quo = rem / K0;\n int k0 = rem - quo * K0;\n rem = quo;\n quo = rem / CIPG;\n int ci = rem - quo * CIPG;\n rem = quo;\n quo = rem / O1;\n int o1 = rem - quo * O1;\n rem = quo;\n quo = rem / O0CHUNK;\n int o0 = rem - quo * O0CHUNK + O0OFS;\n rem = quo;\n quo = rem / BATCH;\n int b = rem - quo * BATCH;\n int g = quo;\n \n int in0 = o0 * S0 - P0 + k0 * D0;\n int in1 = o1 * S1 - P1 + k1 * D1;\n float s = 0.0;\n if (in0 >= 0 && in0 < IS0 && in1 >= 0 && in1 < IS1) {\n s = get_tex_input(((b * CI + g * CIPG + ci) * IS0 + in0) * IS1 + in1);\n }\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n }\n `;\n context.addKernel(kernelName, kernelSource);\n }\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\"tex_input\", [1], dX, context.webgl2),\n ...shaderGenTensorOutputUniformItem([dI.length], dI, context.webgl2),\n { name: \"GROUP\", type: \"int\", value: group },\n { name: \"BATCH\", type: \"int\", value: batch },\n { name: \"O0\", type: \"int\", value: outShape[0] },\n { name: \"O1\", type: \"int\", value: outShape[1] },\n { name: \"CI\", type: \"int\", value: chIn },\n { name: \"CIPG\", type: \"int\", 
value: chInPerGroup },\n { name: \"K0\", type: \"int\", value: kernelShape[0] },\n { name: \"K1\", type: \"int\", value: kernelShape[1] },\n { name: \"S0\", type: \"int\", value: strides[0] },\n { name: \"S1\", type: \"int\", value: strides[1] },\n { name: \"P0\", type: \"int\", value: pads[0] },\n { name: \"P1\", type: \"int\", value: pads[1] },\n { name: \"D0\", type: \"int\", value: dilations[0] },\n { name: \"D1\", type: \"int\", value: dilations[1] },\n { name: \"IS0\", type: \"int\", value: inShape[0] },\n { name: \"IS1\", type: \"int\", value: inShape[1] },\n { name: \"O0OFS\", type: \"int\", value: outShape0Offset },\n { name: \"O0CHUNK\", type: \"int\", value: outShape0ChunkSize },\n ];\n await context.runKernel(\n kernelName,\n [{ tensor: dX, name: \"tex_input\" }],\n dI,\n uniforms\n );\n }\n\n private async matmul11(\n context: WebDNNWebGLContext,\n dI: WebGLTensor,\n dW: WebGLTensor,\n dT: WebGLTensor,\n group: number,\n bout: number,\n cinkhkw: number,\n chOutPerGroup: number\n ) {\n /*\n * DI(group, bout, cinkhkw) * dW(group, coutpergroup, cinkhkw) -> dT(group, bout, coutpergroup)\n * The loop count must be a compile-time constant\n */\n const kernelName = `convreshapewebgl_matmul11_${cinkhkw}`;\n if (!context.hasKernel(kernelName)) {\n const kernelSource = `${shaderGenHeader(context.webgl2)}\n \n ${shaderGenTensorOutputUniform(1)}\n #define cinkhkw ${cinkhkw}\n uniform int GROUP;\n uniform int BOUT;\n uniform int COPG;\n \n ${shaderGenTensorNDGet(\"tex_input_w\", 1, context.webgl2)}\n ${shaderGenTensorNDGet(\"tex_input_i\", 1, context.webgl2)}\n \n void main() {\n highp float helper_gfcx = gl_FragCoord.x;\n highp float helper_gfcy = gl_FragCoord.y;\n int x = int(helper_gfcx - 0.5);\n int gy = int(helper_gfcy - 0.5);\n int g = gy / BOUT;\n int y = gy - g * BOUT;\n \n float s = 0.0;\n ${\n context.webgl2\n ? `\n int iofs = g * BOUT + y;\n int wofs = g * COPG + x;\n`\n : `\n float iofs = (float(g * BOUT + y) + 0.5) / float(tex_input_i_texture_h);\n float wofs = (float(g * COPG + x) + 0.5) / float(tex_input_w_texture_h);\n`\n }\n for (int ip = 0; ip < cinkhkw; ip++) {\n ${\n context.webgl2\n ? 
\"s += texelFetch(tex_input_i, ivec2(ip, iofs), 0).r * texelFetch(tex_input_w, ivec2(ip, wofs), 0).r;\"\n : \"s += decode_float(texture2D(tex_input_i, vec2((float(ip) + 0.5) / float(tex_input_i_texture_w), iofs))) * decode_float(texture2D(tex_input_w, vec2((float(ip) + 0.5) / float(tex_input_w_texture_w), wofs)));\"\n }\n \n }\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n }\n `;\n context.addKernel(kernelName, kernelSource);\n }\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_w\",\n [1],\n dW,\n context.webgl2\n ),\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_i\",\n [1],\n dI,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem([dT.length], dT, context.webgl2),\n { name: \"GROUP\", type: \"int\", value: group },\n { name: \"BOUT\", type: \"int\", value: bout },\n { name: \"COPG\", type: \"int\", value: chOutPerGroup },\n ];\n await context.runKernel(\n kernelName,\n [\n { tensor: dW, name: \"tex_input_w\" },\n { tensor: dI, name: \"tex_input_i\" },\n ],\n dT,\n uniforms\n );\n }\n\n private async matmulg1(\n context: WebDNNWebGLContext,\n dI: WebGLTensor,\n dW: WebGLTensor,\n dT: WebGLTensor,\n group: number,\n bout: number,\n cinkhkw: number,\n chOutPerGroup: number\n ) {\n /*\n * DI(group, bout, cinkhkw) * dW(group, coutpergroup, cinkhkw) -> dT(group, bout, coutpergroup)\n * ループ回数は定数が必要\n */\n const kernelName = `convreshapewebgl_matmulg1_${cinkhkw}`;\n if (!context.hasKernel(kernelName)) {\n const kernelSource = `${shaderGenHeader(context.webgl2)}\n \n ${shaderGenTensorOutputUniform(1)}\n #define cinkhkw ${cinkhkw}\n uniform int GROUP;\n uniform int BOUT;\n uniform int COPG;\n \n ${shaderGenTensorNDGet(\"tex_input_w\", 1, context.webgl2)}\n ${shaderGenTensorNDGet(\"tex_input_i\", 1, context.webgl2)}\n \n void main() {\n ${shaderGenTensorOutputCoordsWithReturn(1)}\n int rem = tex_output_flat;\n int quo = rem / COPG;\n int x = rem - quo * COPG;\n rem = quo;\n quo = rem / BOUT;\n int y = rem - quo * BOUT;\n int g = quo;\n \n float s = 0.0;\n for (int ip = 0; ip < cinkhkw; ip++) {\n s += get_tex_input_i((g * BOUT + y) * cinkhkw + ip) * get_tex_input_w((g * COPG + x) * cinkhkw + ip);\n }\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n }\n `;\n context.addKernel(kernelName, kernelSource);\n }\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_w\",\n [1],\n dW,\n context.webgl2\n ),\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_i\",\n [1],\n dI,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem([dT.length], dT, context.webgl2),\n { name: \"GROUP\", type: \"int\", value: group },\n { name: \"BOUT\", type: \"int\", value: bout },\n { name: \"COPG\", type: \"int\", value: chOutPerGroup },\n ];\n await context.runKernel(\n kernelName,\n [\n { tensor: dW, name: \"tex_input_w\" },\n { tensor: dI, name: \"tex_input_i\" },\n ],\n dT,\n uniforms\n );\n }\n\n private async matmulgw4(\n context: WebDNNWebGLContext,\n dI: WebGLTensor,\n dW: WebGLTensor,\n dT: WebGLTensor,\n group: number,\n bout: number,\n cinkhkw: number,\n chOutPerGroup: number\n ) {\n /*\n * DI(group, bout, cinkhkw) * dW(group, coutpergroup, cinkhkw) -> dT(group, bout, coutpergroup)\n * ループ回数は定数が必要\n */\n const kernelName = `convreshapewebgl_matmulgw4_${cinkhkw}`;\n if (!context.hasKernel(kernelName)) {\n const kernelSource = `${shaderGenHeader(context.webgl2)}\n \n ${shaderGenTensorOutputUniform(1)}\n #define cinkhkw ${cinkhkw}\n #define cinkhkwdiv4 ${cinkhkw / 4}\n uniform int GROUP;\n uniform 
int BOUT;\n uniform int COPG;\n \n ${shaderGenTensorNDGetVec4(\"tex_input_w\", 1, context.webgl2)}\n ${shaderGenTensorNDGet(\"tex_input_i\", 1, context.webgl2)}\n \n void main() {\n ${shaderGenTensorOutputCoordsWithReturn(1)}\n int rem = tex_output_flat;\n int quo = rem / COPG;\n int x = rem - quo * COPG;\n rem = quo;\n quo = rem / BOUT;\n int y = rem - quo * BOUT;\n int g = quo;\n \n float s = 0.0;\n int iofs = (g * BOUT + y) * cinkhkw;\n int wofs = (g * COPG + x) * cinkhkwdiv4;\n for (int ip = 0; ip < cinkhkwdiv4; ip++) {\n s += dot(vec4(get_tex_input_i(iofs), get_tex_input_i(iofs+1), get_tex_input_i(iofs+2), get_tex_input_i(iofs+3)), get_vec4_tex_input_w(wofs));\n iofs += 4;\n wofs += 1;\n }\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n }\n `;\n context.addKernel(kernelName, kernelSource);\n }\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_w\",\n [1],\n dW,\n context.webgl2\n ),\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_i\",\n [1],\n dI,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem([dT.length], dT, context.webgl2),\n { name: \"GROUP\", type: \"int\", value: group },\n { name: \"BOUT\", type: \"int\", value: bout },\n { name: \"COPG\", type: \"int\", value: chOutPerGroup },\n ];\n await context.runKernel(\n kernelName,\n [\n { tensor: dW, name: \"tex_input_w\" },\n { tensor: dI, name: \"tex_input_i\" },\n ],\n dT,\n uniforms\n );\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"ConvReshapeWebGL\",\n backend: \"webgl\",\n opsetMin: 1,\n factory: () => new WebGLConvReshapeWebGL(),\n },\n ];\n}\n\n\"\"\"\n\nclass PassConvReshapeWebGL(OptimizationPass):\n def __init__(self, webgl2: bool, max_texture_size: int) -> None:\n super().__init__()\n self.webgl2 = webgl2\n self.max_texture_size = max_texture_size\n\n def optimize(self, model: onnx.ModelProto) -> Optional[OptimizationPassResult]:\n graph = model.graph\n changed = False\n result = OptimizationPassResultWebGL()\n for node in graph.node:\n if node.op_type == \"Conv\":\n group = get_attr_int(node, \"group\", 1)\n weight_name = node.input[1]\n initializers = graph.initializer\n optimizable = False\n for initializer in initializers:\n if initializer.name == weight_name:\n weight_array = tensor_proto_to_numpy(initializer)\n weight_array_shape = weight_array.shape\n if len(weight_array_shape) != 4:\n continue\n cinpg_kh_kw = weight_array_shape[1] * weight_array_shape[2] * weight_array_shape[3]\n cout = weight_array_shape[0]\n if self.webgl2 and cinpg_kh_kw % 4 == 0:\n if cinpg_kh_kw <= self.max_texture_size * 4 and cout <= self.max_texture_size:\n optimizable = True\n else:\n if cinpg_kh_kw <= self.max_texture_size and cout <= self.max_texture_size:\n optimizable = True\n if not optimizable:\n continue\n changed = True\n # optimize it to ConvReshapeWebGL\n node.op_type = \"ConvReshapeWebGL\"\n # add hint to use RGBA texture for weight\n if self.webgl2 and cinpg_kh_kw % 4 == 0:\n result.tensor_move_options[node.input[1]] = {\"dimPerPixel\": 4, \"textureShape\": [cout, cinpg_kh_kw // 4]}\n else:\n result.tensor_move_options[node.input[1]] = {\"dimPerPixel\": 1, \"textureShape\": [cout, cinpg_kh_kw]}\n result.operator_shaders[\"convreshapewebgl\"] = OperatorShaderWebGL(SHADER_CODE)\n # TODO: check weight is not used by other operator\n return result if changed else None\n" }, { "alpha_fraction": 0.6077844500541687, "alphanum_fraction": 0.6220059990882874, "avg_line_length": 28.688888549804688, "blob_id": 
"5b8b33f60efdf2bc4d2816458aa9ac76774b76ef", "content_id": "499a6a7a12c90ddb8036c22e761c183f0f9a3b9a", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1336, "license_type": "permissive", "max_line_length": 89, "num_lines": 45, "path": "/src/descriptor_runner/operators/webgpu/operators/standard/unary.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { OperatorImpl } from \"../../../operatorImpl\";\nimport { WebDNNWebGPUContext } from \"../../../../interface/backend/webgpu/webgpuContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { webgpuShaders } from \"../../shaders\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\nexport class WebGPUUnary extends OperatorImpl {\n constructor(public shaderName: string, private shaderBinary: Uint32Array) {\n super(\"webgpu\");\n }\n\n async run(context: WebDNNWebGPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n const input = inputs[0];\n if (input.dataType !== \"float32\") {\n throw new Error();\n }\n const outputTensor = context.emptyTensor(input.dims, \"float32\");\n\n if (!context.hasPipeline(this.shaderName)) {\n context.createPipeline(this.shaderName, this.shaderBinary, 3);\n }\n\n await context.run({\n pipelineName: this.shaderName,\n tensors: [input, outputTensor],\n meta: {\n elements: [{ value: input.length, type: \"uint32\" }],\n },\n workGroups: { x: 4096 / 64, y: 1, z: 1 },\n });\n\n return [outputTensor];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Relu\",\n backend: \"webgpu\",\n opsetMin: 1,\n factory: () => new WebGPUUnary(\"relu\", webgpuShaders.relu),\n },\n ];\n}\n" }, { "alpha_fraction": 0.611940324306488, "alphanum_fraction": 0.619841992855072, "avg_line_length": 30.63888931274414, "blob_id": "c8ec20cdb2a717ecb57c477e62e2ad7ac0edfe70", "content_id": "be4eaf75673a14ba01b16c99dc2d0c123c9877ac", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1153, "license_type": "permissive", "max_line_length": 75, "num_lines": 36, "path": "/src/descriptor_runner/operators/base/transpose.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { onnx } from \"onnx-proto\";\nimport { OperatorImpl } from \"../operatorImpl\";\nimport { Tensor } from \"../../interface/core/tensor\";\nimport { getAttrInts } from \"../operatorUtil\";\n\n// Opset 1 (13はdifferentiableがついただけ)\nexport abstract class Transpose extends OperatorImpl {\n perm!: number[];\n\n initialize(attribute: onnx.IAttributeProto[]): void {\n super.initialize(attribute);\n this.perm = getAttrInts(attribute, \"perm\", []);\n }\n\n protected calcShape(input: Tensor): {\n outShape: number[];\n inStrides: number[];\n } {\n // Default perm: [ndim-1, ndim-2, ..., 0]\n const perm =\n this.perm.length > 0\n ? 
this.perm\n : Array.from({ length: input.ndim }, (v, i) => input.ndim - 1 - i);\n if (perm.length !== input.ndim) {\n throw new Error();\n }\n const outShape: number[] = new Array(input.ndim),\n inStrides: number[] = new Array(input.ndim);\n for (let outAxis = 0; outAxis < input.ndim; outAxis++) {\n const inAxis = perm[outAxis];\n outShape[outAxis] = input.dims[inAxis];\n inStrides[outAxis] = input.strides[inAxis];\n }\n return { outShape, inStrides };\n }\n}\n" }, { "alpha_fraction": 0.5451708436012268, "alphanum_fraction": 0.5617454648017883, "avg_line_length": 25.668485641479492, "blob_id": "0f9529eaf7a7bb08d09c1e757acf80cc0eba6f0c", "content_id": "570130548443f408b321e1ec01c84c7d05b8dbcc", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 19578, "license_type": "permissive", "max_line_length": 107, "num_lines": 733, "path": "/src/descriptor_runner/operators/webgl/operators/standard/conv.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import {\n WebDNNWebGLContext,\n WebGLUniformItem,\n} from \"../../../../interface/backend/webgl/webglContext\";\nimport { WebGLTensor } from \"../../../../interface/backend/webgl/webglTensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { arange } from \"../../../../util\";\nimport { Conv } from \"../../../base/conv\";\nimport {\n shaderGenHeader,\n shaderGenOutput,\n shaderGenTensorNDGet,\n shaderGenTensorNDGetUniformItem,\n shaderGenTensorOutputCoordsWithReturn,\n shaderGenTensorOutputUniform,\n shaderGenTensorOutputUniformItem,\n} from \"../../shaderHelper\";\n\nconst IM2COL_SPLIT_NUMEL = 4194304;\n\nexport class WebGLConv extends Conv {\n constructor() {\n super(\"webgl\");\n }\n\n async run(context: WebDNNWebGLContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsWebGLTensorArray(inputs);\n const inputX = inputs[0],\n inputW = inputs[1],\n inputB = inputs[2];\n // TODO: support dimensions other than 2D\n if (inputX.ndim !== 4) {\n throw new Error(\"Conv other than 2D is not yet supported\");\n }\n const {\n batch,\n dilations,\n group,\n kernelShape,\n pads,\n strides,\n inShape,\n outShape,\n chIn,\n chInPerGroup,\n chOut,\n chOutPerGroup,\n } = this.calcShape(inputX.dims, inputW.dims);\n if (\n inputX.dimPerPixel !== 1 ||\n inputW.dimPerPixel !== 1 ||\n (inputB && inputB.dimPerPixel !== 1)\n ) {\n throw new Error();\n }\n\n const im2colLengthPerOutRow =\n group *\n batch *\n outShape[1] *\n chInPerGroup *\n kernelShape[0] *\n kernelShape[1];\n const im2colLength = im2colLengthPerOutRow * outShape[0];\n let matmulData: WebGLTensor;\n if (im2colLength > IM2COL_SPLIT_NUMEL) {\n const chunkCount = Math.ceil(im2colLength / IM2COL_SPLIT_NUMEL);\n const defaultChunkSize = Math.ceil(outShape[0] / chunkCount);\n const chunkInfos: { offset: number; length: number }[] = [];\n const matmulOutputs: WebGLTensor[] = [];\n // split by outShape0 -> im2col -> matmul -> concat\n for (let chunk = 0; chunk < chunkCount; chunk++) {\n const chunkOffset = chunk * defaultChunkSize;\n const chunkSize = Math.min(defaultChunkSize, outShape[0] - chunkOffset);\n chunkInfos.push({ offset: chunkOffset, length: chunkSize });\n const im2colData = context.emptyTensor([\n im2colLengthPerOutRow * chunkSize,\n ]);\n await this.im2colSplit(\n context,\n inputX,\n im2colData,\n batch,\n dilations,\n group,\n kernelShape,\n pads,\n strides,\n inShape,\n outShape,\n chIn,\n chInPerGroup,\n chOut,\n 
chOutPerGroup,\n chunkOffset,\n chunkSize\n );\n const matmulChunkData = context.emptyTensor([\n group * batch * chunkSize * outShape[1] * chOutPerGroup,\n ]);\n await this.matmul(\n context,\n im2colData,\n inputW,\n matmulChunkData,\n group,\n batch * chunkSize * outShape[1],\n chInPerGroup * kernelShape[0] * kernelShape[1],\n chOutPerGroup\n );\n im2colData.dispose();\n matmulOutputs.push(matmulChunkData);\n }\n matmulData = context.emptyTensor([\n group * batch * outShape[0] * outShape[1] * chOutPerGroup,\n ]);\n await this.concat(\n context,\n matmulOutputs,\n matmulData,\n group * batch,\n outShape[0],\n outShape[1] * chOutPerGroup,\n chunkInfos\n );\n matmulOutputs.forEach((mO) => mO.dispose());\n } else {\n const im2colData = context.emptyTensor([\n group *\n batch *\n outShape[0] *\n outShape[1] *\n chInPerGroup *\n kernelShape[0] *\n kernelShape[1],\n ]);\n await this.im2col(\n context,\n inputX,\n im2colData,\n batch,\n dilations,\n group,\n kernelShape,\n pads,\n strides,\n inShape,\n outShape,\n chIn,\n chInPerGroup,\n chOut,\n chOutPerGroup\n );\n matmulData = context.emptyTensor([\n group * batch * outShape[0] * outShape[1] * chOutPerGroup,\n ]);\n await this.matmul(\n context,\n im2colData,\n inputW,\n matmulData,\n group,\n batch * outShape[0] * outShape[1],\n chInPerGroup * kernelShape[0] * kernelShape[1],\n chOutPerGroup\n );\n im2colData.dispose();\n }\n\n const output = context.emptyTensor([\n batch,\n chOut,\n outShape[0],\n outShape[1],\n ]);\n if (inputB) {\n const transposeData = context.emptyTensor([\n batch * chOut * outShape[0] * outShape[1],\n ]);\n\n await this.transpose(\n context,\n matmulData,\n transposeData,\n group,\n batch,\n outShape[0] * outShape[1],\n chOutPerGroup\n );\n matmulData.dispose();\n await this.bias(\n context,\n transposeData,\n inputB,\n output,\n batch,\n chOut,\n outShape[0] * outShape[1]\n );\n transposeData.dispose();\n } else {\n await this.transpose(\n context,\n matmulData,\n output,\n group,\n batch,\n outShape[0] * outShape[1],\n chOutPerGroup\n );\n matmulData.dispose();\n }\n return [output];\n }\n\n private async im2col(\n context: WebDNNWebGLContext,\n dX: WebGLTensor,\n dI: WebGLTensor,\n batch: number,\n dilations: number[],\n group: number,\n kernelShape: number[],\n pads: number[],\n strides: number[],\n inShape: number[],\n outShape: number[],\n chIn: number,\n chInPerGroup: number,\n chOut: number,\n chOutPerGroup: number\n ) {\n const kernelName = `conv_im2col`;\n if (!context.hasKernel(kernelName)) {\n const kernelSource = `${shaderGenHeader(context.webgl2)}\n \n ${shaderGenTensorOutputUniform(1)}\n uniform int GROUP;\n uniform int BATCH;\n uniform int O0;\n uniform int O1;\n uniform int CI;\n uniform int CIPG;\n uniform int K0;\n uniform int K1;\n uniform int S0;\n uniform int S1;\n uniform int P0;\n uniform int P1;\n uniform int D0;\n uniform int D1;\n uniform int IS0;\n uniform int IS1;\n \n ${shaderGenTensorNDGet(\"tex_input\", 1, context.webgl2)}\n \n void main() {\n ${shaderGenTensorOutputCoordsWithReturn(1)}\n // decompose the flat output index; k1 is the innermost axis\n int rem = tex_output_flat;\n int quo = rem / K1;\n int k1 = rem - quo * K1;\n rem = quo;\n quo = rem / K0;\n int k0 = rem - quo * K0;\n rem = quo;\n quo = rem / CIPG;\n int ci = rem - quo * CIPG;\n rem = quo;\n quo = rem / O1;\n int o1 = rem - quo * O1;\n rem = quo;\n quo = rem / O0;\n int o0 = rem - quo * O0;\n rem = quo;\n quo = rem / BATCH;\n int b = rem - quo * BATCH;\n int g = quo;\n \n int in0 = o0 * S0 - P0 + k0 * D0;\n int in1 = o1 * S1 - P1 + k1 * D1;\n float s = 0.0;\n if (in0 >= 0 && in0 < 
IS0 && in1 >= 0 && in1 < IS1) {\n s = get_tex_input(((b * CI + g * CIPG + ci) * IS0 + in0) * IS1 + in1);\n }\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n }\n `;\n context.addKernel(kernelName, kernelSource);\n }\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\"tex_input\", [1], dX, context.webgl2),\n ...shaderGenTensorOutputUniformItem([dI.length], dI, context.webgl2),\n { name: \"GROUP\", type: \"int\", value: group },\n { name: \"BATCH\", type: \"int\", value: batch },\n { name: \"O0\", type: \"int\", value: outShape[0] },\n { name: \"O1\", type: \"int\", value: outShape[1] },\n { name: \"CI\", type: \"int\", value: chIn },\n { name: \"CIPG\", type: \"int\", value: chInPerGroup },\n { name: \"K0\", type: \"int\", value: kernelShape[0] },\n { name: \"K1\", type: \"int\", value: kernelShape[1] },\n { name: \"S0\", type: \"int\", value: strides[0] },\n { name: \"S1\", type: \"int\", value: strides[1] },\n { name: \"P0\", type: \"int\", value: pads[0] },\n { name: \"P1\", type: \"int\", value: pads[1] },\n { name: \"D0\", type: \"int\", value: dilations[0] },\n { name: \"D1\", type: \"int\", value: dilations[1] },\n { name: \"IS0\", type: \"int\", value: inShape[0] },\n { name: \"IS1\", type: \"int\", value: inShape[1] },\n ];\n await context.runKernel(\n kernelName,\n [{ tensor: dX, name: \"tex_input\" }],\n dI,\n uniforms\n );\n }\n\n private async im2colSplit(\n context: WebDNNWebGLContext,\n dX: WebGLTensor,\n dI: WebGLTensor,\n batch: number,\n dilations: number[],\n group: number,\n kernelShape: number[],\n pads: number[],\n strides: number[],\n inShape: number[],\n outShape: number[],\n chIn: number,\n chInPerGroup: number,\n chOut: number,\n chOutPerGroup: number,\n outShape0Offset: number,\n outShape0ChunkSize: number\n ) {\n const kernelName = `conv_im2col_split`;\n if (!context.hasKernel(kernelName)) {\n const kernelSource = `${shaderGenHeader(context.webgl2)}\n \n ${shaderGenTensorOutputUniform(1)}\n uniform int GROUP;\n uniform int BATCH;\n uniform int O0;\n uniform int O1;\n uniform int CI;\n uniform int CIPG;\n uniform int K0;\n uniform int K1;\n uniform int S0;\n uniform int S1;\n uniform int P0;\n uniform int P1;\n uniform int D0;\n uniform int D1;\n uniform int IS0;\n uniform int IS1;\n uniform int O0OFS;\n uniform int O0CHUNK;\n \n ${shaderGenTensorNDGet(\"tex_input\", 1, context.webgl2)}\n \n void main() {\n ${shaderGenTensorOutputCoordsWithReturn(1)}\n // decompose the flat output index; k1 is the innermost axis\n int rem = tex_output_flat;\n int quo = rem / K1;\n int k1 = rem - quo * K1;\n rem = quo;\n quo = rem / K0;\n int k0 = rem - quo * K0;\n rem = quo;\n quo = rem / CIPG;\n int ci = rem - quo * CIPG;\n rem = quo;\n quo = rem / O1;\n int o1 = rem - quo * O1;\n rem = quo;\n quo = rem / O0CHUNK;\n int o0 = rem - quo * O0CHUNK + O0OFS;\n rem = quo;\n quo = rem / BATCH;\n int b = rem - quo * BATCH;\n int g = quo;\n \n int in0 = o0 * S0 - P0 + k0 * D0;\n int in1 = o1 * S1 - P1 + k1 * D1;\n float s = 0.0;\n if (in0 >= 0 && in0 < IS0 && in1 >= 0 && in1 < IS1) {\n s = get_tex_input(((b * CI + g * CIPG + ci) * IS0 + in0) * IS1 + in1);\n }\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n }\n `;\n context.addKernel(kernelName, kernelSource);\n }\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\"tex_input\", [1], dX, context.webgl2),\n ...shaderGenTensorOutputUniformItem([dI.length], dI, context.webgl2),\n { name: \"GROUP\", type: \"int\", value: group },\n { name: \"BATCH\", type: \"int\", value: batch },\n { name: \"O0\", type: \"int\", value: outShape[0] 
},\n { name: \"O1\", type: \"int\", value: outShape[1] },\n { name: \"CI\", type: \"int\", value: chIn },\n { name: \"CIPG\", type: \"int\", value: chInPerGroup },\n { name: \"K0\", type: \"int\", value: kernelShape[0] },\n { name: \"K1\", type: \"int\", value: kernelShape[1] },\n { name: \"S0\", type: \"int\", value: strides[0] },\n { name: \"S1\", type: \"int\", value: strides[1] },\n { name: \"P0\", type: \"int\", value: pads[0] },\n { name: \"P1\", type: \"int\", value: pads[1] },\n { name: \"D0\", type: \"int\", value: dilations[0] },\n { name: \"D1\", type: \"int\", value: dilations[1] },\n { name: \"IS0\", type: \"int\", value: inShape[0] },\n { name: \"IS1\", type: \"int\", value: inShape[1] },\n { name: \"O0OFS\", type: \"int\", value: outShape0Offset },\n { name: \"O0CHUNK\", type: \"int\", value: outShape0ChunkSize },\n ];\n await context.runKernel(\n kernelName,\n [{ tensor: dX, name: \"tex_input\" }],\n dI,\n uniforms\n );\n }\n\n private async matmul(\n context: WebDNNWebGLContext,\n dI: WebGLTensor,\n dW: WebGLTensor,\n dT: WebGLTensor,\n group: number,\n bout: number,\n cinkhkw: number,\n chOutPerGroup: number\n ) {\n /*\n * DI(group, bout, cinkhkw) * dW(group, coutpergroup, cinkhkw) -> dT(group, bout, coutpergroup)\n * The loop count must be a compile-time constant\n */\n const kernelName = `conv_matmul_${cinkhkw}`;\n if (!context.hasKernel(kernelName)) {\n const kernelSource = `${shaderGenHeader(context.webgl2)}\n \n ${shaderGenTensorOutputUniform(1)}\n #define cinkhkw ${cinkhkw}\n uniform int GROUP;\n uniform int BOUT;\n uniform int COPG;\n \n ${shaderGenTensorNDGet(\"tex_input_w\", 1, context.webgl2)}\n ${shaderGenTensorNDGet(\"tex_input_i\", 1, context.webgl2)}\n \n void main() {\n ${shaderGenTensorOutputCoordsWithReturn(1)}\n int rem = tex_output_flat;\n int quo = rem / COPG;\n int x = rem - quo * COPG;\n rem = quo;\n quo = rem / BOUT;\n int y = rem - quo * BOUT;\n int g = quo;\n \n float s = 0.0;\n for (int ip = 0; ip < cinkhkw; ip++) {\n s += get_tex_input_i((g * BOUT + y) * cinkhkw + ip) * get_tex_input_w((g * COPG + x) * cinkhkw + ip);\n }\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n }\n `;\n context.addKernel(kernelName, kernelSource);\n }\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_w\",\n [1],\n dW,\n context.webgl2\n ),\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_i\",\n [1],\n dI,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem([dT.length], dT, context.webgl2),\n { name: \"GROUP\", type: \"int\", value: group },\n { name: \"BOUT\", type: \"int\", value: bout },\n { name: \"COPG\", type: \"int\", value: chOutPerGroup },\n ];\n await context.runKernel(\n kernelName,\n [\n { tensor: dW, name: \"tex_input_w\" },\n { tensor: dI, name: \"tex_input_i\" },\n ],\n dT,\n uniforms\n );\n }\n\n private async concat(\n context: WebDNNWebGLContext,\n dCs: WebGLTensor[],\n dO: WebGLTensor,\n outerLength: number,\n concatLength: number,\n innerLength: number,\n chunks: { offset: number; length: number }[]\n ): Promise<void> {\n const kernelName = `conv_concat_${chunks.length}`;\n if (!context.hasKernel(kernelName)) {\n const getEach = arange(chunks.length)\n .map((i) => shaderGenTensorNDGet(`tex_input_${i}`, 3, context.webgl2))\n .join(\"\");\n const uniformChunks = arange(chunks.length)\n .map((i) => `uniform int CHUNK_OFS${i};`)\n .join(\"\");\n let takeCode = `\nif (tex_output_1 < CHUNK_OFS1) {\n s = get_tex_input_0(tex_output_0, tex_output_1, tex_output_2);\n}\n`;\n for (let i = 1; i < chunks.length - 1; i++) {\n takeCode += ` 
else if (tex_output_1 < CHUNK_OFS${i + 1}) {\n s = get_tex_input_${i}(tex_output_0, tex_output_1 - CHUNK_OFS${i}, tex_output_2);\n}\n`;\n }\n takeCode += `\nelse {\n s = get_tex_input_${\n chunks.length - 1\n }(tex_output_0, tex_output_1 - CHUNK_OFS${chunks.length - 1}, tex_output_2);\n}\n`;\n\n const kernelSource = `${shaderGenHeader(context.webgl2)}\n \n ${shaderGenTensorOutputUniform(3)}\n ${uniformChunks}\n \n ${getEach}\n \n void main() {\n ${shaderGenTensorOutputCoordsWithReturn(3)}\n float s = 0.0;\n\n ${takeCode}\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n }\n `;\n context.addKernel(kernelName, kernelSource);\n }\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorOutputUniformItem(\n [outerLength, concatLength, innerLength],\n dO,\n context.webgl2\n ),\n ];\n for (let i = 0; i < chunks.length; i++) {\n uniforms.push(\n ...shaderGenTensorNDGetUniformItem(\n `tex_input_${i}`,\n [chunks[i].length * innerLength, innerLength, 1],\n dCs[i],\n context.webgl2\n )\n );\n uniforms.push({\n name: `CHUNK_OFS${i}`,\n value: chunks[i].offset,\n type: \"int\",\n });\n }\n await context.runKernel(\n kernelName,\n dCs.map((dC, i) => ({ tensor: dC, name: `tex_input_${i}` })),\n dO,\n uniforms\n );\n }\n\n private async transpose(\n context: WebDNNWebGLContext,\n dT: WebGLTensor,\n dO: WebGLTensor,\n group: number,\n batch: number,\n outarea: number,\n chOutPerGroup: number\n ) {\n // DT(group, batch, outh, outw, choutpergroup) -> dO(batch, group, choutpergroup, outh, outw)\n\n const kernelName = `conv_transpose`;\n if (!context.hasKernel(kernelName)) {\n const kernelSource = `${shaderGenHeader(context.webgl2)}\n \n ${shaderGenTensorOutputUniform(1)}\n uniform int GROUP;\n uniform int BATCH;\n uniform int COPG;\n uniform int OUTAREA;\n \n ${shaderGenTensorNDGet(\"tex_input\", 1, context.webgl2)}\n \n void main() {\n ${shaderGenTensorOutputCoordsWithReturn(1)}\n int rem = tex_output_flat;\n int quo = rem / OUTAREA;\n int x = rem - quo * OUTAREA;\n rem = quo;\n quo = rem / COPG;\n int c = rem - quo * COPG;\n rem = quo;\n quo = rem / GROUP;\n int g = rem - quo * GROUP;\n int b = quo;\n \n float s = 0.0;\n s = get_tex_input(((g * BATCH + b) * OUTAREA + x) * COPG + c);\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n }\n `;\n context.addKernel(kernelName, kernelSource);\n }\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\"tex_input\", [1], dT, context.webgl2),\n ...shaderGenTensorOutputUniformItem([dO.length], dO, context.webgl2),\n { name: \"GROUP\", type: \"int\", value: group },\n { name: \"BATCH\", type: \"int\", value: batch },\n { name: \"COPG\", type: \"int\", value: chOutPerGroup },\n { name: \"OUTAREA\", type: \"int\", value: outarea },\n ];\n await context.runKernel(\n kernelName,\n [{ tensor: dT, name: \"tex_input\" }],\n dO,\n uniforms\n );\n }\n\n private async bias(\n context: WebDNNWebGLContext,\n dI: WebGLTensor,\n dB: WebGLTensor,\n dO: WebGLTensor,\n batch: number,\n chOut: number,\n outarea: number\n ) {\n const kernelName = `conv_bias`;\n if (!context.hasKernel(kernelName)) {\n const kernelSource = `${shaderGenHeader(context.webgl2)}\n \n ${shaderGenTensorOutputUniform(1)}\n uniform int BATCH;\n uniform int COUT;\n uniform int OUTAREA;\n \n ${shaderGenTensorNDGet(\"tex_input_i\", 1, context.webgl2)}\n ${shaderGenTensorNDGet(\"tex_input_b\", 1, context.webgl2)}\n \n void main() {\n ${shaderGenTensorOutputCoordsWithReturn(1)}\n int rem = tex_output_flat;\n int quo = rem / OUTAREA;\n int x = rem - quo * OUTAREA;\n rem = 
quo;\n quo = rem / COUT;\n int c = rem - quo * COUT;\n int b = quo;\n \n float s = 0.0;\n s = get_tex_input_i(tex_output_flat) + get_tex_input_b(c);\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n }\n `;\n context.addKernel(kernelName, kernelSource);\n }\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_i\",\n [1],\n dI,\n context.webgl2\n ),\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_b\",\n [1],\n dB,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem([dO.length], dO, context.webgl2),\n { name: \"BATCH\", type: \"int\", value: batch },\n { name: \"COUT\", type: \"int\", value: chOut },\n { name: \"OUTAREA\", type: \"int\", value: outarea },\n ];\n await context.runKernel(\n kernelName,\n [\n { tensor: dI, name: \"tex_input_i\" },\n { tensor: dB, name: \"tex_input_b\" },\n ],\n dO,\n uniforms\n );\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Conv\",\n backend: \"webgl\",\n opsetMin: 1,\n factory: () => new WebGLConv(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.592616081237793, "alphanum_fraction": 0.5992711186408997, "avg_line_length": 31.035533905029297, "blob_id": "da46c405d62848062e32658b28cd501e91caaa0b", "content_id": "0d35e0bfc07357db78b0e9e2e2f2272e41943d91", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 6311, "license_type": "permissive", "max_line_length": 200, "num_lines": 197, "path": "/example/benchmark/runner/bench.js", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "function wait() {\n return new Promise((resolve) => {\n setTimeout(resolve, 1);\n });\n}\n\nasync function runOnce(runner, expectedTensors, validateResult, runnerOptions=undefined) {\n const inputTensors = runner\n .getInputNames()\n .map((iname) => expectedTensors.get(iname));\n const startTime = Date.now();\n const outputTensors = await runner.run(inputTensors, runnerOptions);\n const endTime = Date.now();\n let errorMessage = null;\n if (validateResult) {\n const isClose = (expected, actual, name) => {\n if (expected.dims.length !== actual.dims.length) {\n return `${name}: expected.dims(${expected.dims}) !== actual.dims(${actual.dims})`;\n }\n if (expected.dims.some((nd, i) => nd !== actual.dims[i])) {\n return `${name}: expected.dims(${expected.dims}) !== actual.dims(${actual.dims})`;\n }\n\n if (expected.data.length !== actual.data.length) {\n return `${name}: data length mismatch`;\n }\n\n let foundError = null;\n let diffSum = 0.0;\n let expectedSum = 0.0;\n for (let i = 0; i < expected.data.length; i++) {\n const e = expected.data[i];\n const a = actual.data[i];\n const diff = Math.abs(e - a);\n diffSum += diff;\n expectedSum += Math.abs(e);\n if (!(diff <= Math.abs(e) * 1e-2 + 1e-3)) {\n if (!foundError) {\n foundError = `${name}: index ${i}, expected ${e} !== actual ${a}`;\n }\n }\n }\n if (foundError) {\n foundError += ` abs mean of diff = ${diffSum / expected.data.length}, abs mean of expected = ${expectedSum / expected.data.length}`;\n }\n\n return foundError;\n };\n const outputNames = runner.getOutputNames();\n for (let i = 0; i < outputNames.length; i++) {\n const oname = outputNames[i];\n errorMessage = isClose(\n expectedTensors.get(oname),\n outputTensors[i],\n oname\n );\n if (errorMessage) {\n break;\n }\n }\n }\n\n return { time: endTime - startTime, validationError: errorMessage };\n}\n\nfunction displayMessage(message) {\n document.getElementById(\"result\").innerText = 
message;\n}\n\nasync function runBenchmark(optimized, measure) {\n  try {\n    const backend = document.getElementById(\"backend\").value;\n    const model = document.getElementById(\"model\").value;\n    if (!model || !backend) {\n      return;\n    }\n    const webgl_max_allocation_bytes = document.getElementById(\n      \"webgl_max_allocation_bytes\"\n    ).value;\n    const webgl_deallocate_to_bytes = document.getElementById(\n      \"webgl_deallocate_to_bytes\"\n    ).value;\n    const webgl_version = document.getElementById(\"webgl_version\").value;\n    location.hash = `#backend=${backend}&model=${model}&webgl_max_allocation_bytes=${webgl_max_allocation_bytes}&webgl_deallocate_to_bytes=${webgl_deallocate_to_bytes}&webgl_version=${webgl_version}`;\n    const validateResult = document.getElementById(\n      \"enableValidateResult\"\n    ).checked;\n    displayMessage(\"Running benchmark\");\n\n    const backendOrder = backend === \"cpu\" ? [backend] : [backend, \"cpu\"];\n    const directory = `./model/${model}/`;\n\n    if (measure) {\n      const logging = WebDNN.Logging.getInstance();\n      logging.config({\n        adapters: {\n          console: {\n            adapter: \"console\",\n            loglevel: {\n              \"\": WebDNN.Logging.WARN,\n            },\n          },\n          file: {\n            adapter: \"file\",\n            loglevel: {\n              \"\": WebDNN.Logging.DEBUG,\n            },\n          },\n        },\n      });\n    }\n\n    const backendOptions = {\n      webgl: {\n        maxAllocationBytes: Number(webgl_max_allocation_bytes) * 1024 * 1024,\n        deallocateToBytes: Number(webgl_deallocate_to_bytes) * 1024 * 1024,\n        versionOrder: webgl_version ? [webgl_version] : undefined,\n      },\n    };\n\n    const runner = await WebDNN.load(\n      optimized ? `${directory}optimized/` : directory,\n      { backendOrder, optimized, backendOptions }\n    );\n\n    const expectedTensors = await runner\n      .getTensorLoader(directory + \"expected.bin\")\n      .loadAll();\n\n    // warm up\n    // running the model multiple times lets the JIT optimize the JavaScript part\n    for (let i = 0; i < 3; i++) {\n      console.log(`Warmup ${i}`);\n      const warmupResult = await runOnce(\n        runner,\n        expectedTensors,\n        validateResult\n      );\n      if (warmupResult.validationError) {\n        displayMessage(\n          `Output validation error: ${warmupResult.validationError}`\n        );\n        return;\n      }\n      await wait();\n    }\n\n    if (measure) {\n      const logging = WebDNN.Logging.getInstance();\n      logging.clear();\n    }\n\n    const nTrial = measure ? 
1 : 10;\n const times = [];\n for (let i = 0; i < nTrial; i++) {\n console.log(`Trial ${i}`);\n const trialResult = await runOnce(runner, expectedTensors, false, {measurePerformance: !!measure});\n await wait();\n times.push(trialResult.time);\n }\n\n const avg = times.reduce((p, c) => p + c, 0) / nTrial;\n const max = Math.max(...times);\n const min = Math.min(...times);\n displayMessage(\n `Model ${model}, backend ${backend}, average ${avg} ms, min ${min} ms, max ${max} ms`\n );\n\n if (measure) {\n const logging = WebDNN.Logging.getInstance();\n logging.adapters.file.saveToLocalFile();\n }\n } catch (error) {\n alert(`Error: ${error.message}`);\n displayMessage(`Error: ${error.message}`);\n }\n}\n\nwindow.addEventListener(\"DOMContentLoaded\", async () => {\n const cases = await (await fetch(\"./model/cases.json\")).json();\n const modelDom = document.getElementById(\"model\");\n for (const caseName of cases) {\n const opt = document.createElement(\"option\");\n opt.value = caseName;\n opt.innerText = caseName;\n modelDom.appendChild(opt);\n }\n const usp = new URLSearchParams(location.hash.substring(1));\n document.getElementById(\"backend\").value = usp.get(\"backend\") || \"webgl\";\n document.getElementById(\"model\").value = usp.get(\"model\") || \"\";\n document.getElementById(\"webgl_max_allocation_bytes\").value =\n usp.get(\"webgl_max_allocation_bytes\") || \"1024\";\n document.getElementById(\"webgl_deallocate_to_bytes\").value =\n usp.get(\"webgl_deallocate_to_bytes\") || \"512\";\n document.getElementById(\"webgl_version\").value =\n usp.get(\"webgl_version\") || \"\";\n});\n" }, { "alpha_fraction": 0.637714684009552, "alphanum_fraction": 0.6416776776313782, "avg_line_length": 31.913043975830078, "blob_id": "eee0555f344de294bb7396601e5c4858eb2eee4a", "content_id": "4780dc3b419d0b363a1450e5d33ecc2935e2258b", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3028, "license_type": "permissive", "max_line_length": 154, "num_lines": 92, "path": "/src/graph_transpiler/webdnn/model.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "# Parses ONNX model into internal representation for optimization.\n# TODO: implement them\n\nfrom typing import Dict, List, Optional\nimport numpy as np\nimport onnx\nfrom webdnn.util import make_random_identifier\n\ndef onnx_data_type_to_np(data_type: int):\n if data_type == onnx.TensorProto.FLOAT:\n return np.float32\n elif data_type == onnx.TensorProto.INT64:\n return np.int64\n elif data_type == onnx.TensorProto.INT32:\n return np.int32\n raise ValueError\n\nclass Variable:\n name: str\n dims: Optional[List[int]]# TODO: support dynamic shape\n data_type: Optional[int]# onnx.TensorProto.{FLOAT, INT64, ...}\n axis_order: Optional[List[int]]# for optimization\n input_to: List[\"Operator\"]\n output_from: Optional[\"Operator\"]\n\n def __init__(self, name: Optional[str], dims: Optional[List[int]], data_type: Optional[int], *, axis_order: Optional[List[int]]=None) -> None:\n self.name = name if name is not None else make_random_identifier()\n self.dims = dims\n self.data_type = data_type\n if axis_order is None:\n if dims is not None:\n axis_order = list(range(len(dims)))\n self.axis_order = axis_order\n self.input_to = []\n self.output_from = None\n \n @property\n def default_order(self) -> bool:\n raise NotImplementedError\n\nclass ConstantVariable(Variable):\n data: np.ndarray\n def __init__(self, dims: List[int], data_type: int, *, axis_order: 
Optional[List[int]]=None, data: Optional[np.ndarray]=None) -> None:\n        super().__init__(None, dims, data_type, axis_order=axis_order)\n        if data is None:\n            data = np.zeros(dims, dtype=onnx_data_type_to_np(data_type))\n        self.data = data\n\nclass OperatorAttribute:\n    f: Optional[float]\n    i: Optional[int]\n    s: Optional[str]\n    t: ConstantVariable\n    floats: Optional[List[float]]\n    ints: Optional[List[int]]\n    strings: Optional[List[str]]\n    tensors: Optional[List[ConstantVariable]]\n    # g, graphs is not supported\n\nclass Operator:\n    name: str\n    inputs: List[Variable]\n    outputs: List[Variable]\n    op_type: str\n    domain: Optional[str]\n    attributes: Dict[str, OperatorAttribute]\n\n    def __init__(self, name: Optional[str], op_type: str, *, domain: Optional[str]=None, attributes: Optional[Dict[str, OperatorAttribute]]=None) -> None:\n        self.name = name if name is not None else make_random_identifier()\n        self.op_type = op_type\n        self.domain = domain\n        self.attributes = attributes if attributes is not None else {}\n        self.inputs = []\n        self.outputs = []\n\nclass Graph:\n    operators: List[Operator]\n    inputs: List[Variable]\n    outputs: List[Variable]\n\n    def __init__(self) -> None:\n        self.operators = []\n        self.inputs = []\n        self.outputs = []\n\nclass Model:\n    graph: Graph\n    opset_import_version: int\n\n    def __init__(self, graph: Graph, opset_import_version: int) -> None:\n        self.graph = graph\n        self.opset_import_version = opset_import_version\n" }, { "alpha_fraction": 0.7596153616905212, "alphanum_fraction": 0.7852563858032227, "avg_line_length": 19.799999237060547, "blob_id": "6ddc4915b53062f4d1b8f52b7c1ab8e859b6bc18", "content_id": "ef89a7f92588d20c869fc24cc2706d1f92335db3", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 544, "license_type": "permissive", "max_line_length": 102, "num_lines": 15, "path": "/example/minimum/README.ja.md", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "# 最小の実行サンプル\n\n既存のONNXモデルをWebDNNを用いて実行する最小のサンプル。\n\nこのサンプルは、`model/model.onnx`を実行する。このモデルには、`Relu`オペレータだけが含まれている。`make_model.py`に生成方法が記載されている(PyTorchを使用)。\n\n## Webブラウザ上での実行\n\nrepository rootにて\n\n```\nyarn server\n```\n\nを実行。この状態で、Webブラウザで[http://localhost:8080/example/minimum/](http://localhost:8080/example/minimum/)を開く。\n" }, { "alpha_fraction": 0.6203252077102661, "alphanum_fraction": 0.6268292665481567, "avg_line_length": 27.604650497436523, "blob_id": "361e3a4dcf90f8a83184e8ea6ea50ae9eebc029c", "content_id": "4d4b7c4bb4f34c65ce15a440cf4d42b53bcb6b9e", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1230, "license_type": "permissive", "max_line_length": 86, "num_lines": 43, "path": "/src/descriptor_runner/operators/webgl/operators/standard/reshape5.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { WebDNNWebGLContext } from \"../../../../interface/backend/webgl/webglContext\";\nimport { Backend } from \"../../../../interface/core/constants\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { Reshape5 } from \"../../../base/reshape5\";\n\nexport class WebGLReshape5 extends Reshape5 {\n  constructor() {\n    super(\"webgl\");\n  }\n\n  getTensorBackendRequirement(\n    nInputs: number,\n    nOutputs: number\n  ): (Backend | null)[] {\n    return [this.backend, \"cpu\"];\n  }\n\n  async run(context: WebDNNWebGLContext, inputs: Tensor[]): Promise<Tensor[]> {\n    
const input = inputs[0],\n shapeTensor = inputs[1];\n if (!context.cpuContext.isCPUTensor(shapeTensor)) {\n throw new Error(`Reshape: shapeTensor is not on cpu.`);\n }\n if (!context.isWebGLTensor(input)) {\n throw new Error(\"Reshape: input is not on webgl.\");\n }\n const computedShape = this.calcShape(input, shapeTensor);\n\n return [input.alias(computedShape)];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Reshape\",\n backend: \"webgl\",\n opsetMin: 5,\n factory: () => new WebGLReshape5(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.6156748533248901, "alphanum_fraction": 0.6165323257446289, "avg_line_length": 22.897541046142578, "blob_id": "54fba6f4d7d20e66523ab252659c55f82da11da4", "content_id": "1e4e54073a29cefea8fdf5e1d627e53a87451e50", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 5831, "license_type": "permissive", "max_line_length": 79, "num_lines": 244, "path": "/src/descriptor_runner/logging.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "declare global {\n interface Window {\n WebDNNLoggingManagerInstance: WebDNNLogging;\n }\n}\n\nclass WebDNNLogger {\n constructor(public category: string, public logging: WebDNNLogging) {}\n debug(message?: any, ...optionalParams: any[]) {\n this.logging.emit(\n this.category,\n WebDNNLogging.DEBUG,\n message,\n optionalParams\n );\n }\n info(message?: any, ...optionalParams: any[]) {\n this.logging.emit(\n this.category,\n WebDNNLogging.INFO,\n message,\n optionalParams\n );\n }\n warn(message?: any, ...optionalParams: any[]) {\n this.logging.emit(\n this.category,\n WebDNNLogging.WARN,\n message,\n optionalParams\n );\n }\n error(message?: any, ...optionalParams: any[]) {\n this.logging.emit(\n this.category,\n WebDNNLogging.ERROR,\n message,\n optionalParams\n );\n }\n fatal(message?: any, ...optionalParams: any[]) {\n this.logging.emit(\n this.category,\n WebDNNLogging.FATAL,\n message,\n optionalParams\n );\n }\n}\n\ninterface WebDNNLoggingAdapter {\n emit(\n category: string,\n severity: number,\n message: any,\n optionalParams: any[]\n ): void;\n\n clear(): void;\n}\n\nclass WebDNNLoggingAdapterConsole implements WebDNNLoggingAdapter {\n emit(\n category: string,\n severity: number,\n message: any,\n optionalParams: any[]\n ): void {\n const messageWithCategory = `${category}: ${message}`;\n switch (severity) {\n case WebDNNLogging.FATAL:\n console.error(messageWithCategory, ...optionalParams);\n break;\n case WebDNNLogging.ERROR:\n console.error(messageWithCategory, ...optionalParams);\n break;\n case WebDNNLogging.WARN:\n console.warn(messageWithCategory, ...optionalParams);\n break;\n case WebDNNLogging.INFO:\n console.info(messageWithCategory, ...optionalParams);\n break;\n case WebDNNLogging.DEBUG:\n console.debug(messageWithCategory, ...optionalParams);\n break;\n }\n }\n\n // eslint-disable-next-line @typescript-eslint/no-empty-function\n clear(): void {}\n}\n\nclass WebDNNLoggingAdapterFile implements WebDNNLoggingAdapter {\n buffer: {\n category: string;\n severity: number;\n message: any;\n optionalParams: any[];\n }[];\n constructor() {\n this.buffer = [];\n }\n\n emit(\n category: string,\n severity: number,\n message: any,\n optionalParams: any[]\n ): void {\n this.buffer.push({ category, severity, message, optionalParams });\n }\n\n clear(): void {\n this.buffer = [];\n }\n\n saveToLocalFile(): void {\n const content: string[] = this.buffer.map(\n (item) => 
JSON.stringify(item) + \"\\n\"\n );\n const a = document.createElement(\"a\");\n a.href = URL.createObjectURL(new Blob(content, { type: \"text/plain\" }));\n a.download = \"logging.log\";\n\n a.style.display = \"none\";\n document.body.appendChild(a);\n a.click();\n document.body.removeChild(a);\n }\n}\n\ninterface WebDNNLoggingConfigAdapter {\n adapter: string;\n adapterParams?: unknown[];\n loglevel?: Record<string, number>;\n}\n\ninterface WebDNNLoggingConfig {\n adapters: Record<string, WebDNNLoggingConfigAdapter>;\n}\n\nexport class WebDNNLogging {\n static readonly FATAL = 0;\n static readonly ERROR = 1;\n static readonly WARN = 2;\n static readonly INFO = 3;\n static readonly DEBUG = 4;\n\n adapters: Record<string, WebDNNLoggingAdapter>;\n adapterFactories: Record<string, (...params: any[]) => WebDNNLoggingAdapter>;\n currentConfig!: WebDNNLoggingConfig;\n\n constructor() {\n this.adapters = {};\n this.adapterFactories = {\n console: () => new WebDNNLoggingAdapterConsole(),\n file: () => new WebDNNLoggingAdapterFile(),\n };\n this.config({\n adapters: {\n console: {\n adapter: \"console\",\n loglevel: {\n \"\": WebDNNLogging.WARN,\n },\n },\n },\n });\n }\n\n config(config: WebDNNLoggingConfig): void {\n // generate adapters\n this.currentConfig = config;\n const adapters = config.adapters;\n this.adapters = {};\n for (const key of Object.keys(adapters)) {\n const ad = adapters[key];\n const factory = this.adapterFactories[ad.adapter];\n if (!factory) {\n console.error(`Logging adapter ${ad.adapter} not found.`);\n continue;\n }\n try {\n const adinstance = factory(...(ad.adapterParams || []));\n this.adapters[key] = adinstance;\n } catch {\n console.error(`Logging adapter ${ad.adapter} constructor error.`);\n continue;\n }\n }\n }\n\n static getInstance(): WebDNNLogging {\n return window.WebDNNLoggingManagerInstance;\n }\n\n static getLogger(category: string, logging?: WebDNNLogging): WebDNNLogger {\n if (!logging) {\n logging = WebDNNLogging.getInstance();\n }\n return new WebDNNLogger(category, logging);\n }\n\n emit(\n category: string,\n severity: number,\n message: any,\n optionalParams: any[]\n ): void {\n for (const key of Object.keys(this.adapters)) {\n // TODO: filter by category\n const ad = this.adapters[key];\n const config = this.currentConfig.adapters[key];\n let match = true;\n const ll = config.loglevel;\n if (ll) {\n const rootLoglevel = ll[\"\"];\n if (rootLoglevel !== undefined) {\n if (severity > rootLoglevel) {\n // high severity value = unimportant\n match = false;\n }\n }\n }\n if (match) {\n ad.emit(category, severity, message, optionalParams);\n }\n }\n }\n\n /**\n * Clear buffered messages\n */\n clear(): void {\n for (const key of Object.keys(this.adapters)) {\n const ad = this.adapters[key];\n ad.clear();\n }\n }\n}\n\nif (typeof window.WebDNNLoggingManagerInstance === \"undefined\") {\n window.WebDNNLoggingManagerInstance = new WebDNNLogging();\n}\n" }, { "alpha_fraction": 0.737193763256073, "alphanum_fraction": 0.7550111413002014, "avg_line_length": 28.933332443237305, "blob_id": "871d885d6d91a02820dee8ffa1535e3cdf21a198", "content_id": "e050fcc135671fff33fd4ad79985ab3b55b110e9", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 449, "license_type": "permissive", "max_line_length": 154, "num_lines": 15, "path": "/example/minimum/README.md", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "# Minimum running example\n\nA minimal sample of running an 
existing ONNX model using WebDNN.\n\nThis example runs `model/model.onnx`. This model contains only the `Relu` operator; the generation method is described in `make_model.py` (using PyTorch).\n\n## Run on a web browser\n\nAt repository root, execute\n\n```\nyarn server\n```\n\nWith this running, open [http://localhost:8080/example/minimum/](http://localhost:8080/example/minimum/) with a web browser.\n" }, { "alpha_fraction": 0.382199764251709, "alphanum_fraction": 0.43409767746925354, "avg_line_length": 29.558195114135742, "blob_id": "70c3e7afbb65a3a6d3fc3f1d862cebae65c34143", "content_id": "740735ea4fc5d79fc9df5478d815eb65a3a", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 38607, "license_type": "permissive", "max_line_length": 80, "num_lines": 1263, "path": "/src/descriptor_runner/operators/cpu/operators/standard/pad11.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { DataArrayTypes } from \"../../../../interface/core/constants\";\nimport { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport { onnx } from \"onnx-proto\";\nimport { getAttrString } from \"../../../operatorUtil\";\nimport { Pad11 } from \"../../../base/pad11\";\n\n/*\n * Opset 11\n * Not compatible with opset 2\n */\nclass CPUPad11 extends Pad11 {\n  constructor() {\n    super(\"cpu\");\n  }\n\n  async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n    context.assertsCPUTensorArray(inputs);\n    const [input, shapeTensor, constantValueTensor] = inputs;\n    const { outputShape, pads } = this.calcShape(input, shapeTensor);\n    let constantValue = 0;\n    if (constantValueTensor) {\n      constantValue = constantValueTensor.data[0];\n    }\n\n    // edge:\n    // [0,1,2,3] -> pad (3,3) -> [0,0,0,*0,1,2,3*,3,3,3]\n    // [0,1,2,3] -> pad (6,6) -> [0,0,0,0,0,0,*0,1,2,3*,3,3,3,3,3,3]\n    // reflect:\n    // [0,1,2,3] -> pad (3,3) -> [3,2,1,*0,1,2,3*,2,1,0]\n    // [0,1,2,3] -> pad (6,6) -> [0,1,2,3,2,1,*0,1,2,3*,2,1,0,1,2,3]\n    // [0,1,2,3] -> pad (8,8) -> [2,1,0,1,2,3,2,1,*0,1,2,3*,2,1,0,1,2,3,2,1]\n    const output = context.emptyTensor(outputShape, input.dataType);\n    let func;\n    switch (this.mode) {\n      case \"constant\":\n        switch (input.ndim) {\n          case 1:\n            func = this.constCopy1d;\n            break;\n          case 2:\n            func = this.constCopy2d;\n            break;\n          case 3:\n            func = this.constCopy3d;\n            break;\n          case 4:\n            func = this.constCopy4d;\n            break;\n          case 5:\n            func = this.constCopy5d;\n            break;\n          case 6:\n            func = this.constCopy6d;\n            break;\n          default:\n            throw new Error(\n              `Pad: input.ndim = ${input.ndim} > 6 is not yet supported`\n            );\n        }\n        break;\n      case \"reflect\":\n        switch (input.ndim) {\n          case 1:\n            func = this.reflectCopy1d;\n            break;\n          case 2:\n            func = this.reflectCopy2d;\n            break;\n          case 3:\n            func = this.reflectCopy3d;\n            break;\n          case 4:\n            func = this.reflectCopy4d;\n            break;\n          case 5:\n            func = this.reflectCopy5d;\n            break;\n          case 6:\n            func = this.reflectCopy6d;\n            break;\n          default:\n            throw new Error(\n              `Pad: input.ndim = ${input.ndim} > 6 is not yet supported`\n            );\n        }\n        break;\n      case \"edge\":\n        switch (input.ndim) {\n          case 1:\n            func = this.edgeCopy1d;\n            break;\n          case 2:\n            func = this.edgeCopy2d;\n            break;\n          case 3:\n            func = this.edgeCopy3d;\n            break;\n          case 4:\n            func = this.edgeCopy4d;\n            break;\n          case 5:\n            func = this.edgeCopy5d;\n            break;\n          case 6:\n            func = this.edgeCopy6d;\n            break;\n          default:\n            throw new 
Error(\n `Pad: input.ndim = ${input.ndim} > 6 is not yet supported`\n );\n }\n break;\n }\n func(\n input.data,\n output.data,\n input.dims,\n outputShape,\n input.strides,\n output.strides,\n pads,\n constantValue\n );\n return [output];\n }\n\n constCopy1d(\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n inputShape: ReadonlyArray<number>,\n outputShape: ReadonlyArray<number>,\n inputStrides: ReadonlyArray<number>,\n outputStrides: ReadonlyArray<number>,\n pads: ReadonlyArray<number>,\n constantValue: number\n ) {\n for (let d0 = 0; d0 < outputShape[0]; d0++) {\n const i0 = d0 - pads[0];\n let v: number;\n if (i0 < 0 || i0 >= inputShape[0]) {\n v = constantValue;\n } else {\n v = dI[i0 * inputStrides[0]];\n }\n dO[d0 * outputStrides[0]] = v;\n }\n }\n\n constCopy2d(\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n inputShape: ReadonlyArray<number>,\n outputShape: ReadonlyArray<number>,\n inputStrides: ReadonlyArray<number>,\n outputStrides: ReadonlyArray<number>,\n pads: ReadonlyArray<number>,\n constantValue: number\n ) {\n for (let d0 = 0; d0 < outputShape[0]; d0++) {\n for (let d1 = 0; d1 < outputShape[1]; d1++) {\n const i0 = d0 - pads[0],\n i1 = d1 - pads[1];\n let v: number;\n if (i0 < 0 || i0 >= inputShape[0] || i1 < 0 || i1 >= inputShape[1]) {\n v = constantValue;\n } else {\n v = dI[i0 * inputStrides[0] + i1 * inputStrides[1]];\n }\n dO[d0 * outputStrides[0] + d1 * outputStrides[1]] = v;\n }\n }\n }\n\n constCopy3d(\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n inputShape: ReadonlyArray<number>,\n outputShape: ReadonlyArray<number>,\n inputStrides: ReadonlyArray<number>,\n outputStrides: ReadonlyArray<number>,\n pads: ReadonlyArray<number>,\n constantValue: number\n ) {\n for (let d0 = 0; d0 < outputShape[0]; d0++) {\n for (let d1 = 0; d1 < outputShape[1]; d1++) {\n for (let d2 = 0; d2 < outputShape[2]; d2++) {\n const i0 = d0 - pads[0],\n i1 = d1 - pads[1],\n i2 = d2 - pads[2];\n let v: number;\n if (\n i0 < 0 ||\n i0 >= inputShape[0] ||\n i1 < 0 ||\n i1 >= inputShape[1] ||\n i2 < 0 ||\n i2 >= inputShape[2]\n ) {\n v = constantValue;\n } else {\n v =\n dI[\n i0 * inputStrides[0] +\n i1 * inputStrides[1] +\n i2 * inputStrides[2]\n ];\n }\n dO[\n d0 * outputStrides[0] +\n d1 * outputStrides[1] +\n d2 * outputStrides[2]\n ] = v;\n }\n }\n }\n }\n\n constCopy4d(\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n inputShape: ReadonlyArray<number>,\n outputShape: ReadonlyArray<number>,\n inputStrides: ReadonlyArray<number>,\n outputStrides: ReadonlyArray<number>,\n pads: ReadonlyArray<number>,\n constantValue: number\n ) {\n for (let d0 = 0; d0 < outputShape[0]; d0++) {\n for (let d1 = 0; d1 < outputShape[1]; d1++) {\n for (let d2 = 0; d2 < outputShape[2]; d2++) {\n for (let d3 = 0; d3 < outputShape[3]; d3++) {\n const i0 = d0 - pads[0],\n i1 = d1 - pads[1],\n i2 = d2 - pads[2],\n i3 = d3 - pads[3];\n let v: number;\n if (\n i0 < 0 ||\n i0 >= inputShape[0] ||\n i1 < 0 ||\n i1 >= inputShape[1] ||\n i2 < 0 ||\n i2 >= inputShape[2] ||\n i3 < 0 ||\n i3 >= inputShape[3]\n ) {\n v = constantValue;\n } else {\n v =\n dI[\n i0 * inputStrides[0] +\n i1 * inputStrides[1] +\n i2 * inputStrides[2] +\n i3 * inputStrides[3]\n ];\n }\n dO[\n d0 * outputStrides[0] +\n d1 * outputStrides[1] +\n d2 * outputStrides[2] +\n d3 * outputStrides[3]\n ] = v;\n }\n }\n }\n }\n }\n\n constCopy5d(\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n inputShape: ReadonlyArray<number>,\n outputShape: ReadonlyArray<number>,\n inputStrides: ReadonlyArray<number>,\n outputStrides: ReadonlyArray<number>,\n pads: 
ReadonlyArray<number>,\n constantValue: number\n ) {\n for (let d0 = 0; d0 < outputShape[0]; d0++) {\n for (let d1 = 0; d1 < outputShape[1]; d1++) {\n for (let d2 = 0; d2 < outputShape[2]; d2++) {\n for (let d3 = 0; d3 < outputShape[3]; d3++) {\n for (let d4 = 0; d4 < outputShape[4]; d4++) {\n const i0 = d0 - pads[0],\n i1 = d1 - pads[1],\n i2 = d2 - pads[2],\n i3 = d3 - pads[3],\n i4 = d4 - pads[4];\n let v: number;\n if (\n i0 < 0 ||\n i0 >= inputShape[0] ||\n i1 < 0 ||\n i1 >= inputShape[1] ||\n i2 < 0 ||\n i2 >= inputShape[2] ||\n i3 < 0 ||\n i3 >= inputShape[3] ||\n i4 < 0 ||\n i4 >= inputShape[4]\n ) {\n v = constantValue;\n } else {\n v =\n dI[\n i0 * inputStrides[0] +\n i1 * inputStrides[1] +\n i2 * inputStrides[2] +\n i3 * inputStrides[3] +\n i4 * inputStrides[4]\n ];\n }\n dO[\n d0 * outputStrides[0] +\n d1 * outputStrides[1] +\n d2 * outputStrides[2] +\n d3 * outputStrides[3] +\n d4 * outputStrides[4]\n ] = v;\n }\n }\n }\n }\n }\n }\n\n constCopy6d(\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n inputShape: ReadonlyArray<number>,\n outputShape: ReadonlyArray<number>,\n inputStrides: ReadonlyArray<number>,\n outputStrides: ReadonlyArray<number>,\n pads: ReadonlyArray<number>,\n constantValue: number\n ) {\n for (let d0 = 0; d0 < outputShape[0]; d0++) {\n for (let d1 = 0; d1 < outputShape[1]; d1++) {\n for (let d2 = 0; d2 < outputShape[2]; d2++) {\n for (let d3 = 0; d3 < outputShape[3]; d3++) {\n for (let d4 = 0; d4 < outputShape[4]; d4++) {\n for (let d5 = 0; d5 < outputShape[5]; d5++) {\n const i0 = d0 - pads[0],\n i1 = d1 - pads[1],\n i2 = d2 - pads[2],\n i3 = d3 - pads[3],\n i4 = d4 - pads[4],\n i5 = d5 - pads[5];\n let v: number;\n if (\n i0 < 0 ||\n i0 >= inputShape[0] ||\n i1 < 0 ||\n i1 >= inputShape[1] ||\n i2 < 0 ||\n i2 >= inputShape[2] ||\n i3 < 0 ||\n i3 >= inputShape[3] ||\n i4 < 0 ||\n i4 >= inputShape[4] ||\n i5 < 0 ||\n i5 >= inputShape[5]\n ) {\n v = constantValue;\n } else {\n v =\n dI[\n i0 * inputStrides[0] +\n i1 * inputStrides[1] +\n i2 * inputStrides[2] +\n i3 * inputStrides[3] +\n i4 * inputStrides[4] +\n i5 * inputStrides[5]\n ];\n }\n dO[\n d0 * outputStrides[0] +\n d1 * outputStrides[1] +\n d2 * outputStrides[2] +\n d3 * outputStrides[3] +\n d4 * outputStrides[4] +\n d5 * outputStrides[5]\n ] = v;\n }\n }\n }\n }\n }\n }\n }\n\n reflectCopy1d(\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n inputShape: ReadonlyArray<number>,\n outputShape: ReadonlyArray<number>,\n inputStrides: ReadonlyArray<number>,\n outputStrides: ReadonlyArray<number>,\n pads: ReadonlyArray<number>\n ) {\n const [inputShape0] = inputShape;\n const [outputShape0] = outputShape;\n const [inputStrides0] = inputStrides;\n const [outputStrides0] = outputStrides;\n const [pads0] = pads;\n for (let d0 = 0; d0 < outputShape0; d0++) {\n let i0 = d0 - pads0;\n if (i0 < 0) {\n i0 = -i0 % (inputShape0 * 2 - 2);\n if (i0 >= inputShape0) {\n i0 = inputShape0 * 2 - i0 - 2;\n }\n } else if (i0 >= inputShape0) {\n i0 = i0 % (inputShape0 * 2 - 2);\n if (i0 >= inputShape0) {\n i0 = inputShape0 * 2 - i0 - 2;\n }\n }\n const v = dI[i0 * inputStrides0];\n dO[d0 * outputStrides0] = v;\n }\n }\n\n reflectCopy2d(\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n inputShape: ReadonlyArray<number>,\n outputShape: ReadonlyArray<number>,\n inputStrides: ReadonlyArray<number>,\n outputStrides: ReadonlyArray<number>,\n pads: ReadonlyArray<number>\n ) {\n const [inputShape0, inputShape1] = inputShape;\n const [outputShape0, outputShape1] = outputShape;\n const [inputStrides0, inputStrides1] = inputStrides;\n const 
[outputStrides0, outputStrides1] = outputStrides;\n const [pads0, pads1] = pads;\n for (let d0 = 0; d0 < outputShape0; d0++) {\n for (let d1 = 0; d1 < outputShape1; d1++) {\n let i0 = d0 - pads0,\n i1 = d1 - pads1;\n if (i0 < 0) {\n i0 = -i0 % (inputShape0 * 2 - 2);\n if (i0 >= inputShape0) {\n i0 = inputShape0 * 2 - i0 - 2;\n }\n } else if (i0 >= inputShape0) {\n i0 = i0 % (inputShape0 * 2 - 2);\n if (i0 >= inputShape0) {\n i0 = inputShape0 * 2 - i0 - 2;\n }\n }\n if (i1 < 0) {\n i1 = -i1 % (inputShape1 * 2 - 2);\n if (i1 >= inputShape1) {\n i1 = inputShape1 * 2 - i1 - 2;\n }\n } else if (i1 >= inputShape1) {\n i1 = i1 % (inputShape1 * 2 - 2);\n if (i1 >= inputShape1) {\n i1 = inputShape1 * 2 - i1 - 2;\n }\n }\n const v = dI[i0 * inputStrides0 + i1 * inputStrides1];\n dO[d0 * outputStrides0 + d1 * outputStrides1] = v;\n }\n }\n }\n\n reflectCopy3d(\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n inputShape: ReadonlyArray<number>,\n outputShape: ReadonlyArray<number>,\n inputStrides: ReadonlyArray<number>,\n outputStrides: ReadonlyArray<number>,\n pads: ReadonlyArray<number>\n ) {\n const [inputShape0, inputShape1, inputShape2] = inputShape;\n const [outputShape0, outputShape1, outputShape2] = outputShape;\n const [inputStrides0, inputStrides1, inputStrides2] = inputStrides;\n const [outputStrides0, outputStrides1, outputStrides2] = outputStrides;\n const [pads0, pads1, pads2] = pads;\n for (let d0 = 0; d0 < outputShape0; d0++) {\n for (let d1 = 0; d1 < outputShape1; d1++) {\n for (let d2 = 0; d2 < outputShape2; d2++) {\n let i0 = d0 - pads0,\n i1 = d1 - pads1,\n i2 = d2 - pads2;\n if (i0 < 0) {\n i0 = -i0 % (inputShape0 * 2 - 2);\n if (i0 >= inputShape0) {\n i0 = inputShape0 * 2 - i0 - 2;\n }\n } else if (i0 >= inputShape0) {\n i0 = i0 % (inputShape0 * 2 - 2);\n if (i0 >= inputShape0) {\n i0 = inputShape0 * 2 - i0 - 2;\n }\n }\n if (i1 < 0) {\n i1 = -i1 % (inputShape1 * 2 - 2);\n if (i1 >= inputShape1) {\n i1 = inputShape1 * 2 - i1 - 2;\n }\n } else if (i1 >= inputShape1) {\n i1 = i1 % (inputShape1 * 2 - 2);\n if (i1 >= inputShape1) {\n i1 = inputShape1 * 2 - i1 - 2;\n }\n }\n if (i2 < 0) {\n i2 = -i2 % (inputShape2 * 2 - 2);\n if (i2 >= inputShape2) {\n i2 = inputShape2 * 2 - i2 - 2;\n }\n } else if (i2 >= inputShape2) {\n i2 = i2 % (inputShape2 * 2 - 2);\n if (i2 >= inputShape2) {\n i2 = inputShape2 * 2 - i2 - 2;\n }\n }\n const v =\n dI[i0 * inputStrides0 + i1 * inputStrides1 + i2 * inputStrides2];\n dO[d0 * outputStrides0 + d1 * outputStrides1 + d2 * outputStrides2] =\n v;\n }\n }\n }\n }\n\n reflectCopy4d(\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n inputShape: ReadonlyArray<number>,\n outputShape: ReadonlyArray<number>,\n inputStrides: ReadonlyArray<number>,\n outputStrides: ReadonlyArray<number>,\n pads: ReadonlyArray<number>\n ) {\n const [inputShape0, inputShape1, inputShape2, inputShape3] = inputShape;\n const [outputShape0, outputShape1, outputShape2, outputShape3] =\n outputShape;\n const [inputStrides0, inputStrides1, inputStrides2, inputStrides3] =\n inputStrides;\n const [outputStrides0, outputStrides1, outputStrides2, outputStrides3] =\n outputStrides;\n const [pads0, pads1, pads2, pads3] = pads;\n for (let d0 = 0; d0 < outputShape0; d0++) {\n for (let d1 = 0; d1 < outputShape1; d1++) {\n for (let d2 = 0; d2 < outputShape2; d2++) {\n for (let d3 = 0; d3 < outputShape3; d3++) {\n let i0 = d0 - pads0,\n i1 = d1 - pads1,\n i2 = d2 - pads2,\n i3 = d3 - pads3;\n if (i0 < 0) {\n i0 = -i0 % (inputShape0 * 2 - 2);\n if (i0 >= inputShape0) {\n i0 = inputShape0 * 2 - i0 - 2;\n }\n } 
else if (i0 >= inputShape0) {\n i0 = i0 % (inputShape0 * 2 - 2);\n if (i0 >= inputShape0) {\n i0 = inputShape0 * 2 - i0 - 2;\n }\n }\n if (i1 < 0) {\n i1 = -i1 % (inputShape1 * 2 - 2);\n if (i1 >= inputShape1) {\n i1 = inputShape1 * 2 - i1 - 2;\n }\n } else if (i1 >= inputShape1) {\n i1 = i1 % (inputShape1 * 2 - 2);\n if (i1 >= inputShape1) {\n i1 = inputShape1 * 2 - i1 - 2;\n }\n }\n if (i2 < 0) {\n i2 = -i2 % (inputShape2 * 2 - 2);\n if (i2 >= inputShape2) {\n i2 = inputShape2 * 2 - i2 - 2;\n }\n } else if (i2 >= inputShape2) {\n i2 = i2 % (inputShape2 * 2 - 2);\n if (i2 >= inputShape2) {\n i2 = inputShape2 * 2 - i2 - 2;\n }\n }\n if (i3 < 0) {\n i3 = -i3 % (inputShape3 * 2 - 2);\n if (i3 >= inputShape3) {\n i3 = inputShape3 * 2 - i3 - 2;\n }\n } else if (i3 >= inputShape3) {\n i3 = i3 % (inputShape3 * 2 - 2);\n if (i3 >= inputShape3) {\n i3 = inputShape3 * 2 - i3 - 2;\n }\n }\n const v =\n dI[\n i0 * inputStrides0 +\n i1 * inputStrides1 +\n i2 * inputStrides2 +\n i3 * inputStrides3\n ];\n dO[\n d0 * outputStrides0 +\n d1 * outputStrides1 +\n d2 * outputStrides2 +\n d3 * outputStrides3\n ] = v;\n }\n }\n }\n }\n }\n reflectCopy5d(\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n inputShape: ReadonlyArray<number>,\n outputShape: ReadonlyArray<number>,\n inputStrides: ReadonlyArray<number>,\n outputStrides: ReadonlyArray<number>,\n pads: ReadonlyArray<number>\n ) {\n const [inputShape0, inputShape1, inputShape2, inputShape3, inputShape4] =\n inputShape;\n const [\n outputShape0,\n outputShape1,\n outputShape2,\n outputShape3,\n outputShape4,\n ] = outputShape;\n const [\n inputStrides0,\n inputStrides1,\n inputStrides2,\n inputStrides3,\n inputStrides4,\n ] = inputStrides;\n const [\n outputStrides0,\n outputStrides1,\n outputStrides2,\n outputStrides3,\n outputStrides4,\n ] = outputStrides;\n const [pads0, pads1, pads2, pads3, pads4] = pads;\n for (let d0 = 0; d0 < outputShape0; d0++) {\n for (let d1 = 0; d1 < outputShape1; d1++) {\n for (let d2 = 0; d2 < outputShape2; d2++) {\n for (let d3 = 0; d3 < outputShape3; d3++) {\n for (let d4 = 0; d4 < outputShape4; d4++) {\n let i0 = d0 - pads0,\n i1 = d1 - pads1,\n i2 = d2 - pads2,\n i3 = d3 - pads3,\n i4 = d4 - pads4;\n if (i0 < 0) {\n i0 = -i0 % (inputShape0 * 2 - 2);\n if (i0 >= inputShape0) {\n i0 = inputShape0 * 2 - i0 - 2;\n }\n } else if (i0 >= inputShape0) {\n i0 = i0 % (inputShape0 * 2 - 2);\n if (i0 >= inputShape0) {\n i0 = inputShape0 * 2 - i0 - 2;\n }\n }\n if (i1 < 0) {\n i1 = -i1 % (inputShape1 * 2 - 2);\n if (i1 >= inputShape1) {\n i1 = inputShape1 * 2 - i1 - 2;\n }\n } else if (i1 >= inputShape1) {\n i1 = i1 % (inputShape1 * 2 - 2);\n if (i1 >= inputShape1) {\n i1 = inputShape1 * 2 - i1 - 2;\n }\n }\n if (i2 < 0) {\n i2 = -i2 % (inputShape2 * 2 - 2);\n if (i2 >= inputShape2) {\n i2 = inputShape2 * 2 - i2 - 2;\n }\n } else if (i2 >= inputShape2) {\n i2 = i2 % (inputShape2 * 2 - 2);\n if (i2 >= inputShape2) {\n i2 = inputShape2 * 2 - i2 - 2;\n }\n }\n if (i3 < 0) {\n i3 = -i3 % (inputShape3 * 2 - 2);\n if (i3 >= inputShape3) {\n i3 = inputShape3 * 2 - i3 - 2;\n }\n } else if (i3 >= inputShape3) {\n i3 = i3 % (inputShape3 * 2 - 2);\n if (i3 >= inputShape3) {\n i3 = inputShape3 * 2 - i3 - 2;\n }\n }\n if (i4 < 0) {\n i4 = -i4 % (inputShape4 * 2 - 2);\n if (i4 >= inputShape4) {\n i4 = inputShape4 * 2 - i4 - 2;\n }\n } else if (i4 >= inputShape4) {\n i4 = i4 % (inputShape4 * 2 - 2);\n if (i4 >= inputShape4) {\n i4 = inputShape4 * 2 - i4 - 2;\n }\n }\n const v =\n dI[\n i0 * inputStrides0 +\n i1 * inputStrides1 +\n i2 * inputStrides2 +\n 
i3 * inputStrides3 +\n i4 * inputStrides4\n ];\n dO[\n d0 * outputStrides0 +\n d1 * outputStrides1 +\n d2 * outputStrides2 +\n d3 * outputStrides3 +\n d4 * outputStrides4\n ] = v;\n }\n }\n }\n }\n }\n }\n\n reflectCopy6d(\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n inputShape: ReadonlyArray<number>,\n outputShape: ReadonlyArray<number>,\n inputStrides: ReadonlyArray<number>,\n outputStrides: ReadonlyArray<number>,\n pads: ReadonlyArray<number>\n ) {\n const [\n inputShape0,\n inputShape1,\n inputShape2,\n inputShape3,\n inputShape4,\n inputShape5,\n ] = inputShape;\n const [\n outputShape0,\n outputShape1,\n outputShape2,\n outputShape3,\n outputShape4,\n outputShape5,\n ] = outputShape;\n const [\n inputStrides0,\n inputStrides1,\n inputStrides2,\n inputStrides3,\n inputStrides4,\n inputStrides5,\n ] = inputStrides;\n const [\n outputStrides0,\n outputStrides1,\n outputStrides2,\n outputStrides3,\n outputStrides4,\n outputStrides5,\n ] = outputStrides;\n const [pads0, pads1, pads2, pads3, pads4, pads5] = pads;\n for (let d0 = 0; d0 < outputShape0; d0++) {\n for (let d1 = 0; d1 < outputShape1; d1++) {\n for (let d2 = 0; d2 < outputShape2; d2++) {\n for (let d3 = 0; d3 < outputShape3; d3++) {\n for (let d4 = 0; d4 < outputShape4; d4++) {\n for (let d5 = 0; d5 < outputShape5; d5++) {\n let i0 = d0 - pads0,\n i1 = d1 - pads1,\n i2 = d2 - pads2,\n i3 = d3 - pads3,\n i4 = d4 - pads4,\n i5 = d5 - pads5;\n if (i0 < 0) {\n i0 = -i0 % (inputShape0 * 2 - 2);\n if (i0 >= inputShape0) {\n i0 = inputShape0 * 2 - i0 - 2;\n }\n } else if (i0 >= inputShape0) {\n i0 = i0 % (inputShape0 * 2 - 2);\n if (i0 >= inputShape0) {\n i0 = inputShape0 * 2 - i0 - 2;\n }\n }\n if (i1 < 0) {\n i1 = -i1 % (inputShape1 * 2 - 2);\n if (i1 >= inputShape1) {\n i1 = inputShape1 * 2 - i1 - 2;\n }\n } else if (i1 >= inputShape1) {\n i1 = i1 % (inputShape1 * 2 - 2);\n if (i1 >= inputShape1) {\n i1 = inputShape1 * 2 - i1 - 2;\n }\n }\n if (i2 < 0) {\n i2 = -i2 % (inputShape2 * 2 - 2);\n if (i2 >= inputShape2) {\n i2 = inputShape2 * 2 - i2 - 2;\n }\n } else if (i2 >= inputShape2) {\n i2 = i2 % (inputShape2 * 2 - 2);\n if (i2 >= inputShape2) {\n i2 = inputShape2 * 2 - i2 - 2;\n }\n }\n if (i3 < 0) {\n i3 = -i3 % (inputShape3 * 2 - 2);\n if (i3 >= inputShape3) {\n i3 = inputShape3 * 2 - i3 - 2;\n }\n } else if (i3 >= inputShape3) {\n i3 = i3 % (inputShape3 * 2 - 2);\n if (i3 >= inputShape3) {\n i3 = inputShape3 * 2 - i3 - 2;\n }\n }\n if (i4 < 0) {\n i4 = -i4 % (inputShape4 * 2 - 2);\n if (i4 >= inputShape4) {\n i4 = inputShape4 * 2 - i4 - 2;\n }\n } else if (i4 >= inputShape4) {\n i4 = i4 % (inputShape4 * 2 - 2);\n if (i4 >= inputShape4) {\n i4 = inputShape4 * 2 - i4 - 2;\n }\n }\n if (i5 < 0) {\n i5 = -i5 % (inputShape5 * 2 - 2);\n if (i5 >= inputShape5) {\n i5 = inputShape5 * 2 - i5 - 2;\n }\n } else if (i5 >= inputShape5) {\n i5 = i5 % (inputShape5 * 2 - 2);\n if (i5 >= inputShape5) {\n i5 = inputShape5 * 2 - i5 - 2;\n }\n }\n const v =\n dI[\n i0 * inputStrides0 +\n i1 * inputStrides1 +\n i2 * inputStrides2 +\n i3 * inputStrides3 +\n i4 * inputStrides4 +\n i5 * inputStrides5\n ];\n dO[\n d0 * outputStrides0 +\n d1 * outputStrides1 +\n d2 * outputStrides2 +\n d3 * outputStrides3 +\n d4 * outputStrides4 +\n d5 * outputStrides5\n ] = v;\n }\n }\n }\n }\n }\n }\n }\n\n edgeCopy1d(\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n inputShape: ReadonlyArray<number>,\n outputShape: ReadonlyArray<number>,\n inputStrides: ReadonlyArray<number>,\n outputStrides: ReadonlyArray<number>,\n pads: ReadonlyArray<number>\n ) {\n const 
[inputShape0] = inputShape;\n const [outputShape0] = outputShape;\n const [inputStrides0] = inputStrides;\n const [outputStrides0] = outputStrides;\n const [pads0] = pads;\n for (let d0 = 0; d0 < outputShape0; d0++) {\n let i0 = d0 - pads0;\n if (i0 < 0) {\n i0 = 0;\n } else if (i0 >= inputShape0) {\n i0 = inputShape0 - 1;\n }\n const v = dI[i0 * inputStrides0];\n dO[d0 * outputStrides0] = v;\n }\n }\n\n edgeCopy2d(\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n inputShape: ReadonlyArray<number>,\n outputShape: ReadonlyArray<number>,\n inputStrides: ReadonlyArray<number>,\n outputStrides: ReadonlyArray<number>,\n pads: ReadonlyArray<number>\n ) {\n const [inputShape0, inputShape1] = inputShape;\n const [outputShape0, outputShape1] = outputShape;\n const [inputStrides0, inputStrides1] = inputStrides;\n const [outputStrides0, outputStrides1] = outputStrides;\n const [pads0, pads1] = pads;\n for (let d0 = 0; d0 < outputShape0; d0++) {\n for (let d1 = 0; d1 < outputShape1; d1++) {\n let i0 = d0 - pads0,\n i1 = d1 - pads1;\n if (i0 < 0) {\n i0 = 0;\n } else if (i0 >= inputShape0) {\n i0 = inputShape0 - 1;\n }\n if (i1 < 0) {\n i1 = 0;\n } else if (i1 >= inputShape1) {\n i1 = inputShape1 - 1;\n }\n const v = dI[i0 * inputStrides0 + i1 * inputStrides1];\n dO[d0 * outputStrides0 + d1 * outputStrides1] = v;\n }\n }\n }\n\n edgeCopy3d(\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n inputShape: ReadonlyArray<number>,\n outputShape: ReadonlyArray<number>,\n inputStrides: ReadonlyArray<number>,\n outputStrides: ReadonlyArray<number>,\n pads: ReadonlyArray<number>\n ) {\n const [inputShape0, inputShape1, inputShape2] = inputShape;\n const [outputShape0, outputShape1, outputShape2] = outputShape;\n const [inputStrides0, inputStrides1, inputStrides2] = inputStrides;\n const [outputStrides0, outputStrides1, outputStrides2] = outputStrides;\n const [pads0, pads1, pads2] = pads;\n for (let d0 = 0; d0 < outputShape0; d0++) {\n for (let d1 = 0; d1 < outputShape1; d1++) {\n for (let d2 = 0; d2 < outputShape2; d2++) {\n let i0 = d0 - pads0,\n i1 = d1 - pads1,\n i2 = d2 - pads2;\n if (i0 < 0) {\n i0 = 0;\n } else if (i0 >= inputShape0) {\n i0 = inputShape0 - 1;\n }\n if (i1 < 0) {\n i1 = 0;\n } else if (i1 >= inputShape1) {\n i1 = inputShape1 - 1;\n }\n if (i2 < 0) {\n i2 = 0;\n } else if (i2 >= inputShape2) {\n i2 = inputShape2 - 1;\n }\n const v =\n dI[i0 * inputStrides0 + i1 * inputStrides1 + i2 * inputStrides2];\n dO[d0 * outputStrides0 + d1 * outputStrides1 + d2 * outputStrides2] =\n v;\n }\n }\n }\n }\n\n edgeCopy4d(\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n inputShape: ReadonlyArray<number>,\n outputShape: ReadonlyArray<number>,\n inputStrides: ReadonlyArray<number>,\n outputStrides: ReadonlyArray<number>,\n pads: ReadonlyArray<number>\n ) {\n const [inputShape0, inputShape1, inputShape2, inputShape3] = inputShape;\n const [outputShape0, outputShape1, outputShape2, outputShape3] =\n outputShape;\n const [inputStrides0, inputStrides1, inputStrides2, inputStrides3] =\n inputStrides;\n const [outputStrides0, outputStrides1, outputStrides2, outputStrides3] =\n outputStrides;\n const [pads0, pads1, pads2, pads3] = pads;\n for (let d0 = 0; d0 < outputShape0; d0++) {\n for (let d1 = 0; d1 < outputShape1; d1++) {\n for (let d2 = 0; d2 < outputShape2; d2++) {\n for (let d3 = 0; d3 < outputShape3; d3++) {\n let i0 = d0 - pads0,\n i1 = d1 - pads1,\n i2 = d2 - pads2,\n i3 = d3 - pads3;\n if (i0 < 0) {\n i0 = 0;\n } else if (i0 >= inputShape0) {\n i0 = inputShape0 - 1;\n }\n if (i1 < 0) {\n i1 = 0;\n } else if (i1 >= 
inputShape1) {\n i1 = inputShape1 - 1;\n }\n if (i2 < 0) {\n i2 = 0;\n } else if (i2 >= inputShape2) {\n i2 = inputShape2 - 1;\n }\n if (i3 < 0) {\n i3 = 0;\n } else if (i3 >= inputShape3) {\n i3 = inputShape3 - 1;\n }\n const v =\n dI[\n i0 * inputStrides0 +\n i1 * inputStrides1 +\n i2 * inputStrides2 +\n i3 * inputStrides3\n ];\n dO[\n d0 * outputStrides0 +\n d1 * outputStrides1 +\n d2 * outputStrides2 +\n d3 * outputStrides3\n ] = v;\n }\n }\n }\n }\n }\n\n edgeCopy5d(\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n inputShape: ReadonlyArray<number>,\n outputShape: ReadonlyArray<number>,\n inputStrides: ReadonlyArray<number>,\n outputStrides: ReadonlyArray<number>,\n pads: ReadonlyArray<number>\n ) {\n const [inputShape0, inputShape1, inputShape2, inputShape3, inputShape4] =\n inputShape;\n const [\n outputShape0,\n outputShape1,\n outputShape2,\n outputShape3,\n outputShape4,\n ] = outputShape;\n const [\n inputStrides0,\n inputStrides1,\n inputStrides2,\n inputStrides3,\n inputStrides4,\n ] = inputStrides;\n const [\n outputStrides0,\n outputStrides1,\n outputStrides2,\n outputStrides3,\n outputStrides4,\n ] = outputStrides;\n const [pads0, pads1, pads2, pads3, pads4] = pads;\n for (let d0 = 0; d0 < outputShape0; d0++) {\n for (let d1 = 0; d1 < outputShape1; d1++) {\n for (let d2 = 0; d2 < outputShape2; d2++) {\n for (let d3 = 0; d3 < outputShape3; d3++) {\n for (let d4 = 0; d4 < outputShape4; d4++) {\n let i0 = d0 - pads0,\n i1 = d1 - pads1,\n i2 = d2 - pads2,\n i3 = d3 - pads3,\n i4 = d4 - pads4;\n if (i0 < 0) {\n i0 = 0;\n } else if (i0 >= inputShape0) {\n i0 = inputShape0 - 1;\n }\n if (i1 < 0) {\n i1 = 0;\n } else if (i1 >= inputShape1) {\n i1 = inputShape1 - 1;\n }\n if (i2 < 0) {\n i2 = 0;\n } else if (i2 >= inputShape2) {\n i2 = inputShape2 - 1;\n }\n if (i3 < 0) {\n i3 = 0;\n } else if (i3 >= inputShape3) {\n i3 = inputShape3 - 1;\n }\n if (i4 < 0) {\n i4 = 0;\n } else if (i4 >= inputShape4) {\n i4 = inputShape4 - 1;\n }\n const v =\n dI[\n i0 * inputStrides0 +\n i1 * inputStrides1 +\n i2 * inputStrides2 +\n i3 * inputStrides3 +\n i4 * inputStrides4\n ];\n dO[\n d0 * outputStrides0 +\n d1 * outputStrides1 +\n d2 * outputStrides2 +\n d3 * outputStrides3 +\n d4 * outputStrides4\n ] = v;\n }\n }\n }\n }\n }\n }\n\n edgeCopy6d(\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n inputShape: ReadonlyArray<number>,\n outputShape: ReadonlyArray<number>,\n inputStrides: ReadonlyArray<number>,\n outputStrides: ReadonlyArray<number>,\n pads: ReadonlyArray<number>\n ) {\n const [\n inputShape0,\n inputShape1,\n inputShape2,\n inputShape3,\n inputShape4,\n inputShape5,\n ] = inputShape;\n const [\n outputShape0,\n outputShape1,\n outputShape2,\n outputShape3,\n outputShape4,\n outputShape5,\n ] = outputShape;\n const [\n inputStrides0,\n inputStrides1,\n inputStrides2,\n inputStrides3,\n inputStrides4,\n inputStrides5,\n ] = inputStrides;\n const [\n outputStrides0,\n outputStrides1,\n outputStrides2,\n outputStrides3,\n outputStrides4,\n outputStrides5,\n ] = outputStrides;\n const [pads0, pads1, pads2, pads3, pads4, pads5] = pads;\n for (let d0 = 0; d0 < outputShape0; d0++) {\n for (let d1 = 0; d1 < outputShape1; d1++) {\n for (let d2 = 0; d2 < outputShape2; d2++) {\n for (let d3 = 0; d3 < outputShape3; d3++) {\n for (let d4 = 0; d4 < outputShape4; d4++) {\n for (let d5 = 0; d5 < outputShape5; d5++) {\n let i0 = d0 - pads0,\n i1 = d1 - pads1,\n i2 = d2 - pads2,\n i3 = d3 - pads3,\n i4 = d4 - pads4,\n i5 = d5 - pads5;\n if (i0 < 0) {\n i0 = 0;\n } else if (i0 >= inputShape0) {\n i0 = 
inputShape0 - 1;\n              }\n              if (i1 < 0) {\n                i1 = 0;\n              } else if (i1 >= inputShape1) {\n                i1 = inputShape1 - 1;\n              }\n              if (i2 < 0) {\n                i2 = 0;\n              } else if (i2 >= inputShape2) {\n                i2 = inputShape2 - 1;\n              }\n              if (i3 < 0) {\n                i3 = 0;\n              } else if (i3 >= inputShape3) {\n                i3 = inputShape3 - 1;\n              }\n              if (i4 < 0) {\n                i4 = 0;\n              } else if (i4 >= inputShape4) {\n                i4 = inputShape4 - 1;\n              }\n              if (i5 < 0) {\n                i5 = 0;\n              } else if (i5 >= inputShape5) {\n                i5 = inputShape5 - 1;\n              }\n              const v =\n                dI[\n                  i0 * inputStrides0 +\n                  i1 * inputStrides1 +\n                  i2 * inputStrides2 +\n                  i3 * inputStrides3 +\n                  i4 * inputStrides4 +\n                  i5 * inputStrides5\n                ];\n              dO[\n                d0 * outputStrides0 +\n                d1 * outputStrides1 +\n                d2 * outputStrides2 +\n                d3 * outputStrides3 +\n                d4 * outputStrides4 +\n                d5 * outputStrides5\n              ] = v;\n            }\n          }\n        }\n      }\n    }\n  }\n  }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n  return [\n    {\n      opType: \"Pad\",\n      backend: \"cpu\",\n      opsetMin: 11,\n      factory: () => new CPUPad11(),\n    },\n  ];\n}\n" }, { "alpha_fraction": 0.5940819382667542, "alphanum_fraction": 0.661987841129303, "avg_line_length": 24.407407760620117, "blob_id": "4e4a5471717c653c1a0d161e5ff6074c90a62817", "content_id": "80c36af04fa06e94db125ef44f3d0440aa37ac18", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 2654, "license_type": "permissive", "max_line_length": 74, "num_lines": 108, "path": "/src/descriptor_runner/backend/webgl/pack.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "export function packToFloat32Array(\n  src: ArrayLike<number>,\n  length: number\n): Float32Array {\n  const buffer = new Float32Array(length);\n  buffer.set(src);\n  return buffer;\n}\n\nexport function packToFloat16Array(\n  src: ArrayLike<number>,\n  length: number\n): Uint16Array {\n  const srcLength = src.length;\n  let srcUInt32: Uint32Array;\n  if (src instanceof Float32Array) {\n    srcUInt32 = new Uint32Array(src.buffer, src.byteOffset, srcLength);\n  } else {\n    const srcFloat32 = new Float32Array(srcLength);\n    srcFloat32.set(src);\n    srcUInt32 = new Uint32Array(srcFloat32.buffer);\n  }\n\n  const buffer = new Uint16Array(length);\n  for (let i = 0; i < srcLength; i++) {\n    const x = srcUInt32[i];\n    // Denormal numbers and NaN are not converted exactly\n    let exp = ((x >> 13) & 0x3fc00) - 0x1c000;\n    if (exp < 0) {\n      exp = 0;\n    } else if (exp > 0x7c00) {\n      exp = 0x7c00;\n    }\n    const packed = ((x >> 16) & 0x8000) | exp | ((x >> 13) & 0x3ff);\n    buffer[i] = packed;\n  }\n  return buffer;\n}\n\nexport function packToInt32Array(\n  src: ArrayLike<number>,\n  length: number\n): Int32Array {\n  const buffer = new Int32Array(length);\n  buffer.set(src);\n  return buffer;\n}\n\nexport function packToUint8Array(\n  src: ArrayLike<number>,\n  length: number\n): Uint8Array {\n  const buffer = new Uint8Array(length);\n  buffer.set(src);\n  return buffer;\n}\n\nexport function unpackFromFloat32Array(\n  src: Float32Array,\n  length: number\n): Float32Array {\n  const buffer = new Float32Array(length);\n  const srcView = new Float32Array(src.buffer, src.byteOffset, length);\n  buffer.set(srcView);\n  return buffer;\n}\n\nexport function unpackFromFloat16Array(\n  src: Uint16Array,\n  length: number\n): Float32Array {\n  const buffer = new Float32Array(length);\n  const bufferUInt32 = new Uint32Array(buffer.buffer);\n  for (let i = 0; i < length; i++) {\n    const h = src[i];\n    let exp = ((h << 13) & 0xf800000) + 0x38000000;\n    if (exp === 0x38000000) {\n      // 0\n      exp = 0;\n    } else if (exp === 0x47800000) {\n      // inf\n      exp = 0x7f800000;\n    }\n    const unpacked = ((h 
<< 16) & 0x80000000) | exp | ((h & 0x3ff) << 13);\n bufferUInt32[i] = unpacked;\n }\n return buffer;\n}\n\nexport function unpackFromInt32Array(\n src: Int32Array,\n length: number\n): Int32Array {\n const buffer = new Int32Array(length);\n const srcView = new Int32Array(src.buffer, src.byteOffset, length);\n buffer.set(srcView);\n return buffer;\n}\n\nexport function unpackFromUint8Array(\n src: Uint8Array,\n length: number\n): Uint8Array {\n const buffer = new Uint8Array(length);\n const srcView = new Uint8Array(src.buffer, src.byteOffset, length);\n buffer.set(srcView);\n return buffer;\n}\n" }, { "alpha_fraction": 0.5578693747520447, "alphanum_fraction": 0.5938518047332764, "avg_line_length": 27.40643882751465, "blob_id": "463f63086d07fc05969107fd78816eea4736bb34", "content_id": "3b0bd643f66a2dffb58048404bbd38190b3dd750", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 14118, "license_type": "permissive", "max_line_length": 163, "num_lines": 497, "path": "/src/descriptor_runner/operators/webgl/shaderHelper.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { WebGLUniformItem } from \"../../interface/backend/webgl/webglContext\";\nimport { WebGLTensor } from \"../../interface/backend/webgl/webglTensor\";\nimport { Tensor } from \"../../interface/core/tensor\";\n\n// Float encode: https://community.khronos.org/t/packing-multiple-floats-into-a-single-float-value/59320/3\nexport const shaderFloatPack = `\nvec4 encode_float (float val) {\n if (val == 0.0) return vec4(0, 0, 0, 0);\n float sign = val > 0.0 ? 192.0 : 64.0;\n float absval = abs(val);\n float exponent = ceil(log2(absval) + 0.0001);\n float scaled = absval * exp2(-exponent);\n vec3 enc = vec3(1.0, 255.0, 65025.0) * scaled;\n enc = fract(enc);\n enc -= enc.yzz * vec3(1.0/255.0, 1.0/255.0, 0.0);\n return vec4((sign + clamp(exponent, -63.0, 63.0)) * (1.0 / 255.0), enc.x, enc.y, enc.z);\n}\n\nfloat decode_float(vec4 code) {\n if (code.x == 0.0) {\n return 0.0;\n }\n float ebyte = code.x * 255.0;\n float sign, exponent;\n if (ebyte >= 128.0) {\n sign = 1.0;\n exponent = ebyte - 192.0;\n } else {\n sign = -1.0;\n exponent = ebyte - 64.0;\n }\n float scaled = code.w * (1.0 / 65025.0) + code.z * (1.0 / 255.0) + code.y;\n float value = scaled * exp2(exponent) * sign;\n return value;\n}\n`;\n\nexport const shaderHeaderWebGL1 = `#version 100\nprecision highp float;\nprecision highp int;\nprecision highp sampler2D;\n`;\n\nexport const shaderHeaderWebGL2 = `#version 300 es\nprecision highp float;\nprecision highp int;\nprecision highp sampler2D;\nout vec4 fragColor;\n`;\n\nexport function shaderGenHeader(webgl2: boolean): string {\n if (webgl2) {\n return shaderHeaderWebGL2;\n }\n return shaderHeaderWebGL1 + shaderFloatPack;\n}\n\nexport function shaderGenOutput(expr: string, webgl2: boolean): string {\n if (webgl2) {\n return `fragColor = vec4((${expr}), 0.0, 0.0, 0.0);`;\n }\n return `gl_FragColor = encode_float(${expr});`;\n}\n\nexport function shaderGenOutputVec4(expr: string, webgl2: boolean): string {\n if (webgl2) {\n return `fragColor = (${expr});`;\n }\n throw new Error(\"shaderGenOutputVec4 is only for WebGL2\");\n}\n\nexport function shaderGenTensorNDGet(\n name: string,\n ndim: number,\n webgl2: boolean\n): string {\n let args: string, flat_index: string, uniforms: string;\n switch (ndim) {\n case 0:\n uniforms = \"\";\n args = \"\";\n flat_index = \"0\";\n break;\n case 1:\n uniforms = `\n uniform int 
${name}_stride_0;\n `;\n args = \"int d0\";\n flat_index = `d0 * ${name}_stride_0`;\n break;\n case 2:\n uniforms = `\n uniform int ${name}_stride_0;\n uniform int ${name}_stride_1;\n `;\n args = \"int d0, int d1\";\n flat_index = `d0 * ${name}_stride_0 + d1 * ${name}_stride_1`;\n break;\n case 3:\n uniforms = `\n uniform int ${name}_stride_0;\n uniform int ${name}_stride_1;\n uniform int ${name}_stride_2;\n `;\n args = \"int d0, int d1, int d2\";\n flat_index = `d0 * ${name}_stride_0 + d1 * ${name}_stride_1 + d2 * ${name}_stride_2`;\n break;\n case 4:\n uniforms = `\nuniform int ${name}_stride_0;\nuniform int ${name}_stride_1;\nuniform int ${name}_stride_2;\nuniform int ${name}_stride_3;\n `;\n args = \"int d0, int d1, int d2, int d3\";\n flat_index = `d0 * ${name}_stride_0 + d1 * ${name}_stride_1 + d2 * ${name}_stride_2 + d3 * ${name}_stride_3`;\n break;\n case 5:\n uniforms = `\n uniform int ${name}_stride_0;\n uniform int ${name}_stride_1;\n uniform int ${name}_stride_2;\n uniform int ${name}_stride_3;\n uniform int ${name}_stride_4;\n `;\n args = \"int d0, int d1, int d2, int d3, int d4\";\n flat_index = `d0 * ${name}_stride_0 + d1 * ${name}_stride_1 + d2 * ${name}_stride_2 + d3 * ${name}_stride_3 + d4 * ${name}_stride_4`;\n break;\n case 6:\n uniforms = `\n uniform int ${name}_stride_0;\n uniform int ${name}_stride_1;\n uniform int ${name}_stride_2;\n uniform int ${name}_stride_3;\n uniform int ${name}_stride_4;\n uniform int ${name}_stride_5;\n `;\n args = \"int d0, int d1, int d2, int d3, int d4, int d5\";\n flat_index = `d0 * ${name}_stride_0 + d1 * ${name}_stride_1 + d2 * ${name}_stride_2 + d3 * ${name}_stride_3 + d4 * ${name}_stride_4 + d5 * ${name}_stride_5`;\n break;\n default:\n throw new Error();\n }\n if (webgl2) {\n return `\nuniform sampler2D ${name};\n${uniforms}\n\nfloat get_${name}(${args}) {\nint flat_index = ${flat_index};\nint texture_w = textureSize(${name}, 0).x;\nint y = flat_index / texture_w;\nint x = flat_index - y * texture_w;\nreturn texelFetch(${name}, ivec2(x, y), 0).r;\n}\n`;\n }\n return `\n uniform sampler2D ${name};\n ${uniforms}\n uniform int ${name}_texture_w;\n uniform int ${name}_texture_h;\n \n float get_${name}(${args}) {\n int flat_index = ${flat_index};\n int texture_w = ${name}_texture_w;\n int y = flat_index / texture_w;\n int x = flat_index - y * texture_w;\n vec4 p = texture2D(${name}, vec2((float(x) + 0.5) / float(${name}_texture_w), (float(y) + 0.5) / float(${name}_texture_h)));\n return decode_float(p);\n }\n`;\n}\n\nexport function shaderGenTensorNDGetVec4(\n name: string,\n ndim: number,\n webgl2: boolean\n): string {\n let args: string, flat_index: string, uniforms: string;\n switch (ndim) {\n case 0:\n uniforms = \"\";\n args = \"\";\n flat_index = \"0\";\n break;\n case 1:\n uniforms = `\n uniform int ${name}_stride_0;\n `;\n args = \"int d0\";\n flat_index = `d0 * ${name}_stride_0`;\n break;\n case 2:\n uniforms = `\n uniform int ${name}_stride_0;\n uniform int ${name}_stride_1;\n `;\n args = \"int d0, int d1\";\n flat_index = `d0 * ${name}_stride_0 + d1 * ${name}_stride_1`;\n break;\n case 3:\n uniforms = `\n uniform int ${name}_stride_0;\n uniform int ${name}_stride_1;\n uniform int ${name}_stride_2;\n `;\n args = \"int d0, int d1, int d2\";\n flat_index = `d0 * ${name}_stride_0 + d1 * ${name}_stride_1 + d2 * ${name}_stride_2`;\n break;\n case 4:\n uniforms = `\nuniform int ${name}_stride_0;\nuniform int ${name}_stride_1;\nuniform int ${name}_stride_2;\nuniform int ${name}_stride_3;\n `;\n args = \"int d0, int d1, int d2, int 
d3\";\n flat_index = `d0 * ${name}_stride_0 + d1 * ${name}_stride_1 + d2 * ${name}_stride_2 + d3 * ${name}_stride_3`;\n break;\n case 5:\n uniforms = `\n uniform int ${name}_stride_0;\n uniform int ${name}_stride_1;\n uniform int ${name}_stride_2;\n uniform int ${name}_stride_3;\n uniform int ${name}_stride_4;\n `;\n args = \"int d0, int d1, int d2, int d3, int d4\";\n flat_index = `d0 * ${name}_stride_0 + d1 * ${name}_stride_1 + d2 * ${name}_stride_2 + d3 * ${name}_stride_3 + d4 * ${name}_stride_4`;\n break;\n case 6:\n uniforms = `\n uniform int ${name}_stride_0;\n uniform int ${name}_stride_1;\n uniform int ${name}_stride_2;\n uniform int ${name}_stride_3;\n uniform int ${name}_stride_4;\n uniform int ${name}_stride_5;\n `;\n args = \"int d0, int d1, int d2, int d3, int d4, int d5\";\n flat_index = `d0 * ${name}_stride_0 + d1 * ${name}_stride_1 + d2 * ${name}_stride_2 + d3 * ${name}_stride_3 + d4 * ${name}_stride_4 + d5 * ${name}_stride_5`;\n break;\n default:\n throw new Error();\n }\n if (webgl2) {\n return `\nuniform sampler2D ${name};\n${uniforms}\n\nvec4 get_vec4_${name}(${args}) {\nint flat_index = ${flat_index};\nint texture_w = textureSize(${name}, 0).x;\nint y = flat_index / texture_w;\nint x = flat_index - y * texture_w;\nreturn texelFetch(${name}, ivec2(x, y), 0);\n}\n`;\n }\n throw new Error(\"shaderGenTensorNDGetVec4 is only for WebGL2\");\n}\n\nfunction isWebGLTensor(tensor: unknown): tensor is WebGLTensor {\n return typeof tensor === \"object\" && (tensor as Tensor).backend === \"webgl\";\n}\n\nexport function shaderGenTensorNDGetUniformItem(\n name: string,\n strides: ReadonlyArray<number>,\n textureShape: ReadonlyArray<number> | WebGLTensor,\n webgl2: boolean\n): WebGLUniformItem[] {\n let textureShapeArray: ReadonlyArray<number>;\n if (isWebGLTensor(textureShape)) {\n textureShapeArray = [textureShape.textureHeight, textureShape.textureWidth];\n } else {\n textureShapeArray = textureShape;\n }\n const uniforms: WebGLUniformItem[] = [];\n for (let i = 0; i < strides.length; i++) {\n uniforms.push({\n name: `${name}_stride_${i}`,\n type: \"int\",\n value: strides[i],\n });\n }\n if (!webgl2) {\n uniforms.push({\n name: `${name}_texture_h`,\n type: \"int\",\n value: textureShapeArray[0],\n });\n uniforms.push({\n name: `${name}_texture_w`,\n type: \"int\",\n value: textureShapeArray[1],\n });\n }\n return uniforms;\n}\n\nexport function shaderGenTensorOutputUniformItem(\n shape: ReadonlyArray<number>,\n textureShape: ReadonlyArray<number> | WebGLTensor,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n webgl2: boolean\n): WebGLUniformItem[] {\n let textureShapeArray: ReadonlyArray<number>;\n if (isWebGLTensor(textureShape)) {\n textureShapeArray = [textureShape.textureHeight, textureShape.textureWidth];\n } else {\n textureShapeArray = textureShape;\n }\n const name = \"tex_output\",\n uniforms: WebGLUniformItem[] = [];\n for (let i = 0; i < shape.length; i++) {\n uniforms.push({\n name: `${name}_shape_${i}`,\n type: \"int\",\n value: shape[i],\n });\n }\n uniforms.push({\n name: `${name}_texture_w`,\n type: \"int\",\n value: textureShapeArray[1],\n });\n return uniforms;\n}\n\nexport function shaderGenTensorOutputUniform(ndim: number): string {\n let source = `\n uniform int tex_output_texture_w;\n`;\n for (let i = 0; i < ndim; i++) {\n source += `uniform int tex_output_shape_${i};`;\n }\n return source;\n}\n\nexport function shaderGenTensorOutputCoordsWithReturn(ndim: number): string {\n let source: string;\n switch (ndim) {\n case 0:\n source = `\n int 
tex_output_0 = 0;\n if (tex_output_0 >= 1) {\n return;\n }\n `;\n break;\n case 1:\n source = `\n int tex_output_0 = tex_output_flat;\n if (tex_output_0 >= tex_output_shape_0) {\n return;\n }\n `;\n break;\n case 2:\n source = `\n int tmp1 = tex_output_flat / tex_output_shape_1;\n int tex_output_1 = tex_output_flat - tmp1 * tex_output_shape_1;\n int tex_output_0 = tmp1;\n if (tex_output_0 >= tex_output_shape_0) {\n return;\n }\n `;\n break;\n case 3:\n source = `\n int tmp2 = tex_output_flat / tex_output_shape_2;\n int tex_output_2 = tex_output_flat - tmp2 * tex_output_shape_2;\n int tmp1 = tmp2 / tex_output_shape_1;\n int tex_output_1 = tmp2 - tmp1 * tex_output_shape_1;\n int tex_output_0 = tmp1;\n if (tex_output_0 >= tex_output_shape_0) {\n return;\n }\n `;\n break;\n case 4:\n source = `\n int tmp3 = tex_output_flat / tex_output_shape_3;\n int tex_output_3 = tex_output_flat - tmp3 * tex_output_shape_3;\n int tmp2 = tmp3 / tex_output_shape_2;\n int tex_output_2 = tmp3 - tmp2 * tex_output_shape_2;\n int tmp1 = tmp2 / tex_output_shape_1;\n int tex_output_1 = tmp2 - tmp1 * tex_output_shape_1;\n int tex_output_0 = tmp1;\n if (tex_output_0 >= tex_output_shape_0) {\n return;\n }\n `;\n break;\n case 5:\n source = `\n int tmp4 = tex_output_flat / tex_output_shape_4;\n int tex_output_4 = tex_output_flat - tmp4 * tex_output_shape_4;\n int tmp3 = tmp4 / tex_output_shape_3;\n int tex_output_3 = tmp4 - tmp3 * tex_output_shape_3;\n int tmp2 = tmp3 / tex_output_shape_2;\n int tex_output_2 = tmp3 - tmp2 * tex_output_shape_2;\n int tmp1 = tmp2 / tex_output_shape_1;\n int tex_output_1 = tmp2 - tmp1 * tex_output_shape_1;\n int tex_output_0 = tmp1;\n if (tex_output_0 >= tex_output_shape_0) {\n return;\n }\n `;\n break;\n case 6:\n source = `\n int tmp5 = tex_output_flat / tex_output_shape_5;\n int tex_output_5 = tex_output_flat - tmp5 * tex_output_shape_5;\n int tmp4 = tmp5 / tex_output_shape_4;\n int tex_output_4 = tmp5 - tmp4 * tex_output_shape_4;\n int tmp3 = tmp4 / tex_output_shape_3;\n int tex_output_3 = tmp4 - tmp3 * tex_output_shape_3;\n int tmp2 = tmp3 / tex_output_shape_2;\n int tex_output_2 = tmp3 - tmp2 * tex_output_shape_2;\n int tmp1 = tmp2 / tex_output_shape_1;\n int tex_output_1 = tmp2 - tmp1 * tex_output_shape_1;\n int tex_output_0 = tmp1;\n if (tex_output_0 >= tex_output_shape_0) {\n return;\n }\n `;\n break;\n default:\n throw new Error();\n }\n\n /*\n * Gl_FragCoord.x 's precision is mediump, which only has 10bit precision\n * force casting to highp is needed in iOS. 
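The 0.5 offset exists because gl_FragCoord samples pixel centers (0.5, 1.5, ...). 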
Also, \"-0.5\" cannot be removed.\n */\n return `\n highp float helper_gfcx = gl_FragCoord.x;\n highp float helper_gfcy = gl_FragCoord.y;\n int tex_output_flat = int(helper_gfcx - 0.5) + tex_output_texture_w * int(helper_gfcy - 0.5);\n ${source}\n `;\n}\n\nexport function shaderGenTensorElementwiseGet(\n name: string,\n webgl2: boolean\n): string {\n if (webgl2) {\n return `\nuniform sampler2D ${name};\n\nfloat get_${name}() {\n return texelFetch(${name}, ivec2(int(gl_FragCoord.x), int(gl_FragCoord.y)), 0).r;\n}\n`;\n }\n return `\nuniform sampler2D ${name};\nuniform int ${name}_texture_w;\nuniform int ${name}_texture_h;\n\nfloat get_${name}() {\n vec4 p = texture2D(${name}, vec2(gl_FragCoord.x / float(${name}_texture_w), gl_FragCoord.y / float(${name}_texture_h)));\n return decode_float(p);\n}\n`;\n}\n\nexport function shaderGenTensorElementwiseGetUniformItem(\n name: string,\n textureShape: ReadonlyArray<number> | WebGLTensor,\n webgl2: boolean\n): WebGLUniformItem[] {\n let textureShapeArray: ReadonlyArray<number>;\n if (isWebGLTensor(textureShape)) {\n textureShapeArray = [textureShape.textureHeight, textureShape.textureWidth];\n } else {\n textureShapeArray = textureShape;\n }\n const uniforms: WebGLUniformItem[] = [];\n if (!webgl2) {\n uniforms.push({\n name: `${name}_texture_h`,\n type: \"int\",\n value: textureShapeArray[0],\n });\n uniforms.push({\n name: `${name}_texture_w`,\n type: \"int\",\n value: textureShapeArray[1],\n });\n }\n return uniforms;\n}\n" }, { "alpha_fraction": 0.5994513034820557, "alphanum_fraction": 0.6104252338409424, "avg_line_length": 39.5, "blob_id": "cbb3bacf35a1926bfbf2cb1b6a6515aa722f80af", "content_id": "0a48434edd9e59b3cd86d1c7a49a060846fefc8e", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 729, "license_type": "permissive", "max_line_length": 116, "num_lines": 18, "path": "/src/shader/wasm/src/kernels/standard/unarys.cpp", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "#include <algorithm>\n#include <cmath>\n#include \"../../common/kernel.hpp\"\n#include \"../../common/unary.hpp\"\n\nextern \"C\"\n{\n \n#define DEFINE_UNARY(name, func) \\\nvoid WEBDNN_KERNEL kernel_##name(const float *src, float *dst, int length) { webdnn_unary(src, dst, length, func); }\n DEFINE_UNARY(ceil, [](float s) { return std::ceil(s); });\n DEFINE_UNARY(exp, [](float s) { return std::exp(s); });\n DEFINE_UNARY(floor, [](float s) { return std::floor(s); });\n DEFINE_UNARY(relu, [](float s) { return std::max(s, 0.0f); });\n DEFINE_UNARY(sigmoid, [](float s) { return (std::tanh(s * 0.5f) + 1.0f) * 0.5f; });\n DEFINE_UNARY(sqrt, [](float s) { return std::sqrt(s); });\n DEFINE_UNARY(tanh, [](float s) { return std::tanh(s); });\n}\n" }, { "alpha_fraction": 0.6668707728385925, "alphanum_fraction": 0.6754439473152161, "avg_line_length": 27.66666603088379, "blob_id": "50d91334b0e33c160f410dd99e580b260deb8870", "content_id": "4d8fd9bd610a069af655ed635350867121c3ab4a", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 1635, "license_type": "permissive", "max_line_length": 74, "num_lines": 57, "path": "/example/custom_operator/index.js", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "let runner;\n\nwindow.addEventListener(\"DOMContentLoaded\", async () => {\n srcCanvas = document.getElementById(\"source\");\n resultCanvas = document.getElementById(\"result\");\n updateMessage(\"Loading 
model\");\n\n runner = await WebDNN.load(\"output/\", {optimized: true});\n updateMessage(`Model loaded (backend: ${runner.backendName})`);\n});\n\nfunction updateMessage(message) {\n document.getElementById(\"msg\").innerText = message;\n}\n\nasync function run() {\n const cols = 4, rows = 3;\n const inputArray = new Float32Array(cols * rows);\n for (let i = 0; i < inputArray.length; i++) {\n inputArray[i] = Math.random() - 0.5;\n }\n const input = new WebDNN.CPUTensor([rows, cols], \"float32\", inputArray);\n\n const [output] = await runner.run([input]);\n\n displayResult(input, output);\n\n updateMessage(`Completed`);\n}\n\nfunction displayResult(input, output) {\n const resultDom = document.getElementById(\"result\");\n while (resultDom.firstChild) {\n resultDom.removeChild(resultDom.firstChild);\n }\n\n const inputTensorDom = displayTensor(input);\n const outputTensorDom = displayTensor(output);\n resultDom.appendChild(inputTensorDom);\n resultDom.appendChild(document.createTextNode(\"⇒\"));\n resultDom.appendChild(outputTensorDom);\n}\n\nfunction displayTensor(tensor) {\n const element = document.createElement(\"table\");\n let html = \"<tbody>\";\n for (let row = 0; row < tensor.dims[0]; row++) {\n html += \"<tr>\";\n for (let col = 0; col < tensor.dims[1]; col++) {\n html += `<td>${tensor.getValue([row, col]).toFixed(2)}</td>`;\n }\n html += \"</tr>\";\n }\n html += \"</tbody>\";\n element.innerHTML = html;\n return element;\n}" }, { "alpha_fraction": 0.7275862097740173, "alphanum_fraction": 0.7275862097740173, "avg_line_length": 35.25, "blob_id": "0943add42aa1e3e8e59c5649883d3ad656589249", "content_id": "59f22b3aecdcfbd4b15786146b48217f5b9482b8", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 290, "license_type": "permissive", "max_line_length": 75, "num_lines": 8, "path": "/src/descriptor_runner/interface/core/backendContext.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { Backend, DataType } from \"./constants\";\nimport { Tensor } from \"./tensor\";\n\nexport interface BackendContext {\n backend: Backend;\n emptyTensor(dims: ReadonlyArray<number>, dataType?: DataType): Tensor;\n moveTensor(tensor: Tensor, option: Record<string, any>): Promise<Tensor>;\n}\n" }, { "alpha_fraction": 0.600907027721405, "alphanum_fraction": 0.622732400894165, "avg_line_length": 23.33103370666504, "blob_id": "fb5b6c17a0462421f276b231fbb9356927d6f059", "content_id": "9f8add1227639ff546e78b5feca314fe26b8680b", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 3732, "license_type": "permissive", "max_line_length": 80, "num_lines": 145, "path": "/example/detr/test.js", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "function isClose(expected, actual, name, rtol = 1e-3, atol = 1e-5) {\n if (expected.dims.length !== actual.dims.length) {\n console.error(\n `${name}: expected.dims(${expected.dims}) !== actual.dims(${actual.dims})`\n );\n return false;\n }\n if (expected.dims.some((nd, i) => nd !== actual.dims[i])) {\n console.error(\n `${name}: expected.dims(${expected.dims}) !== actual.dims(${actual.dims})`\n );\n return false;\n }\n\n if (expected.data.length !== actual.data.length) {\n console.error(`${name}: data length mismatch`);\n return false;\n }\n\n for (let i = 0; i < expected.data.length; i++) {\n const e = expected.data[i];\n const a = actual.data[i];\n // NaNの場合にエラーになるようにしている\n 
if (!(Math.abs(e - a) <= Math.abs(e) * rtol + atol)) {\n console.error(`${name}: index ${i}, expected ${e} !== actual ${a}`);\n return false;\n }\n }\n\n return true;\n}\n\nlet testCaseTensors;\n\nwindow.addEventListener(\"DOMContentLoaded\", async () => {\n srcCanvas = document.getElementById(\"source\");\n resultCanvas = document.getElementById(\"result\");\n updateMessage(\"Loading model\");\n\n loadDefaultImage();\n await loadModel(\"output/\");\n testCaseTensors = await runner.getTensorLoader(\"output/test.bin\").loadAll();\n updateMessage(`Model loaded (backend: ${runner.backendName})`);\n});\n\nconst imageRescaleW = 1066,\n imageRescaleH = 800;\n\nasync function testEmbedding() {\n console.log(\"Testing position embedding generation\");\n const embed = makeEmbed(\n imageRescaleH,\n imageRescaleW,\n inputArrays.get(\"row_embed\"),\n inputArrays.get(\"col_embed\")\n );\n console.log(\n \"Match:\",\n isClose(testCaseTensors.get(\"input_embed_const\"), embed, \"pos\")\n );\n}\n\nasync function testDNN() {\n console.log(\"Testing DNN core only\");\n const [pred_logits, pred_boxes] = await runner.run([\n testCaseTensors.get(\"input_0\"),\n testCaseTensors.get(\"input_embed_const\"),\n inputArrays.get(\"query_pos_us\"),\n ]);\n\n console.log(\n \"Match of pred_logits:\",\n isClose(\n testCaseTensors.get(\"output_logits\"),\n pred_logits,\n \"pred_logits\",\n 1e-2,\n 1e-3\n )\n );\n console.log(\n \"Match of pred_boxes:\",\n isClose(\n testCaseTensors.get(\"output_boxes\"),\n pred_boxes,\n \"pred_boxes\",\n 1e-2,\n 1e-3\n )\n );\n}\n\n// The full pipeline has a larger error margin.\n// A slight algorithmic difference in image resizing changes the input, and as a result the output changes considerably (especially bounding boxes containing no object).\n// The outputs are semantically almost the same, but they do not match element-wise.\nasync function testFull() {\n console.log(\"Testing full pipeline\");\n\n const imageArray = await WebDNN.Image.getImageArray(srcCanvas, {\n dstW: imageRescaleW,\n dstH: imageRescaleH,\n color: WebDNN.Image.Color.RGB,\n order: WebDNN.Image.Order.CHW,\n bias: [0.485 * 255, 0.456 * 255, 0.406 * 255],\n scale: [0.229 * 255, 0.224 * 255, 0.225 * 255],\n });\n const transformedImage = new WebDNN.CPUTensor(\n [1, 3, imageRescaleH, imageRescaleW],\n \"float32\",\n imageArray\n );\n const embedPos = makeEmbed(\n imageRescaleH,\n imageRescaleW,\n inputArrays.get(\"row_embed\"),\n inputArrays.get(\"col_embed\")\n );\n\n const [pred_logits, pred_boxes] = await runner.run([\n transformedImage,\n embedPos,\n inputArrays.get(\"query_pos_us\"),\n ]);\n\n console.log(\n \"Match of pred_logits:\",\n isClose(\n testCaseTensors.get(\"output_logits\"),\n pred_logits,\n \"pred_logits\",\n 0.1,\n 0.01\n )\n );\n console.log(\n \"Match of pred_boxes:\",\n isClose(\n testCaseTensors.get(\"output_boxes\"),\n pred_boxes,\n \"pred_boxes\",\n 0.1,\n 0.01\n )\n );\n}\n" }, { "alpha_fraction": 0.5224977135658264, "alphanum_fraction": 0.5454545617103577, "avg_line_length": 25.560976028442383, "blob_id": "10c62a15e1d8b0e8c982b19c7878138bdd85de8f", "content_id": "5b3f828f4747dc9d010256dd5f3efd8477216a3b", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 3285, "license_type": "permissive", "max_line_length": 80, "num_lines": 123, "path": "/src/descriptor_runner/operators/cpu/operators/standard/matmul.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport { Tensor } from 
\"../../../../interface/core/tensor\";\nimport { MatMul } from \"../../../base/matmul\";\n\nclass CpuMatMul extends MatMul {\n constructor() {\n super(\"cpu\");\n }\n\n async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsCPUTensorArray(inputs);\n const inputA = inputs[0],\n inputB = inputs[1];\n if (inputA.dataType !== \"float32\" || inputB.dataType !== \"float32\") {\n throw new Error(\"only float32 is supported\");\n }\n const {\n resultLength,\n resultDims,\n resultStrides,\n resultDimsAfterSqueeze,\n stridesA,\n stridesB,\n innerProductLength,\n } = this.calcShape(inputA.dims, inputB.dims),\n newData = new Float32Array(resultLength);\n if (resultDims.length === 2) {\n this.calcDim2(\n inputA.data as Float32Array,\n inputB.data as Float32Array,\n newData,\n resultDims,\n resultStrides,\n stridesA,\n stridesB,\n innerProductLength\n );\n } else if (resultDims.length === 3) {\n this.calcDim3(\n inputA.data as Float32Array,\n inputB.data as Float32Array,\n newData,\n resultDims,\n resultStrides,\n stridesA,\n stridesB,\n innerProductLength\n );\n } else {\n // TODO: 4次元以上のサポート\n throw new Error();\n }\n\n const output = context.emptyTensor(\n resultDimsAfterSqueeze,\n \"float32\",\n newData\n );\n return [output];\n }\n\n private calcDim2(\n dA: Float32Array,\n dB: Float32Array,\n dC: Float32Array,\n resultDims: number[],\n resultStrides: number[],\n stridesA: ReadonlyArray<number>,\n stridesB: ReadonlyArray<number>,\n innerProductLength: number\n ) {\n for (let m = 0; m < resultDims[0]; m++) {\n for (let n = 0; n < resultDims[1]; n++) {\n let sum = 0;\n for (let k = 0; k < innerProductLength; k++) {\n sum +=\n dA[m * stridesA[0] + k * stridesA[1]] *\n dB[k * stridesB[0] + n * stridesB[1]];\n }\n dC[m * resultStrides[0] + n * resultStrides[1]] = sum;\n }\n }\n }\n\n private calcDim3(\n dA: Float32Array,\n dB: Float32Array,\n dC: Float32Array,\n resultDims: number[],\n resultStrides: number[],\n stridesA: ReadonlyArray<number>,\n stridesB: ReadonlyArray<number>,\n innerProductLength: number\n ) {\n for (let o0 = 0; o0 < resultDims[0]; o0++) {\n for (let m = 0; m < resultDims[1]; m++) {\n for (let n = 0; n < resultDims[2]; n++) {\n let sum = 0;\n for (let k = 0; k < innerProductLength; k++) {\n sum +=\n dA[o0 * stridesA[0] + m * stridesA[1] + k * stridesA[2]] *\n dB[o0 * stridesB[0] + k * stridesB[1] + n * stridesB[2]];\n }\n dC[\n o0 * resultStrides[0] + m * resultStrides[1] + n * resultStrides[2]\n ] = sum;\n }\n }\n }\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"MatMul\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new CpuMatMul(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.6299999952316284, "alphanum_fraction": 0.6366666555404663, "avg_line_length": 29, "blob_id": "af69c01673a8180b46050c1b1614d1d92ea850c3", "content_id": "c83dce0a0754a5c6ca505768805f0629677bc1d4", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1200, "license_type": "permissive", "max_line_length": 80, "num_lines": 40, "path": "/src/descriptor_runner/operators/cpu/operators/standard/reshape5.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { Backend } from \"../../../../interface/core/constants\";\nimport { Reshape5 } from \"../../../base/reshape5\";\nimport { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } 
from \"../../../../interface/core/operator\";\n\nclass CPUReshape5 extends Reshape5 {\n constructor() {\n super(\"cpu\");\n }\n\n getTensorBackendRequirement(\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n nInputs: number,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n nOutputs: number\n ): (Backend | null)[] {\n return [\"cpu\", \"cpu\"];\n }\n\n async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsCPUTensorArray(inputs);\n const input = inputs[0],\n shapeTensor = inputs[1],\n computedShape = this.calcShape(input, shapeTensor),\n output = context.emptyTensor(computedShape, input.dataType, input.data);\n return [output];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Reshape\",\n backend: \"cpu\",\n opsetMin: 5,\n factory: () => new CPUReshape5(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.6163522005081177, "alphanum_fraction": 0.6569908261299133, "avg_line_length": 32.33871078491211, "blob_id": "875bf15b4b85a9af0fad58ce10d408d19d275ef7", "content_id": "150f8ca2557becbcdfc3ce88e51dbb9bb3e3adf2", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2067, "license_type": "permissive", "max_line_length": 72, "num_lines": 62, "path": "/src/graph_transpiler/webdnn/onnx_util.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import numpy as np\nimport onnx\n\n\ndef get_attr_int(op: onnx.NodeProto, name, default=None):\n for attr in op.attribute:\n if attr.name == name:\n return attr.i\n return default\n\n\ndef get_attr_ints(op: onnx.NodeProto, name, default=None):\n for attr in op.attribute:\n if attr.name == name:\n return list(attr.ints)\n return default\n\n\ndef get_attr_float(op: onnx.NodeProto, name, default=None):\n for attr in op.attribute:\n if attr.name == name:\n return attr.f\n return default\n\n\nDATA_TYPE_TO_NUMPY = {\n onnx.TensorProto.FLOAT: np.float32, # 1\n onnx.TensorProto.UINT8: np.uint8, # 2\n onnx.TensorProto.INT8: np.int8, # 3\n onnx.TensorProto.UINT16: np.uint16, # 4\n onnx.TensorProto.INT16: np.int16, # 5\n onnx.TensorProto.INT32: np.int32, # 6\n onnx.TensorProto.INT64: np.int64, # 7\n onnx.TensorProto.BOOL: np.bool, # 9\n onnx.TensorProto.FLOAT16: np.float16, # 10\n onnx.TensorProto.DOUBLE: np.float64, # 11\n onnx.TensorProto.UINT32: np.uint32, # 12\n onnx.TensorProto.UINT64: np.uint64, # 13\n}\n\ndef tensor_proto_to_numpy(tensor_proto: onnx.TensorProto) -> np.ndarray:\n shape = tuple(tensor_proto.dims)\n dtype = DATA_TYPE_TO_NUMPY[tensor_proto.data_type]\n if tensor_proto.raw_data:\n array = np.frombuffer(tensor_proto.raw_data, dtype=dtype)\n elif tensor_proto.int64_data:\n array = np.array(tensor_proto.int64_data, dtype=dtype)\n elif tensor_proto.int32_data:\n array = np.array(tensor_proto.int32_data, dtype=dtype)\n elif tensor_proto.uint64_data:\n array = np.array(tensor_proto.uint64_data, dtype=dtype)\n elif tensor_proto.float_data:\n array = np.array(tensor_proto.float_data, dtype=dtype)\n elif tensor_proto.double_data:\n array = np.array(tensor_proto.double_data, dtype=dtype)\n array = array.reshape(shape)\n\n if dtype == np.int64:\n array = np.clip(array, -2**31, 2**31-1).astype(np.int32)\n elif dtype == np.uint64:\n array = np.clip(array, 0, 2**32-1).astype(np.uint32)\n return array\n" }, { "alpha_fraction": 0.5530973672866821, "alphanum_fraction": 0.5685840845108032, "avg_line_length": 26.901233673095703, "blob_id": 
"50e15e8969a891d56f26d1173d1d405d69bd007f", "content_id": "8bd4760192d2ca4e21a75367a6f4691ab44c2586", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 2260, "license_type": "permissive", "max_line_length": 78, "num_lines": 81, "path": "/src/descriptor_runner/operators/wasm/operators/standard/binary7.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { OperatorImpl } from \"../../../operatorImpl\";\nimport {\n WasmKernelArgument,\n WasmKernelArgumentInt32,\n WebDNNWasmContext,\n} from \"../../../../interface/backend/wasm/wasmContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport { broadcastMulti } from \"../../../operatorUtil\";\n\nclass WasmBinary7 extends OperatorImpl {\n constructor(private kernelName: string) {\n super(\"wasm\");\n }\n\n async run(context: WebDNNWasmContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsWasmTensorArray(inputs);\n const inputA = inputs[0],\n inputB = inputs[1];\n if (inputA.dataType !== \"float32\" || inputB.dataType !== \"float32\") {\n throw new Error(\"Only float32 is supported\");\n }\n\n const { dims: outShape, allStrides: inAllStrides } = broadcastMulti([\n inputA.dims,\n inputB.dims,\n ]);\n const output = context.emptyTensor(outShape, inputA.dataType);\n const args: WasmKernelArgument[] = [\n { type: \"tensor\", value: inputA },\n { type: \"tensor\", value: inputB },\n { type: \"tensor\", value: output },\n ...outShape.map(\n (v) => ({ type: \"int32\", value: v } as WasmKernelArgumentInt32)\n ),\n ...inAllStrides[0].map(\n (v) => ({ type: \"int32\", value: v } as WasmKernelArgumentInt32)\n ),\n ...inAllStrides[1].map(\n (v) => ({ type: \"int32\", value: v } as WasmKernelArgumentInt32)\n ),\n ];\n context.runKernel(`kernel_${this.kernelName}_d${outShape.length}`, args);\n return [output];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Add\",\n backend: \"wasm\",\n opsetMin: 7,\n factory: () => new WasmBinary7(\"add\"),\n },\n {\n opType: \"Sub\",\n backend: \"wasm\",\n opsetMin: 7,\n factory: () => new WasmBinary7(\"sub\"),\n },\n {\n opType: \"Mul\",\n backend: \"wasm\",\n opsetMin: 7,\n factory: () => new WasmBinary7(\"mul\"),\n },\n {\n opType: \"Div\",\n backend: \"wasm\",\n opsetMin: 7,\n factory: () => new WasmBinary7(\"div\"),\n },\n {\n opType: \"Pow\",\n backend: \"wasm\",\n opsetMin: 7,\n factory: () => new WasmBinary7(\"pow\"),\n },\n ];\n}\n" }, { "alpha_fraction": 0.7737920880317688, "alphanum_fraction": 0.782576858997345, "avg_line_length": 39.776119232177734, "blob_id": "4d2f72544d09d9eca6ab67626c426c2d852cfb99", "content_id": "369229bff59a48ee8a05c53a1fe959027901a7e9", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2732, "license_type": "permissive", "max_line_length": 183, "num_lines": 67, "path": "/CONTRIBUTING.md", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "# How to contribute\nWe welcome contributions to WebDNN. 
This document describes the procedures and rules.\n\nContributions include, but are not restricted to, the following:\n- Bugfix\n- Implementation of a layer\n- Implementation of a converter from a deep learning framework\n- Improvement of performance\n- Documentation\n\nFor a new layer implementation, at least a WebAssembly backend implementation is required. The WebGPU backend can only be tested on Mac, so it is not mandatory.\n\n# Testing\nIf you have added features, implementing tests corresponding to them is recommended.\n\n`test/webdnn_test` is for tests which can be completed within the graph transpiler. `test/runtime` is for tests which generate a graph descriptor and compare its behavior on web browsers.\n\nSee how to run test commands in `test/README.md`.\n\n# Pull Request\nSend a pull request from your fork branch to our master branch. The project organizer checks the request and either accepts it or requests revisions.\n\n# License\nWebDNN is distributed under the MIT License. Every contributor holds the copyright of his/her part.\n\nBy contributing to the mil-tokyo/webdnn repository through pull-request, comment,\nor otherwise, the contributor releases their content to the license and copyright\nterms herein.\n\n## Developer Certificate of Origin 1.1\nDeveloper Certificate of Origin\nVersion 1.1\n\nCopyright (C) 2004, 2006 The Linux Foundation and its contributors.\n1 Letterman Drive\nSuite D4700\nSan Francisco, CA, 94129\n\nEveryone is permitted to copy and distribute verbatim copies of this\nlicense document, but changing it is not allowed.\n\n\nDeveloper's Certificate of Origin 1.1\n\nBy making a contribution to this project, I certify that:\n\n(a) The contribution was created in whole or in part by me and I\n have the right to submit it under the open source license\n indicated in the file; or\n\n(b) The contribution is based upon previous work that, to the best\n of my knowledge, is covered under an appropriate open source\n license and I have the right under that license to submit that\n work with modifications, whether created in whole or in part\n by me, under the same open source license (unless I am\n permitted to submit under a different license), as indicated\n in the file; or\n\n(c) The contribution was provided directly to me by some other\n person who certified (a), (b) or (c) and I have not modified\n it.\n\n(d) I understand and agree that this project and the contribution\n are public and that a record of the contribution (including all\n personal information I submit with it, including my sign-off) is\n maintained indefinitely and may be redistributed consistent with\n this project or the open source license(s) involved.\n" }, { "alpha_fraction": 0.4796615540981293, "alphanum_fraction": 0.4842173755168915, "avg_line_length": 28.548076629638672, "blob_id": "2e1d4f8ef4e4792184117d5dd7af73cbbf614736", "content_id": "a700d40fdf23a64f7de54ca30817e05625255d52", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 3073, "license_type": "permissive", "max_line_length": 75, "num_lines": 104, "path": "/src/shader/wasm/pre.js", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "(function () {\n // To potentially support IE11, not using ES6 syntax (const)\n var buffers = {};\n onmessage = function (event) {\n switch (event.data.type) {\n case \"alloc\":\n var result = Module._webdnn_malloc(event.data.byteLength);\n if (result === 0) {\n postMessage({ type: \"error\", message: 
\"Memory alloc failed\" });\n } else {\n buffers[event.data.bufferId] = {\n byteLength: event.data.byteLength,\n ptr: result,\n };\n }\n break;\n case \"destroy\":\n var buffer = buffers[event.data.bufferId];\n if (buffer) {\n Module._webdnn_free(buffer.ptr);\n delete buffers[event.data.bufferId];\n } else {\n postMessage({\n type: \"error\",\n message: \"Destroying non-existing buffer\",\n });\n }\n break;\n case \"write\":\n var buffer = buffers[event.data.bufferId];\n if (buffer) {\n var dataBufView = new Uint8Array(\n Module.HEAPU8.buffer,\n buffer.ptr,\n buffer.byteLength\n );\n dataBufView.set(event.data.data);\n } else {\n postMessage({\n type: \"error\",\n message: \"Writing non-existing buffer\",\n });\n }\n break;\n case \"read\":\n var buffer = buffers[event.data.bufferId];\n if (buffer) {\n var dataBufView = new Uint8Array(\n Module.HEAPU8.buffer,\n buffer.ptr,\n buffer.byteLength\n );\n var result = new Uint8Array(buffer.byteLength);\n result.set(dataBufView);\n postMessage({ type: \"read\", data: result });\n } else {\n postMessage({\n type: \"error\",\n message: \"Reading non-existing buffer\",\n });\n }\n break;\n case \"runKernel\":\n var kernelFunction = Module[\"_\" + event.data.name];\n if (kernelFunction) {\n var args = [];\n var ok = true;\n for (var i = 0; i < event.data.args.length; i++) {\n var arg = event.data.args[i];\n if (arg.type === \"tensor\") {\n var buffer = buffers[arg.bufferId];\n if (!buffer) {\n ok = false;\n postMessage({\n type: \"error\",\n message: \"Tensor argument of kernel call does not exist\",\n });\n }\n args.push(buffer.ptr);\n } else {\n args.push(arg.value);\n }\n }\n if (ok) {\n kernelFunction.apply(null, args);\n }\n } else {\n postMessage({ type: \"error\", message: \"Kernel not found\" });\n }\n break;\n }\n };\n\n Module.onRuntimeInitialized = function () {\n postMessage({ type: \"initializeComplete\" });\n };\n\n var bstr = atob(\"WASM_WORKER_WASM_BINARY_BASE64\");\n var ary = new Uint8Array(bstr.length);\n for (var i = 0; i < bstr.length; i++) {\n ary[i] = bstr.charCodeAt(i);\n }\n Module.wasmBinary = ary;\n})();\n" }, { "alpha_fraction": 0.5885835886001587, "alphanum_fraction": 0.5935575366020203, "avg_line_length": 41.220001220703125, "blob_id": "116b1effa970fbc1354fe20f2aaec9533692462c", "content_id": "3a12ca96ecc3dacbc7eb6960d1a6fd380d7db14f", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4282, "license_type": "permissive", "max_line_length": 136, "num_lines": 100, "path": "/src/graph_transpiler/webdnn/pass_fusion_unary.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "from typing import Dict, Iterable, List, Optional\nimport onnx\n\nfrom webdnn.optimization_pass import OptimizationPass, OptimizationPassResult\nfrom webdnn.util import make_random_identifier\nfrom webdnn.operator_shader import OperatorShader\n\nUNARY_OPERATORS = [\"Ceil\", \"Exp\", \"Floor\", \"Relu\", \"Sigmoid\", \"Sqrt\", \"Tanh\"]\n\nclass PassFusionUnary(OptimizationPass):\n def optimize(self, model: onnx.ModelProto) -> Optional[OptimizationPassResult]:\n target_nodes_list = self._find_target_nodes(model)\n all_custom_shaders = {}\n for target_nodes in target_nodes_list:\n custom_shaders = self._fuse_combi(model, target_nodes)\n all_custom_shaders.update(custom_shaders)\n result = None\n if len(all_custom_shaders) > 0:\n result = self._construct_result()\n result.operator_shaders = all_custom_shaders\n return result\n \n def _construct_result(self):\n 
raise NotImplementedError\n \n def _find_target_nodes(self, model: onnx.ModelProto) -> List[List[onnx.NodeProto]]:\n graph = model.graph\n unary_nodes = []\n unary_input_names = set()\n unary_output_names = set()\n for node in graph.node:\n if node.op_type in UNARY_OPERATORS:\n unary_nodes.append(node)\n unary_input_names.add(node.input[0])\n unary_output_names.add(node.output[0])\n # The operator producing this variable and the operator consuming it can be fused\n unary_to_unary_vars = unary_input_names & unary_output_names\n merge_combis = []\n for v in unary_to_unary_vars:\n prev_node = self._find_node_which_output(v, graph.node)\n next_nodes = self._find_nodes_which_consume_input(v, graph.node)\n if len(next_nodes) > 1:\n continue\n merge_combis.append([prev_node, next_nodes[0]])\n # Search for chains of three or more nodes\n while True:\n changed = False\n for combi in merge_combis:\n head = combi[0]\n joinable = None\n for combi2 in merge_combis:\n if combi2[-1] == head:\n joinable = combi2\n break\n if joinable is not None:\n merge_combis.remove(combi)\n merge_combis.remove(joinable)\n new_combi = joinable + combi[1:]\n merge_combis.append(new_combi)\n changed = True\n break\n if not changed:\n break\n return merge_combis\n\n def _find_nodes_which_consume_input(self, name: str, nodes: Iterable[onnx.NodeProto]) -> List[onnx.NodeProto]:\n return [node for node in nodes if name in node.input]\n \n def _find_node_which_output(self, name: str, nodes: Iterable[onnx.NodeProto]) -> Optional[onnx.NodeProto]:\n rets = [node for node in nodes if name in node.output]\n assert len(rets) <= 1\n if len(rets) == 1:\n return rets[0]\n return None\n\n def _fuse_combi(self, model: onnx.ModelProto, nodes: List[onnx.NodeProto]) -> Dict[str, OperatorShader]:\n # replace the matched nodes with a single custom node\n custom_node_type = make_random_identifier()\n input_name = nodes[0].input[0]\n output_name = nodes[-1].output[0]\n shader = self._make_shader(custom_node_type, nodes)\n custom_node = onnx.helper.make_node(op_type=custom_node_type, inputs=[input_name], outputs=[output_name], name=custom_node_type)\n self._remove_insert_node(model, nodes, custom_node)\n return {custom_node_type: shader}\n\n def _remove_insert_node(self, model: onnx.ModelProto, remove_nodes: List[onnx.NodeProto], insert_node: onnx.NodeProto):\n graph = model.graph\n insert_pos = 0\n for i in range(len(graph.node)):\n if graph.node[i] == remove_nodes[0]:\n insert_pos = i\n break\n else:\n raise ValueError\n for node in remove_nodes:\n graph.node.remove(node)\n graph.node.insert(insert_pos, insert_node)\n\n def _make_shader(self, custom_op_type: str, nodes: List[onnx.NodeProto]) -> OperatorShader:\n raise NotImplementedError\n" }, { "alpha_fraction": 0.5761024355888367, "alphanum_fraction": 0.5822664499282837, "avg_line_length": 31.44615364074707, "blob_id": "d8f5bcd65136aeab301813e34d4303b913ee4766", "content_id": "0759ba5b301778419897a5b2982dadad4461f785", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 2109, "license_type": "permissive", "max_line_length": 80, "num_lines": 65, "path": "/src/descriptor_runner/operators/cpu/operators/standard/instancenormalization.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { onnx } from \"onnx-proto\";\nimport { OperatorImpl } from \"../../../operatorImpl\";\nimport { getAttrInt } from \"../../../operatorUtil\";\nimport { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { 
OperatorEntry } from \"../../../../interface/core/operator\";\nimport { arrayProd } from \"../../../../util\";\n\nclass InstanceNormalization extends OperatorImpl {\n epsilon!: number;\n\n constructor() {\n super(\"cpu\");\n }\n\n initialize(attribute: onnx.IAttributeProto[]): void {\n super.initialize(attribute);\n this.epsilon = getAttrInt(attribute, \"epsilon\", 1e-5);\n }\n\n async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsCPUTensorArray(inputs);\n const [input, scale, bias] = inputs;\n const reductionLength = arrayProd(input.dims.slice(2)),\n output = context.emptyTensor(input.dims, input.dataType),\n dI = input.data,\n dO = output.data,\n dS = scale.data,\n dB = bias.data;\n const [dimBatch, dimCh] = input.dims;\n const [strideBatch, strideCh] = input.strides;\n for (let batch = 0; batch < dimBatch; batch++) {\n for (let ch = 0; ch < dimCh; ch++) {\n const ofs = batch * strideBatch + ch * strideCh;\n let sum = 0.0;\n let sqsum = 0.0;\n for (let r = 0; r < reductionLength; r++) {\n const v = dI[ofs + r];\n sum += v;\n sqsum += v * v;\n }\n const mean = sum / reductionLength;\n const variance = sqsum / reductionLength - mean * mean;\n const invstd = 1 / Math.sqrt(variance + this.epsilon);\n const chscale = dS[ch] * invstd;\n const chbias = -mean * chscale + dB[ch];\n for (let r = 0; r < reductionLength; r++) {\n dO[ofs + r] = dI[ofs + r] * chscale + chbias;\n }\n }\n }\n return [output];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"InstanceNormalization\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new InstanceNormalization(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.553747296333313, "alphanum_fraction": 0.5704496502876282, "avg_line_length": 27.303030014038086, "blob_id": "9db72f176c827bfb1065afd760350756de4378c2", "content_id": "31f2dc2f36ab192711d06e636739ae683229c58e", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 4726, "license_type": "permissive", "max_line_length": 97, "num_lines": 165, "path": "/src/descriptor_runner/operators/webgl/operators/standard/binary7.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { OperatorImpl } from \"../../../operatorImpl\";\nimport {\n WebDNNWebGLContext,\n WebGLUniformItem,\n} from \"../../../../interface/backend/webgl/webglContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { broadcastMulti } from \"../../../operatorUtil\";\nimport {\n shaderGenHeader,\n shaderGenOutput,\n shaderGenTensorNDGet,\n shaderGenTensorNDGetUniformItem,\n shaderGenTensorOutputCoordsWithReturn,\n shaderGenTensorOutputUniform,\n shaderGenTensorOutputUniformItem,\n} from \"../../shaderHelper\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\nexport class WebGLBinary7 extends OperatorImpl {\n constructor(\n public kernelName: string,\n private binaryCalculationSource: string\n ) {\n super(\"webgl\");\n }\n\n async run(context: WebDNNWebGLContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsWebGLTensorArray(inputs);\n const inputA = inputs[0],\n inputB = inputs[1];\n if (inputA.dataType !== \"float32\" || inputB.dataType !== \"float32\") {\n throw new Error();\n }\n // Assumes matching texture sizes for elementwise access\n if (inputA.dimPerPixel !== 1 || inputB.dimPerPixel !== 1) {\n throw new Error();\n }\n\n const { dims: outShape, allStrides: inAllStrides } = broadcastMulti([\n inputA.dims,\n inputB.dims,\n 
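// (assumption: broadcastMulti returns the common broadcast shape plus per-input strides, with stride 0 along broadcast axes)\n 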
]),\n outputTensor = context.emptyTensor(outShape, \"float32\"),\n /*\n * Gl_FragCoord.x: 0.5, 1.5, 2.5, ..., textureWidth-0.5\n * Texture2D(textureName, vec2(x, y)): x=(0.5, 1.5, 2.5, ...) / textureWidth\n */\n outNdim = outShape.length,\n kernelName = `${this.kernelName}_${outNdim}`;\n if (!context.hasKernel(kernelName)) {\n let idxs: string;\n switch (outNdim) {\n case 0:\n idxs = \"\";\n break;\n case 1:\n idxs = \"tex_output_0\";\n break;\n case 2:\n idxs = \"tex_output_0, tex_output_1\";\n break;\n case 3:\n idxs = \"tex_output_0, tex_output_1, tex_output_2\";\n break;\n case 4:\n idxs = \"tex_output_0, tex_output_1, tex_output_2, tex_output_3\";\n break;\n case 5:\n idxs =\n \"tex_output_0, tex_output_1, tex_output_2, tex_output_3, tex_output_4\";\n break;\n case 6:\n idxs =\n \"tex_output_0, tex_output_1, tex_output_2, tex_output_3, tex_output_4, tex_output_5\";\n break;\n default:\n throw new Error();\n }\n const kernelSource = `${shaderGenHeader(context.webgl2)}\n \n${shaderGenTensorOutputUniform(outNdim)}\n${shaderGenTensorNDGet(\"tex_input_a\", outNdim, context.webgl2)}\n${shaderGenTensorNDGet(\"tex_input_b\", outNdim, context.webgl2)}\n\n void main() {\n ${shaderGenTensorOutputCoordsWithReturn(outNdim)}\n float sa = get_tex_input_a(${idxs});\n float sb = get_tex_input_b(${idxs});\n ${this.binaryCalculationSource}\n ${shaderGenOutput(\"v\", context.webgl2)}\n return;\n }\n `;\n context.addKernel(kernelName, kernelSource);\n }\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_a\",\n inAllStrides[0],\n inputA,\n context.webgl2\n ),\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_b\",\n inAllStrides[1],\n inputB,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem(\n outShape,\n outputTensor,\n context.webgl2\n ),\n ];\n\n await context.runKernel(\n kernelName,\n [\n { tensor: inputA, name: \"tex_input_a\" },\n { tensor: inputB, name: \"tex_input_b\" },\n ],\n outputTensor,\n uniforms\n );\n return [outputTensor];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n // Add, Sub, Mul, Div, Pow: opset under 7 requires explicit broadcast flag\n {\n opType: \"Add\",\n backend: \"webgl\",\n opsetMin: 7,\n factory: () => new WebGLBinary7(\"add\", \"float v = sa + sb;\"),\n },\n {\n opType: \"Sub\",\n backend: \"webgl\",\n opsetMin: 7,\n factory: () => new WebGLBinary7(\"sub\", \"float v = sa - sb;\"),\n },\n {\n opType: \"Mul\",\n backend: \"webgl\",\n opsetMin: 7,\n factory: () => new WebGLBinary7(\"mul\", \"float v = sa * sb;\"),\n },\n {\n opType: \"Div\",\n backend: \"webgl\",\n opsetMin: 7,\n factory: () => new WebGLBinary7(\"div\", \"float v = sa / sb;\"),\n },\n // Pow(-1.1, 2) is error in GLSL, but useful in normalization algorithm\n {\n opType: \"Pow\",\n backend: \"webgl\",\n opsetMin: 7,\n factory: () => new WebGLBinary7(\"pow\", \"float v = pow(abs(sa), sb);\"),\n },\n ];\n}\n" }, { "alpha_fraction": 0.5081288814544678, "alphanum_fraction": 0.5329589247703552, "avg_line_length": 31.21904754638672, "blob_id": "e2c4320311ff25279800fa93f02a3cf92b04ed35", "content_id": "a9930384480866ce9138ab90c65451298c94f50e", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 3383, "license_type": "permissive", "max_line_length": 80, "num_lines": 105, "path": "/src/descriptor_runner/operators/base/averagepool.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { onnx } from \"onnx-proto\";\nimport { Backend } 
from \"../../interface/core/constants\";\nimport { OperatorImpl } from \"../operatorImpl\";\nimport { getAttrInt, getAttrInts, getAttrString } from \"../operatorUtil\";\n\n// Version 11\nexport abstract class AveragePool extends OperatorImpl {\n autoPad!: string;\n ceilMode!: boolean;\n countIncludePad!: boolean;\n kernelShape!: number[]; // [y, x]\n pads!: number[]; // [y_begin, x_begin, y_end, x_end]\n strides!: number[]; // [y, x]\n\n constructor(backend: Backend) {\n super(backend);\n }\n\n initialize(attribute: onnx.IAttributeProto[]): void {\n super.initialize(attribute);\n this.autoPad = getAttrString(attribute, \"auto_pad\", \"NOTSET\");\n this.ceilMode = getAttrInt(attribute, \"ceil_mode\", 0) !== 0;\n this.countIncludePad = getAttrInt(attribute, \"count_include_pad\", 0) !== 0;\n this.kernelShape = getAttrInts(attribute, \"kernel_shape\", []);\n this.pads = getAttrInts(attribute, \"pads\", []);\n this.strides = getAttrInts(attribute, \"strides\", []);\n }\n\n protected calcShape(dimsX: ReadonlyArray<number>) {\n const batch = dimsX[0],\n { kernelShape } = this,\n strides = this.strides.length > 0 ? this.strides : [1, 1],\n inShape = [dimsX[2], dimsX[3]];\n let outShape: number[];\n let pads: number[];\n if (this.autoPad === \"NOTSET\") {\n pads = this.pads.length > 0 ? this.pads : [0, 0, 0, 0];\n if (this.ceilMode) {\n outShape = [\n Math.ceil(\n (inShape[0] + pads[0] + pads[2] - kernelShape[0]) / strides[0]\n ) + 1,\n Math.ceil(\n (inShape[1] + pads[1] + pads[3] - kernelShape[1]) / strides[1]\n ) + 1,\n ];\n } else {\n outShape = [\n Math.floor(\n (inShape[0] + pads[0] + pads[2] - kernelShape[0]) / strides[0]\n ) + 1,\n Math.floor(\n (inShape[1] + pads[1] + pads[3] - kernelShape[1]) / strides[1]\n ) + 1,\n ];\n }\n } else if (this.autoPad === \"SAME_UPPER\" || this.autoPad === \"SAME_LOWER\") {\n // calculate output shape as if padding is zero\n outShape = [\n Math.ceil(inShape[0] / strides[0]),\n Math.ceil(inShape[1] / strides[1]),\n ];\n // calculate needed padding\n const sumPad = [\n (outShape[0] - 1) * strides[0] + kernelShape[0] - inShape[0],\n (outShape[1] - 1) * strides[1] + kernelShape[1] - inShape[1],\n ];\n if (this.autoPad === \"SAME_UPPER\") {\n pads = [\n Math.floor(sumPad[0] / 2),\n Math.floor(sumPad[1] / 2),\n Math.ceil(sumPad[0] / 2),\n Math.ceil(sumPad[1] / 2),\n ];\n } else if (this.autoPad === \"SAME_LOWER\") {\n pads = [\n Math.ceil(sumPad[0] / 2),\n Math.ceil(sumPad[1] / 2),\n Math.floor(sumPad[0] / 2),\n Math.floor(sumPad[1] / 2),\n ];\n } else {\n throw new Error();\n }\n } else if (this.autoPad === \"VALID\") {\n outShape = [\n Math.ceil((inShape[0] - kernelShape[0] + 1) / strides[0]),\n Math.ceil((inShape[1] - kernelShape[1] + 1) / strides[1]),\n ];\n pads = [0, 0, 0, 0];\n } else {\n throw new Error(`Unknown auto_pad ${this.autoPad} for AveragePool`);\n }\n const ch = dimsX[1];\n return {\n batch,\n kernelShape,\n pads,\n strides,\n inShape,\n outShape,\n ch,\n };\n }\n}\n" }, { "alpha_fraction": 0.6145594120025635, "alphanum_fraction": 0.6295019388198853, "avg_line_length": 27.064516067504883, "blob_id": "ca1d455520df1285823555e8d16ff6f327b5a0fd", "content_id": "bbc1e74fa8087a5c8d8741e53cf446d55c0786f1", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 2666, "license_type": "permissive", "max_line_length": 89, "num_lines": 93, "path": "/src/descriptor_runner/operators/webgl/operators/standard/clip.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": 
"import { OperatorImpl } from \"../../../operatorImpl\";\nimport {\n WebDNNWebGLContext,\n WebGLUniformItem,\n} from \"../../../../interface/backend/webgl/webglContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport {\n shaderGenHeader,\n shaderGenOutput,\n shaderGenTensorElementwiseGet,\n shaderGenTensorElementwiseGetUniformItem,\n} from \"../../shaderHelper\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport { onnx } from \"onnx-proto\";\nimport { getAttrFloat } from \"../../../operatorUtil\";\n\nexport class WebGLClip extends OperatorImpl {\n clipMax!: number;\n clipMin!: number;\n\n constructor() {\n super(\"webgl\");\n }\n\n initialize(attribute: onnx.IAttributeProto[]): void {\n super.initialize(attribute);\n this.clipMax = getAttrFloat(attribute, \"max\", 65536);\n this.clipMin = getAttrFloat(attribute, \"min\", -65536);\n }\n\n async run(context: WebDNNWebGLContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsWebGLTensorArray(inputs);\n const input = inputs[0];\n if (input.dataType !== \"float32\") {\n throw new Error();\n }\n const outputTensor = context.emptyTensor(input.dims, \"float32\");\n // Elementwiseのアクセスにおいてテクスチャサイズが同じであることを仮定\n if (\n input.textureWidth !== outputTensor.textureWidth ||\n input.textureHeight !== outputTensor.textureHeight ||\n input.dimPerPixel !== 1\n ) {\n throw new Error();\n }\n\n /*\n * Gl_FragCoord.x: 0.5, 1.5, 2.5, ..., textureWidth-0.5\n * texture2D(textureName, vec2(x, y)): x=(0.5, 1.5, 2.5, ...) / textureWidth\n */\n const kernelName = `clip_${this.clipMax}_${this.clipMin}`;\n if (!context.hasKernel(kernelName)) {\n const kernelSource = `${shaderGenHeader(context.webgl2)}\n ${shaderGenTensorElementwiseGet(\"tex_input\", context.webgl2)}\n void main() {\n float s = get_tex_input();\n float v = clamp(s, ${this.clipMin.toExponential()}, ${this.clipMax.toExponential()});\n ${shaderGenOutput(\"v\", context.webgl2)}\n return;\n }\n `;\n context.addKernel(kernelName, kernelSource);\n }\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorElementwiseGetUniformItem(\n \"tex_input\",\n input,\n context.webgl2\n ),\n ];\n\n await context.runKernel(\n kernelName,\n [{ tensor: input, name: \"tex_input\" }],\n outputTensor,\n uniforms\n );\n return [outputTensor];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Clip\",\n backend: \"webgl\",\n opsetMin: 1,\n opsetMax: 11,\n factory: () => new WebGLClip(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.6261432766914368, "alphanum_fraction": 0.6261432766914368, "avg_line_length": 35.44444274902344, "blob_id": "7e1ef64e70e6164d94512f006648a9ebbcd5200e", "content_id": "6f4a6c5d21d72390909960e01a598f79c58b7165", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 5248, "license_type": "permissive", "max_line_length": 90, "num_lines": 144, "path": "/src/descriptor_runner/index.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { Backend } from \"./interface/core/constants\";\nimport * as Image from \"./image\";\nimport * as Math from \"./math\";\nexport { Image, Math };\nimport { WebDNNLogging } from \"./logging\";\nexport { WebDNNLogging as Logging };\nimport { BackendContexts, RunnerImpl } from \"./core/runnerImpl\";\nimport { WebDNNCPUContextImpl } from \"./backend/cpu/cpuContextImpl\";\nimport { WebDNNWebGLContextImpl } from \"./backend/webgl/webglContextImpl\";\nimport { WebDNNWasmContextImpl } 
from \"./backend/wasm/wasmContextImpl\";\nimport { WebDNNWebGPUContextImpl } from \"./backend/webgpu/webgpuContextImpl\";\nimport { wasmWorkerSrcUrl } from \"./operators/wasm/worker/worker\";\nimport { registerOperators } from \"./core/operatorTable\";\nimport { getOpEntries as getOpEntriesCPU } from \"./operators/cpu/opEntriesStandard\";\nimport { getOpEntries as getOpEntriesWasm } from \"./operators/wasm/opEntriesStandard\";\nimport { getOpEntries as getOpEntriesWebGL } from \"./operators/webgl/opEntriesStandard\";\nimport { getOpEntries as getOpEntriesWebGPU } from \"./operators/webgpu/opEntriesStandard\";\nimport { Runner } from \"./interface/core/runner\";\nimport { WebDNNWebGLContextOption } from \"./interface/backend/webgl/webglContext\";\nimport { WebDNNWebGPUContextOption } from \"./interface/backend/webgpu/webgpuContext\";\nexport { CPUTensorImpl as CPUTensor } from \"./backend/cpu/cpuTensorImpl\";\n\nexport interface InitOption {\n backendOrder?: Backend[];\n optimized?: boolean;\n backendOptions?: {\n wasm?: WebDNNWebGLContextOption;\n webgl?: WebDNNWebGLContextOption;\n webgpu?: WebDNNWebGPUContextOption;\n };\n progressCallback?: (loaded: number, total: number) => unknown;\n}\n\nconst defaultContexts = {\n cpu: null as WebDNNCPUContextImpl | null,\n wasm: null as WebDNNWasmContextImpl | null,\n webgl: null as WebDNNWebGLContextImpl | null,\n webgpu: null as WebDNNWebGPUContextImpl | null,\n};\n\nexport async function load(\n directory: string,\n options: InitOption = {}\n): Promise<Runner> {\n const { backendOrder = [\"webgl\", \"wasm\", \"cpu\"], optimized } = options;\n if (optimized) {\n throw new Error(\n \"Currently, webdnn.js does not support optimized model. Use webdnn-core.js instead.\"\n );\n }\n if (!defaultContexts.cpu) {\n defaultContexts.cpu = new WebDNNCPUContextImpl();\n await defaultContexts.cpu.initialize();\n registerOperators(getOpEntriesCPU());\n }\n const cpuContext: WebDNNCPUContextImpl = defaultContexts.cpu,\n backendContexts: BackendContexts = { cpu: cpuContext };\n let succeedBackend: Backend | null = null;\n for (const tryBackend of backendOrder) {\n switch (tryBackend) {\n case \"cpu\":\n succeedBackend = \"cpu\";\n break;\n case \"wasm\":\n {\n if (!defaultContexts.wasm) {\n try {\n const ctx = new WebDNNWasmContextImpl(\n cpuContext,\n options.backendOptions?.wasm || {}\n );\n await ctx.initialize(wasmWorkerSrcUrl);\n defaultContexts.wasm = ctx;\n registerOperators(getOpEntriesWasm());\n succeedBackend = \"wasm\";\n backendContexts.wasm = defaultContexts.wasm;\n // eslint-disable-next-line no-empty\n } catch {}\n } else {\n succeedBackend = \"wasm\";\n backendContexts.wasm = defaultContexts.wasm;\n }\n }\n break;\n case \"webgl\":\n {\n if (!defaultContexts.webgl) {\n try {\n const ctx = new WebDNNWebGLContextImpl(\n cpuContext,\n options.backendOptions?.webgl || {}\n );\n await ctx.initialize();\n defaultContexts.webgl = ctx;\n registerOperators(getOpEntriesWebGL());\n succeedBackend = \"webgl\";\n backendContexts.webgl = defaultContexts.webgl;\n // eslint-disable-next-line no-empty\n } catch {}\n } else {\n succeedBackend = \"webgl\";\n backendContexts.webgl = defaultContexts.webgl;\n }\n }\n break;\n\n case \"webgpu\":\n {\n if (!defaultContexts.webgpu) {\n try {\n const ctx = new WebDNNWebGPUContextImpl(\n cpuContext,\n options.backendOptions?.webgpu || {}\n );\n await ctx.initialize();\n defaultContexts.webgpu = ctx;\n registerOperators(getOpEntriesWebGPU());\n succeedBackend = \"webgpu\";\n backendContexts.webgpu = defaultContexts.webgpu;\n 
// eslint-disable-next-line no-empty\n } catch {}\n } else {\n succeedBackend = \"webgpu\";\n backendContexts.webgpu = defaultContexts.webgpu;\n }\n }\n break;\n default:\n throw new Error(`Unknown backend ${tryBackend}`);\n }\n\n if (succeedBackend) {\n break;\n }\n }\n if (!succeedBackend) {\n throw new Error(\"No backend available\");\n }\n const actualBackendOrder: Backend[] =\n succeedBackend === \"cpu\" ? [\"cpu\"] : [succeedBackend, \"cpu\"],\n runner = new RunnerImpl(actualBackendOrder, backendContexts);\n await runner.loadModel(directory, \"model.onnx\", options.progressCallback);\n return runner;\n}\n" }, { "alpha_fraction": 0.6163461804389954, "alphanum_fraction": 0.6221153736114502, "avg_line_length": 26.36842155456543, "blob_id": "de174b3d3e903ca79d23e68e97fa8a7dc4c0f977", "content_id": "9ca5ff588438a6349d6204ca29774f1ab3e8b585", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1160, "license_type": "permissive", "max_line_length": 80, "num_lines": 38, "path": "/src/descriptor_runner/operators/cpu/operators/standard/shape.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { Backend } from \"../../../../interface/core/constants\";\nimport { OperatorImpl } from \"../../../operatorImpl\";\nimport { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\nclass Shape extends OperatorImpl {\n constructor() {\n super(\"cpu\");\n }\n\n async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n // メタデータしか使わないので、どのバックエンドに存在してもよい\n const input = inputs[0],\n shapeData = new Int32Array(input.dims),\n output = context.emptyTensor([shapeData.length], \"int32\", shapeData);\n return [output];\n }\n\n getTensorBackendRequirement(\n nInputs: number,\n nOutputs: number\n ): (Backend | null)[] {\n // メタデータしか使わないので、どのバックエンドに存在してもよい\n return [null];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Shape\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new Shape(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.6817558407783508, "alphanum_fraction": 0.6817558407783508, "avg_line_length": 23.299999237060547, "blob_id": "6938ff53c681105ea5d5dce544c82fdd9f0b31b7", "content_id": "73a495720cb85f9550d2385511e2a238b89f76f8", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 729, "license_type": "permissive", "max_line_length": 54, "num_lines": 30, "path": "/src/descriptor_runner/interface/core/operator.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { onnx } from \"onnx-proto\";\nimport { BackendContext } from \"./backendContext\";\nimport { Backend } from \"./constants\";\nimport { Tensor } from \"./tensor\";\n\nexport interface Operator {\n backend: Backend;\n run(\n context: BackendContext,\n inputs: Tensor[],\n nOutputs: number\n ): Promise<Tensor[]>;\n initialize(attribute: onnx.IAttributeProto[]): void;\n getTensorBackendRequirement(\n nInputs: number,\n nOutputs: number\n ): (Backend | null)[];\n}\n\nexport interface OperatorEntry {\n opType: string;\n // Inclusive\n opsetMin: number;\n // Exclusive, undefined means infinite\n opsetMax?: number;\n // Operator set domain. 
Not yet supported.\n  domain?: string;\n  backend: Backend;\n  factory: () => Operator;\n}\n" }, { "alpha_fraction": 0.6087031960487366, "alphanum_fraction": 0.6222260594367981, "avg_line_length": 27.36842155456543, "blob_id": "82a9ef02398958e7374146af7f20c815479a0069", "content_id": "383669425990f1a0a79f2beb1e38180dc0314691", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 5768, "license_type": "permissive", "max_line_length": 77, "num_lines": 203, "path": "/src/descriptor_runner/core/tensorLoaderImpl.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { onnx } from \"onnx-proto\";\nimport { CPUTensor } from \"..\";\nimport { WebDNNCPUContext } from \"../interface/backend/cpu/cpuContext\";\nimport { DataArrayTypes, DataType } from \"../interface/core/constants\";\nimport { TensorLoader } from \"../interface/core/tensorLoader\";\nimport { arrayProd, arraySum } from \"../util\";\nimport { decodeTensorEightbit } from \"./tensorDecoder/decodeTensorEightbit\";\nimport { decodeTensorRaw } from \"./tensorDecoder/decodeTensorRaw\";\n\nconst signatureFile = 843990103, // \"WDN2\"\n  signatureTensor = 1397638484, // \"TENS\"\n  signatureClose = 1397705795; // \"CLOS\"\n\nexport class TensorLoaderImpl implements TensorLoader {\n  paths: string[];\n\n  constructor(path: string[] | string, public cpuContext: WebDNNCPUContext) {\n    if (typeof path === \"string\") {\n      this.paths = [path];\n    } else {\n      this.paths = path;\n    }\n  }\n\n  async loadAll(\n    progressCallback?: (loadedBytes: number) => unknown\n  ): Promise<Map<string, CPUTensor>> {\n    const fileArray = await this.fetchAllFile(progressCallback),\n      view = new DataView(\n        fileArray.buffer,\n        fileArray.byteOffset,\n        fileArray.byteLength\n      );\n    if (signatureFile !== view.getUint32(0, true)) {\n      throw new Error(\"Unexpected file signature\");\n    }\n    let offset = 4;\n    const tensors = new Map<string, CPUTensor>();\n    let close = false;\n    while (!close) {\n      const chunkInfo = this.extractChunk(fileArray.buffer, offset);\n      switch (chunkInfo.signature) {\n        case signatureTensor:\n          {\n            const { name, tensor } = this.parseTensorChunk(\n              fileArray.buffer,\n              chunkInfo.bodyByteOffset,\n              chunkInfo.bodyByteLength\n            );\n            tensors.set(name, tensor);\n          }\n          break;\n        case signatureClose:\n          close = true;\n          break;\n      }\n      offset = chunkInfo.nextByteOffset;\n    }\n    return tensors;\n  }\n\n  private async fetchAllFile(\n    progressCallback?: (loadedBytes: number) => unknown\n  ): Promise<Uint8Array> {\n    const abs: ArrayBuffer[] = [];\n    let loadedBytes = 0;\n    progressCallback?.(loadedBytes);\n    for (const path of this.paths) {\n      const f = await fetch(path),\n        ab = await f.arrayBuffer();\n      abs.push(ab);\n      loadedBytes += ab.byteLength;\n      progressCallback?.(loadedBytes);\n    }\n    const totalLength = arraySum(abs.map((ab) => ab.byteLength)),\n      concatArray = new Uint8Array(totalLength);\n    let ofs = 0;\n    for (const ab of abs) {\n      const src = new Uint8Array(ab);\n      concatArray.set(src, ofs);\n      ofs += src.byteLength;\n    }\n\n    return concatArray;\n  }\n\n  private extractChunk(\n    buf: ArrayBuffer,\n    byteOffset: number\n  ): {\n    signature: number;\n    nextByteOffset: number;\n    bodyByteOffset: number;\n    bodyByteLength: number;\n  } {\n    const view = new DataView(buf, byteOffset);\n    if (view.byteLength < 8) {\n      throw new Error(\"Unexpected EOF\");\n    }\n    const signature = view.getUint32(0, true),\n      bodyByteLength = view.getUint32(4, true),\n      bodyByteOffset = byteOffset + 8;\n    if (view.byteLength < 8 + 
bodyByteLength) {\n throw new Error(\"Unexpected EOF\");\n }\n const nextByteOffset = bodyByteOffset + bodyByteLength;\n return { signature, bodyByteLength, bodyByteOffset, nextByteOffset };\n }\n\n private parseTensorChunk(\n buf: ArrayBuffer,\n bodyByteOffset: number,\n bodyByteLength: number\n ): { name: string; tensor: CPUTensor } {\n const view = new DataView(buf, bodyByteOffset, bodyByteLength);\n\n let ofs = 0;\n const compressionAlgorithm = view.getUint8(ofs);\n ofs += 1;\n const bodyCompressedLength = view.getUint32(ofs, true);\n ofs += 4;\n const dataType = view.getUint8(ofs);\n ofs += 1;\n const ndim = view.getUint8(ofs);\n ofs += 1;\n const dims: number[] = [];\n for (let i = 0; i < ndim; i++) {\n dims.push(view.getUint32(ofs, true));\n ofs += 4;\n }\n const numel = arrayProd(dims),\n nameLength = view.getUint32(ofs, true);\n ofs += 4;\n const name = this.parseString(buf, bodyByteOffset + ofs, nameLength);\n ofs += nameLength;\n const extraLength = view.getUint32(ofs, true);\n ofs += 4;\n // Skip extra data\n ofs += extraLength;\n\n const data = this.parseTensorBody(\n buf,\n compressionAlgorithm,\n bodyByteOffset + ofs,\n bodyCompressedLength,\n dataType,\n numel\n );\n let dataTypeString: DataType;\n switch (dataType) {\n case onnx.TensorProto.DataType.FLOAT:\n dataTypeString = \"float32\";\n break;\n case onnx.TensorProto.DataType.INT32:\n dataTypeString = \"int32\";\n break;\n default:\n throw new Error(\"Unsupported DataType\");\n }\n const tensor = this.cpuContext.emptyTensor(dims, dataTypeString, data);\n return { name, tensor };\n }\n\n private parseString(\n buf: ArrayBuffer,\n byteOffset: number,\n byteLength: number\n ): string {\n const view = new Uint8Array(buf, byteOffset, byteLength);\n // TODO: support UTF-8\n return String.fromCharCode(...Array.from(view));\n }\n\n private parseTensorBody(\n buf: ArrayBuffer,\n compressionAlgorithm: number,\n bodyByteOffset: number,\n bodyCompressedLength: number,\n dataType: number,\n numel: number\n ): DataArrayTypes {\n switch (compressionAlgorithm) {\n case 0:\n return decodeTensorRaw(\n buf,\n bodyByteOffset,\n bodyCompressedLength,\n dataType,\n numel\n );\n case 1:\n return decodeTensorEightbit(\n buf,\n bodyByteOffset,\n bodyCompressedLength,\n dataType,\n numel\n );\n default:\n throw new Error(\"Unexpected compression algorithm\");\n }\n }\n}\n" }, { "alpha_fraction": 0.6869747638702393, "alphanum_fraction": 0.6880252361297607, "avg_line_length": 30.733333587646484, "blob_id": "4f4bee9369eb20aa80510beaec7b0e69009e6e72", "content_id": "ed47c8419984bb407641b8a4f4e35fd52b726564", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 952, "license_type": "permissive", "max_line_length": 101, "num_lines": 30, "path": "/src/descriptor_runner/operators/operatorImpl.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { onnx } from \"onnx-proto\";\nimport { Backend } from \"../interface/core/constants\";\nimport { TensorImpl } from \"../core/tensorImpl\";\nimport { Operator } from \"../interface/core/operator\";\nimport { BackendContext } from \"../interface/core/backendContext\";\n\nexport abstract class OperatorImpl implements Operator {\n constructor(public backend: Backend) {}\n\n abstract run(\n context: BackendContext,\n inputs: TensorImpl[],\n nOutputs: number\n ): Promise<TensorImpl[]>;\n\n // eslint-disable-next-line @typescript-eslint/no-unused-vars, @typescript-eslint/no-empty-function\n initialize(attribute: 
onnx.IAttributeProto[]): void {}\n\n  getTensorBackendRequirement(\n    nInputs: number,\n    // eslint-disable-next-line @typescript-eslint/no-unused-vars\n    nOutputs: number\n  ): (Backend | null)[] {\n    const backends: Backend[] = [];\n    for (let i = 0; i < nInputs; i++) {\n      backends.push(this.backend);\n    }\n    return backends;\n  }\n}\n" }, { "alpha_fraction": 0.464497447013855, "alphanum_fraction": 0.48945263028144836, "avg_line_length": 26.57794761657715, "blob_id": "f51bcd2dd0a7218495f961b733a7ccdafaa9934b", "content_id": "dc63306c10e637312ff63e3c3800ac5030b006e0", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 7261, "license_type": "permissive", "max_line_length": 163, "num_lines": 263, "path": "/src/descriptor_runner/operators/cpu/operators/standard/convtranspose.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { ConvTranspose } from \"../../../base/convtranspose\";\n\nclass CpuConvTranspose extends ConvTranspose {\n  constructor() {\n    super(\"cpu\");\n  }\n\n  async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n    context.assertsCPUTensorArray(inputs);\n    const inputX = inputs[0],\n      inputW = inputs[1],\n      inputB = inputs[2];\n    // TODO: support dimensions other than 2D\n    if (inputX.ndim !== 4) {\n      throw new Error(\"ConvTranspose other than 2D is not yet supported\");\n    }\n    const {\n      batch,\n      dilations,\n      group,\n      kernelShape,\n      pads,\n      strides,\n      inShape,\n      outShape,\n      chIn,\n      chInPerGroup,\n      chOut,\n      chOutPerGroup,\n    } = this.calcShape(inputX.dims, inputW.dims),\n      // group, batch, inShape[0], inShape[1], chInPerGroup\n      inputTransposeData = new Float32Array(\n        chIn * batch * inShape[0] * inShape[1]\n      ),\n      // group, chOutPerGroup, kernelShape[0], kernelShape[1], chInPerGroup\n      weightTransposeData = new Float32Array(\n        chOut * kernelShape[0] * kernelShape[1] * chInPerGroup\n      ),\n      // group, batch, inShape[0], inShape[1], chOutPerGroup, kernelShape[0], kernelShape[1]\n      matmulData = new Float32Array(\n        chOut *\n          batch *\n          inShape[0] *\n          inShape[1] *\n          kernelShape[0] *\n          kernelShape[1]\n      ),\n      col2ImData = new Float32Array(batch * chOut * outShape[0] * outShape[1]);\n    this.transposeInput(\n      inputX.data as Float32Array,\n      inputTransposeData,\n      group,\n      batch,\n      inShape[0] * inShape[1],\n      chInPerGroup\n    );\n    this.transposeWeight(\n      inputW.data as Float32Array,\n      weightTransposeData,\n      group,\n      chInPerGroup,\n      chOutPerGroup,\n      kernelShape[0] * kernelShape[1]\n    );\n    this.matmul(\n      inputTransposeData,\n      weightTransposeData,\n      matmulData,\n      group,\n      batch * inShape[0] * inShape[1],\n      chOutPerGroup * kernelShape[0] * kernelShape[1],\n      chInPerGroup\n    );\n    this.col2im(\n      matmulData,\n      col2ImData,\n      batch,\n      dilations,\n      group,\n      kernelShape,\n      pads,\n      strides,\n      inShape,\n      outShape,\n      chOutPerGroup\n    );\n    if (inputB) {\n      this.bias(\n        inputB.data as Float32Array,\n        col2ImData,\n        batch,\n        chOut,\n        outShape[0] * outShape[1]\n      );\n    }\n    const output = context.emptyTensor(\n      [batch, chOut, outShape[0], outShape[1]],\n      \"float32\",\n      col2ImData\n    );\n    return [output];\n  }\n\n  private col2im(\n    dI: Float32Array,\n    dY: Float32Array,\n    batch: number,\n    dilations: number[],\n    group: number,\n    kernelShape: number[],\n    pads: number[],\n    strides: number[],\n    inShape: number[],\n    outShape: number[],\n    
chOutPerGroup: number\n ): void {\n let idx = 0;\n // dI: group, batch, inShape[0], inShape[1], chOutPerGroup, kernelShape[0], kernelShape[1]\n // dY: batch, group, chOutPerGroup, outShape[0], outShape[1]\n for (let b = 0; b < batch; b++) {\n for (let g = 0; g < group; g++) {\n for (let co = 0; co < chOutPerGroup; co++) {\n for (let o0 = 0; o0 < outShape[0]; o0++) {\n for (let o1 = 0; o1 < outShape[1]; o1++) {\n let v = 0;\n for (let k0 = 0; k0 < kernelShape[0]; k0++) {\n for (let k1 = 0; k1 < kernelShape[1]; k1++) {\n const i0s = o0 + pads[0] - k0 * dilations[0];\n const i1s = o1 + pads[1] - k1 * dilations[1];\n if (i0s % strides[0] !== 0 || i1s % strides[1] !== 0) {\n continue;\n }\n\n const i0 = i0s / strides[0];\n const i1 = i1s / strides[1];\n if (\n i0 < 0 ||\n i0 >= inShape[0] ||\n i1 < 0 ||\n i1 >= inShape[1]\n ) {\n continue;\n }\n v +=\n dI[\n (((((g * batch + b) * inShape[0] + i0) * inShape[1] +\n i1) *\n chOutPerGroup +\n co) *\n kernelShape[0] +\n k0) *\n kernelShape[1] +\n k1\n ];\n }\n }\n dY[idx++] = v;\n }\n }\n }\n }\n }\n }\n\n private matmul(\n dTX: Float32Array,\n dTW: Float32Array,\n dI: Float32Array,\n group: number,\n bin: number,\n cks: number,\n chInPerGroup: number\n ) {\n // dTX(group, batch*inShape[0]*inShape[1]=bin, chInPerGroup) * dTW(group, chOutPerGroup*kernelShape[0]*kernelShape[1]=cks, chInPerGroup) -> dI(group, bin, cks)\n for (let g = 0; g < group; g++) {\n for (let y = 0; y < bin; y++) {\n for (let x = 0; x < cks; x++) {\n let s = 0;\n const dtxofs = (g * bin + y) * chInPerGroup;\n const dtwofs = (g * cks + x) * chInPerGroup;\n for (let ip = 0; ip < chInPerGroup; ip++) {\n s += dTX[dtxofs + ip] * dTW[dtwofs + ip];\n }\n dI[(g * bin + y) * cks + x] = s;\n }\n }\n }\n }\n\n private transposeInput(\n dX: Float32Array,\n dTX: Float32Array,\n group: number,\n batch: number,\n inarea: number,\n chInPerGroup: number\n ) {\n // dX(batch, group, chInPerGroup, inShape[0], inShape[1]) -> dTX(group, batch, inShape[0], inShape[1], chInPerGroup)\n let idx = 0;\n for (let g = 0; g < group; g++) {\n for (let b = 0; b < batch; b++) {\n for (let x = 0; x < inarea; x++) {\n for (let c = 0; c < chInPerGroup; c++) {\n dTX[idx++] = dX[((b * group + g) * chInPerGroup + c) * inarea + x];\n }\n }\n }\n }\n }\n\n private transposeWeight(\n dW: Float32Array,\n dTW: Float32Array,\n group: number,\n chInPerGroup: number,\n chOutPerGroup: number,\n karea: number\n ) {\n // dW(group, chInPerGroup, chOutPerGroup, kernelShape[0], kernelShape[1]) -> dTW(group, chOutPerGroup, kernelShape[0], kernelShape[1], cInPerGroup)\n let idx = 0;\n for (let g = 0; g < group; g++) {\n for (let co = 0; co < chOutPerGroup; co++) {\n for (let k = 0; k < karea; k++) {\n for (let ci = 0; ci < chInPerGroup; ci++) {\n dTW[idx++] =\n dW[((g * chInPerGroup + ci) * chOutPerGroup + co) * karea + k];\n }\n }\n }\n }\n }\n\n private bias(\n dB: Float32Array,\n dO: Float32Array,\n batch: number,\n chOut: number,\n outarea: number\n ) {\n let idx = 0;\n for (let b = 0; b < batch; b++) {\n for (let c = 0; c < chOut; c++) {\n for (let x = 0; x < outarea; x++) {\n dO[idx++] += dB[c];\n }\n }\n }\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"ConvTranspose\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new CpuConvTranspose(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.6321348547935486, "alphanum_fraction": 0.6333536505699158, "avg_line_length": 28.656625747680664, "blob_id": "445829023f09eabfd78e1408af51b89c6dd61816", "content_id": 
"ff8849fc1d9b21f92ac02015facbec442dd99abd", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 4923, "license_type": "permissive", "max_line_length": 87, "num_lines": 166, "path": "/src/descriptor_runner/backend/wasm/wasmContextImpl.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { WebDNNCPUContext } from \"../../interface/backend/cpu/cpuContext\";\nimport {\n WasmKernelArgument,\n WebDNNWasmContext,\n WebDNNWasmContextOption,\n} from \"../../interface/backend/wasm/wasmContext\";\nimport { WasmTensor } from \"../../interface/backend/wasm/wasmTensor\";\nimport { DataArrayTypes, DataType } from \"../../interface/core/constants\";\nimport { Tensor } from \"../../interface/core/tensor\";\nimport { WasmSharedBuffer, WasmTensorImpl } from \"./wasmTensorImpl\";\nimport { WebDNNLogging } from \"../../logging\";\n\nconst logger = WebDNNLogging.getLogger(\"WebDNN.WebDNNWasmContextImpl\");\n\nexport class WebDNNWasmContextImpl implements WebDNNWasmContext {\n backend = \"wasm\" as const;\n\n initialized = false;\n\n private initializing = false;\n\n private worker!: Worker;\n\n private resolvers: ((ev: MessageEvent) => boolean | undefined)[] = [];\n\n private wasmWorkerSrcUrl!: string;\n\n perfTotalMemory = 0;\n\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n constructor(\n public cpuContext: WebDNNCPUContext,\n option: WebDNNWasmContextOption\n ) {\n if (typeof WebAssembly !== \"object\") {\n throw new Error(\"WebAssembly is not supported on this browser.\");\n }\n }\n\n async initialize(wasmWorkerSrcUrl: string): Promise<void> {\n if (this.initialized) {\n return;\n }\n if (this.initializing) {\n throw new Error(\"initialize is called while initialize is running\");\n }\n this.wasmWorkerSrcUrl = wasmWorkerSrcUrl;\n this.initializing = true;\n this.worker = new Worker(this.wasmWorkerSrcUrl);\n this.worker.onmessage = (ev) => {\n for (let i = 0; i < this.resolvers.length; i++) {\n if (this.resolvers[i](ev)) {\n this.resolvers.splice(i, 1);\n break;\n }\n }\n };\n this.resolvers.push((ev: MessageEvent) => {\n if (ev.data.type === \"error\") {\n logger.error(\"WebAssembly Error\", ev.data.message);\n return true;\n }\n });\n return new Promise((resolve) => {\n this.resolvers.push((ev: MessageEvent) => {\n if (ev.data.type === \"initializeComplete\") {\n this.initializing = false;\n this.initialized = true;\n resolve();\n return true;\n }\n });\n });\n }\n\n isWasmTensor(tensor: Tensor): tensor is WasmTensor {\n return tensor.backend === this.backend;\n }\n\n assertsWasmTensor(tensor: Tensor): asserts tensor is WasmTensor {\n if (tensor.backend !== this.backend) {\n throw new Error(\n `Tensor backend ${this.backend} is expected, but ${tensor.backend} is given.`\n );\n }\n }\n\n assertsWasmTensorArray(tensors: Tensor[]): asserts tensors is WasmTensor[] {\n for (const tensor of tensors) {\n if (tensor.backend !== this.backend) {\n throw new Error(\n `Tensor backend ${this.backend} is expected, but ${tensor.backend} is given.`\n );\n }\n }\n }\n\n emptyTensor(dims: ReadonlyArray<number>, dataType?: DataType): WasmTensor {\n return new WasmTensorImpl(this, dims, dataType);\n }\n\n async moveTensor(tensor: Tensor): Promise<WasmTensor> {\n const dst = new WasmTensorImpl(this, tensor.dims, tensor.dataType);\n await dst.setData(await tensor.getData());\n return dst;\n }\n\n private checkInitialized() {\n if (!this.initialized) {\n throw new Error(\"Not initialized\");\n }\n }\n\n 
runKernel(name: string, args: WasmKernelArgument[]): void {\n const argsToSend = args.map((arg) => {\n switch (arg.type) {\n case \"tensor\":\n return {\n type: \"tensor\",\n bufferId: arg.value.sharedBuffer.backendBufferId,\n };\n default:\n return { type: \"scalar\", value: arg.value };\n }\n });\n this.worker.postMessage({ type: \"runKernel\", name, args: argsToSend });\n }\n\n allocBuffer(buffer: WasmSharedBuffer): void {\n this.worker.postMessage({\n type: \"alloc\",\n bufferId: buffer.backendBufferId,\n byteLength: buffer.byteLength,\n });\n }\n\n destroyBuffer(buffer: WasmSharedBuffer): void {\n this.worker.postMessage({\n type: \"destroy\",\n bufferId: buffer.backendBufferId,\n });\n }\n\n writeTensor(buffer: WasmSharedBuffer, data: DataArrayTypes): void {\n const copyData = new Uint8Array(buffer.byteLength),\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n copyDataView = new (data.constructor as any)(copyData.buffer);\n copyDataView.set(data);\n this.worker.postMessage(\n { type: \"write\", bufferId: buffer.backendBufferId, data: copyData },\n [copyData.buffer]\n );\n }\n\n readTensor(buffer: WasmSharedBuffer): Promise<Uint8Array> {\n this.worker.postMessage({ type: \"read\", bufferId: buffer.backendBufferId });\n return new Promise((resolve) => {\n this.resolvers.push((ev: MessageEvent) => {\n if (ev.data.type === \"read\") {\n resolve(ev.data.data as Uint8Array);\n return true;\n }\n });\n });\n }\n}\n" }, { "alpha_fraction": 0.6431372761726379, "alphanum_fraction": 0.6478431224822998, "avg_line_length": 33.4594612121582, "blob_id": "c30d13b2aa9ac6dee0e507dafed5cc846d540147", "content_id": "0312e03a16ff5914cb7878627facd5a5dbdbd8fe", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1275, "license_type": "permissive", "max_line_length": 80, "num_lines": 37, "path": "/example/custom_operator/twice.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "// Example implementation of custom operator.\nimport { DataArrayConstructor } from \"../../../../interface/core/constants\";\nimport { OperatorImpl } from \"../../../operatorImpl\";\nimport { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\nclass Twice extends OperatorImpl {\n constructor() {\n super(\"cpu\");\n }\n\n async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsCPUTensorArray(inputs);\n const input = inputs[0];\n // constructs TypedArray (e.g. 
Float32Array) for output\n const newData = new DataArrayConstructor[input.dataType](input.data.length);\n // computation core\n for (let i = 0; i < newData.length; i++) {\n newData[i] = input.data[i] * 2;\n }\n // create output CPUTensor specifying shape, data type, data\n const output = context.emptyTensor(input.dims, input.dataType, newData);\n return [output];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Twice\", // ONNX Operator name\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new Twice(), // Function to construct operator\n },\n ];\n}\n" }, { "alpha_fraction": 0.6387213468551636, "alphanum_fraction": 0.6465620994567871, "avg_line_length": 28.087718963623047, "blob_id": "f8af4c3cab9dbb3a5974c8b941ec4c995562989f", "content_id": "7449b403cbc743f3b41ee4b2e5935281d4f4c672", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1658, "license_type": "permissive", "max_line_length": 78, "num_lines": 57, "path": "/src/descriptor_runner/operators/wasm/operators/standard/dynamicunary.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { OperatorImpl } from \"../../../operatorImpl\";\nimport {\n WasmKernelArgument,\n WebDNNWasmContext,\n} from \"../../../../interface/backend/wasm/wasmContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport { onnx } from \"onnx-proto\";\nimport { getAttrFloat } from \"../../../operatorUtil\";\n\nabstract class WasmDynamicUnary extends OperatorImpl {\n constructor(private kernelName: string) {\n super(\"wasm\");\n }\n\n protected abstract getKernelArgs(): WasmKernelArgument[];\n\n async run(context: WebDNNWasmContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsWasmTensorArray(inputs);\n const input = inputs[0];\n if (input.dataType !== \"float32\") {\n throw new Error(\"Only float32 is supported\");\n }\n const output = context.emptyTensor(input.dims, input.dataType);\n context.runKernel(this.kernelName, [\n { type: \"tensor\", value: input },\n { type: \"tensor\", value: output },\n { type: \"int32\", value: input.length },\n ...this.getKernelArgs(),\n ]);\n return [output];\n }\n}\n\nclass WasmLeakyRelu extends WasmDynamicUnary {\n alpha!: number;\n\n initialize(attribute: onnx.IAttributeProto[]): void {\n super.initialize(attribute);\n this.alpha = getAttrFloat(attribute, \"alpha\", 0.01);\n }\n\n protected getKernelArgs(): WasmKernelArgument[] {\n return [{ type: \"float32\", value: this.alpha }];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"LeakyRelu\",\n backend: \"wasm\",\n opsetMin: 1,\n factory: () => new WasmLeakyRelu(\"kernel_leakyrelu\"),\n },\n ];\n}\n" }, { "alpha_fraction": 0.5885974764823914, "alphanum_fraction": 0.6009122133255005, "avg_line_length": 32.730770111083984, "blob_id": "4d746efc951cb6022b89e17e65dd0e8d85bb39f0", "content_id": "9795772c28ca12c1c6eb70427a9ed1ca6ff83c67", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4385, "license_type": "permissive", "max_line_length": 152, "num_lines": 130, "path": "/src/graph_transpiler/webdnn/pass_fusion_unary_webgl.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "from typing import List\nimport onnx\nfrom webdnn.pass_fusion_unary import PassFusionUnary\nfrom webdnn.operator_shader_cpu import 
OperatorShaderCPU\nfrom webdnn.optimization_pass_result_webgl import OptimizationPassResultWebGL\nfrom webdnn.operator_shader_webgl import OperatorShaderWebGL\n\nSHADER_TEMPLATE = \"\"\"\nimport { OperatorImpl } from \"../../../operatorImpl\";\nimport {\n WebDNNWebGLContext,\n WebGLUniformItem,\n} from \"../../../../interface/backend/webgl/webglContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport {\n shaderGenHeader,\n shaderGenOutput,\n shaderGenTensorElementwiseGet,\n shaderGenTensorElementwiseGetUniformItem,\n} from \"../../shaderHelper\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\nexport class WebGLUnary extends OperatorImpl {\n constructor(\n public kernelName: string,\n private unaryCalculationSource: string,\n private unaryCalculationSourceWebGL1?: string\n ) {\n super(\"webgl\");\n }\n\n async run(context: WebDNNWebGLContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsWebGLTensorArray(inputs);\n const input = inputs[0];\n if (input.dataType !== \"float32\") {\n throw new Error();\n }\n const outputTensor = context.emptyTensor(input.dims, \"float32\");\n if (\n input.textureWidth !== outputTensor.textureWidth ||\n input.textureHeight !== outputTensor.textureHeight ||\n input.dimPerPixel !== 1\n ) {\n throw new Error();\n }\n\n if (!context.hasKernel(this.kernelName)) {\n const kernelSource = `${shaderGenHeader(context.webgl2)}\n ${shaderGenTensorElementwiseGet(\"tex_input\", context.webgl2)}\n void main() {\n float v0 = get_tex_input();\n ${\n !context.webgl2 && this.unaryCalculationSourceWebGL1\n ? this.unaryCalculationSourceWebGL1\n : this.unaryCalculationSource\n }\n ${shaderGenOutput(\"v\", context.webgl2)}\n return;\n }\n `;\n context.addKernel(this.kernelName, kernelSource);\n }\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorElementwiseGetUniformItem(\n \"tex_input\",\n input,\n context.webgl2\n ),\n ];\n\n await context.runKernel(\n this.kernelName,\n [{ tensor: input, name: \"tex_input\" }],\n outputTensor,\n uniforms\n );\n return [outputTensor];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"%%OP_TYPE%%\",\n backend: \"webgl\",\n opsetMin: 1,\n factory: () => new WebGLUnary(\"%%OP_TYPE%%\", \"%%FUNC_BODY_2%%\", \"%%FUNC_BODY_1%%\"),\n },\n ];\n}\n\n\"\"\"\n\nFUNC_TEMPLATES_2 = {\n \"Ceil\": \"float %%VAR_OUT%% = ceil(%%VAR_IN%%);\",\n \"Exp\": \"float %%VAR_OUT%% = exp(%%VAR_IN%%);\",\n \"Floor\": \"float %%VAR_OUT%% = floor(%%VAR_IN%%);\",\n \"Relu\": \"float %%VAR_OUT%% = max(%%VAR_IN%%, 0.0);\",\n \"Sigmoid\": \"float %%VAR_OUT%% = (tanh(%%VAR_IN%% * 0.5) + 1.0) * 0.5;\",\n \"Sqrt\": \"float %%VAR_OUT%% = sqrt(%%VAR_IN%%);\",\n \"Tanh\": \"float %%VAR_OUT%% = tanh(%%VAR_IN%%);\",\n}\n\nFUNC_TEMPLATES_1 = {\n \"Ceil\": \"float %%VAR_OUT%% = ceil(%%VAR_IN%%);\",\n \"Exp\": \"float %%VAR_OUT%% = exp(%%VAR_IN%%);\",\n \"Floor\": \"float %%VAR_OUT%% = floor(%%VAR_IN%%);\",\n \"Relu\": \"float %%VAR_OUT%% = max(%%VAR_IN%%, 0.0);\",\n \"Sigmoid\": \"float %%VAR_OUT%% = 1.0 / (1.0 + exp(-%%VAR_IN%%));\",\n \"Sqrt\": \"float %%VAR_OUT%% = sqrt(%%VAR_IN%%);\",\n \"Tanh\": \"float %%VAR_OUT%%_t = exp(-2.0 * %%VAR_IN%%); float %%VAR_OUT%% = (1.0 - %%VAR_OUT%%_t) / (1.0 + %%VAR_OUT%%_t);\",\n}\n\nclass PassFusionUnaryWebGL(PassFusionUnary):\n def _make_shader(self, custom_op_type: str, nodes: List[onnx.NodeProto]) -> OperatorShaderCPU:\n func_body_1 = \"\"\n func_body_2 = \"\"\n for i, node in enumerate(nodes):\n tmpl = FUNC_TEMPLATES_1[node.op_type]\n 
func_body_1 += tmpl.replace(\"%%VAR_IN%%\", f\"v{i}\").replace(\"%%VAR_OUT%%\", f\"v{i+1}\")\n tmpl = FUNC_TEMPLATES_2[node.op_type]\n func_body_2 += tmpl.replace(\"%%VAR_IN%%\", f\"v{i}\").replace(\"%%VAR_OUT%%\", f\"v{i+1}\")\n func_body_1 += f\"float v = v{len(nodes)};\"\n func_body_2 += f\"float v = v{len(nodes)};\"\n ts_code = SHADER_TEMPLATE.replace(\"%%OP_TYPE%%\", custom_op_type).replace(\"%%FUNC_BODY_1%%\", func_body_1).replace(\"%%FUNC_BODY_2%%\", func_body_2)\n return OperatorShaderWebGL(ts_code)\n\n def _construct_result(self):\n return OptimizationPassResultWebGL()\n" }, { "alpha_fraction": 0.6035398244857788, "alphanum_fraction": 0.6099115014076233, "avg_line_length": 33.0361442565918, "blob_id": "ca66c0eed082596665e75674227b6e0f4ae8dd8e", "content_id": "26ae7130f560a7db3caf9755a3cae5edd004e1e3", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2825, "license_type": "permissive", "max_line_length": 106, "num_lines": 83, "path": "/src/graph_transpiler/webdnn/pass_fusion_unary_wasm.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "from typing import List\nimport onnx\nfrom webdnn.pass_fusion_unary import PassFusionUnary\nfrom webdnn.operator_shader_wasm import OperatorShaderWasm\nfrom webdnn.optimization_pass_result_wasm import OptimizationPassResultWasm\n\nTS_TEMPLATE = \"\"\"\nimport { OperatorImpl } from \"../../../operatorImpl\";\nimport { WebDNNWasmContext } from \"../../../../interface/backend/wasm/wasmContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\nclass WasmUnary extends OperatorImpl {\n constructor(private kernelName: string) {\n super(\"wasm\");\n }\n\n async run(context: WebDNNWasmContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsWasmTensorArray(inputs);\n const input = inputs[0];\n if (input.dataType !== \"float32\") {\n throw new Error(\"Only float32 is supported\");\n }\n const output = context.emptyTensor(input.dims, input.dataType);\n context.runKernel(this.kernelName, [\n { type: \"tensor\", value: input },\n { type: \"tensor\", value: output },\n { type: \"int32\", value: input.length },\n ]);\n return [output];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"%%OP_TYPE%%\",\n backend: \"wasm\",\n opsetMin: 1,\n factory: () => new WasmUnary(\"%%OP_TYPE%%\"),\n },\n ];\n}\n\"\"\"\n\nCPP_TEMPLATE = \"\"\"\n#include <algorithm>\n#include <cmath>\n#include \"../../common/kernel.hpp\"\n#include \"../../common/unary.hpp\"\n\nextern \"C\"\n{\n void WEBDNN_KERNEL %%OP_TYPE%%(const float *src, float *dst, int length)\n {\n webdnn_unary(src, dst, length, [](float v0) { %%FUNC_BODY%% });\n }\n}\n\"\"\"\n\nFUNC_TEMPLATES = {\n \"Ceil\": \"float %%VAR_OUT%% = std::ceil(%%VAR_IN%%);\",\n \"Exp\": \"float %%VAR_OUT%% = std::exp(%%VAR_IN%%);\",\n \"Floor\": \"float %%VAR_OUT%% = std::floor(%%VAR_IN%%);\",\n \"Relu\": \"float %%VAR_OUT%% = std::max(%%VAR_IN%%, 0.0f);\",\n \"Sigmoid\": \"float %%VAR_OUT%% = (std::tanh(%%VAR_IN%% * 0.5f) + 1.0f) * 0.5f;\",\n \"Sqrt\": \"float %%VAR_OUT%% = std::sqrt(%%VAR_IN%%);\",\n \"Tanh\": \"float %%VAR_OUT%% = std::tanh(%%VAR_IN%%);\",\n}\n\nclass PassFusionUnaryWasm(PassFusionUnary):\n def _make_shader(self, custom_op_type: str, nodes: List[onnx.NodeProto]) -> OperatorShaderWasm:\n func_body = \"\"\n for i, node in enumerate(nodes):\n tmpl = FUNC_TEMPLATES[node.op_type]\n func_body += 
tmpl.replace(\"%%VAR_IN%%\", f\"v{i}\").replace(\"%%VAR_OUT%%\", f\"v{i+1}\")\n func_body += f\"return v{len(nodes)};\"\n ts_code = TS_TEMPLATE.replace(\"%%OP_TYPE%%\", custom_op_type)\n cpp_code = CPP_TEMPLATE.replace(\"%%OP_TYPE%%\", custom_op_type).replace(\"%%FUNC_BODY%%\", func_body)\n return OperatorShaderWasm(ts_code, custom_op_type, cpp_code)\n\n def _construct_result(self):\n return OptimizationPassResultWasm()\n" }, { "alpha_fraction": 0.48946014046669006, "alphanum_fraction": 0.5043701529502869, "avg_line_length": 28.923076629638672, "blob_id": "bd5e4b1251b4504c9a3b916be0ede149321f5d57", "content_id": "64acaf42d3d9cfe298a2ea0040c150f06787c427", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1945, "license_type": "permissive", "max_line_length": 83, "num_lines": 65, "path": "/src/descriptor_runner/operators/wasm/operators/standard/gemm.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { WebDNNWasmContext } from \"../../../../interface/backend/wasm/wasmContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport { Gemm } from \"../../../base/gemm\";\n\nclass WasmGemm extends Gemm {\n constructor() {\n super(\"wasm\");\n }\n\n async run(context: WebDNNWasmContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsWasmTensorArray(inputs);\n const [inputA, inputB, inputC] = inputs;\n const { m, n, k } = this.calcShape(inputA.dims, inputB.dims);\n const output = context.emptyTensor([m, n]);\n if (this.alpha !== 1.0) {\n throw new Error(\"Gemm: alpha !== 1.0 is not yet supported\");\n }\n if (inputC) {\n if (this.beta !== 1.0) {\n throw new Error(\"Gemm: beta !== 1.0 is not yet supported\");\n }\n context.runKernel(\n `kernel_gemm_transa${this.transA ? \"1\" : \"0\"}_transb${\n this.transB ? \"1\" : \"0\"\n }_c`,\n [\n { type: \"tensor\", value: inputA },\n { type: \"tensor\", value: inputB },\n { type: \"tensor\", value: inputC },\n { type: \"tensor\", value: output },\n { type: \"int32\", value: m },\n { type: \"int32\", value: n },\n { type: \"int32\", value: k },\n ]\n );\n } else {\n context.runKernel(\n `kernel_gemm_transa${this.transA ? \"1\" : \"0\"}_transb${\n this.transB ? 
\"1\" : \"0\"\n }`,\n [\n { type: \"tensor\", value: inputA },\n { type: \"tensor\", value: inputB },\n { type: \"tensor\", value: output },\n { type: \"int32\", value: m },\n { type: \"int32\", value: n },\n { type: \"int32\", value: k },\n ]\n );\n }\n return [output];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Gemm\",\n backend: \"wasm\",\n opsetMin: 1,\n factory: () => new WasmGemm(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.662398636341095, "alphanum_fraction": 0.6628254652023315, "avg_line_length": 27.22891616821289, "blob_id": "3cd08a344b9b8334a5846fdb5a85407474feb33c", "content_id": "eaa3d6e9c73fe2a496ea9ca0517b5bed5f8340b5", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 2535, "license_type": "permissive", "max_line_length": 76, "num_lines": 83, "path": "/src/descriptor_runner/core/modelTransform.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "/* eslint-disable @typescript-eslint/no-non-null-assertion */\nimport { onnx } from \"onnx-proto\";\nimport { Backend } from \"../interface/core/constants\";\nimport { WebDNNLogging } from \"../logging\";\n\nconst logger = WebDNNLogging.getLogger(\"WebDNN.modelTransform\");\n\nexport function modelTransform(\n model: onnx.ModelProto,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n backendOrder: Backend[]\n): void {\n /*\n * TODO: implementation\n * if (backendOrder.includes(\"webgl\")) {\n * const webglContext = WebDNNWebGLContext.getInstance();\n * if (webglContext.webgl2 && webglContext.canOnlyReadRGBA) {\n * outputPackRGBA(graph, backendOrder);\n * }\n * }\n */\n renameDuplicatedNode(model);\n}\n\nfunction renameDuplicatedNode(model: onnx.ModelProto): void {\n const usedNames = new Set<string>();\n for (const node of model.graph!.node!) {\n let origName = node.name;\n if (!origName) {\n origName = \"unnamed\";\n }\n if (usedNames.has(origName)) {\n let newName = origName + \"_\";\n while (usedNames.has(newName)) {\n newName = newName + \"_\";\n }\n node.name = newName;\n usedNames.add(newName);\n logger.warn(\n `node name ${origName} is already used: renaming to ${newName}`\n );\n } else {\n usedNames.add(origName);\n }\n }\n}\n\n/**\n * テンソルを開放するタイミングを計算する。\n * @param model 計算対象のモデル\n * @returns key: オペレータ名, value: そのオペレータ完了直後に開放するテンソルの名前\n */\nexport function findTensorReleaseTiming(\n model: onnx.ModelProto,\n initializerTensorNames: Set<string>\n): Map<string, string[]> {\n const lastReferencedAt = new Map<string, string>(),\n graph = model.graph!;\n for (const node of graph.node!) {\n for (const inputName of node.input!) {\n lastReferencedAt.set(inputName, node.name!);\n }\n }\n // Optimized modelではgraph.initializer以外からテンソルを読み込むため、実際に読み込まれたテンソル名リストを用いる\n for (const initializer of initializerTensorNames) {\n lastReferencedAt.delete(initializer);\n }\n for (const input of graph.input!) {\n lastReferencedAt.delete(input.name!);\n }\n for (const output of graph.output!) 
{\n    lastReferencedAt.delete(output.name!);\n  }\n\n  const timing = new Map<string, string[]>();\n  for (const [name, last] of lastReferencedAt.entries()) {\n    const t = timing.get(last) || [];\n    t.push(name);\n    timing.set(last, t);\n  }\n\n  return timing;\n}\n" }, { "alpha_fraction": 0.6108927130699158, "alphanum_fraction": 0.6273126602172852, "avg_line_length": 26.629392623901367, "blob_id": "fb254299d7927fb4b96dfff199a5522be7c6ec95", "content_id": "619936d905724eb43a574513fc3f3e0eb74fd813", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 8666, "license_type": "permissive", "max_line_length": 107, "num_lines": 313, "path": "/src/descriptor_runner/operators/webgl/operators/standard/matmul.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import {\n  shaderGenHeader,\n  shaderGenOutput,\n  shaderGenTensorNDGet,\n  shaderGenTensorNDGetUniformItem,\n  shaderGenTensorOutputCoordsWithReturn,\n  shaderGenTensorOutputUniform,\n  shaderGenTensorOutputUniformItem,\n} from \"../../shaderHelper\";\nimport { MatMul } from \"../../../base/matmul\";\nimport {\n  WebDNNWebGLContext,\n  WebGLUniformItem,\n} from \"../../../../interface/backend/webgl/webglContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { WebGLTensor } from \"../../../../interface/backend/webgl/webglTensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\n// Version 13\nexport class WebGLMatMul extends MatMul {\n  constructor() {\n    super(\"webgl\");\n  }\n\n  async run(context: WebDNNWebGLContext, inputs: Tensor[]): Promise<Tensor[]> {\n    context.assertsWebGLTensorArray(inputs);\n    const inputA = inputs[0],\n      inputB = inputs[1];\n    if (inputA.dataType !== \"float32\" || inputB.dataType !== \"float32\") {\n      throw new Error(\"only float32 is supported\");\n    }\n    if (inputA.dimPerPixel !== 1 || inputB.dimPerPixel !== 1) {\n      throw new Error();\n    }\n    const {\n      resultLength,\n      resultDims,\n      resultStrides,\n      resultDimsAfterSqueeze,\n      stridesA,\n      stridesB,\n      innerProductLength,\n    } = this.calcShape(inputA.dims, inputB.dims),\n      output = context.emptyTensor(resultDimsAfterSqueeze, \"float32\");\n    if (resultDims.length === 2) {\n      await this.calcDim2(\n        context,\n        inputA,\n        inputB,\n        output,\n        resultDims,\n        resultStrides,\n        stridesA,\n        stridesB,\n        innerProductLength\n      );\n    } else if (resultDims.length === 3) {\n      await this.calcDim3(\n        context,\n        inputA,\n        inputB,\n        output,\n        resultDims,\n        resultStrides,\n        stridesA,\n        stridesB,\n        innerProductLength\n      );\n    } else {\n      // TODO: support 4 or more dimensions\n      throw new Error();\n    }\n\n    return [output];\n  }\n\n  private async calcDim2(\n    context: WebDNNWebGLContext,\n    dA: WebGLTensor,\n    dB: WebGLTensor,\n    dC: WebGLTensor,\n    resultDims: number[],\n    resultStrides: number[],\n    stridesA: ReadonlyArray<number>,\n    stridesB: ReadonlyArray<number>,\n    innerProductLength: number\n  ) {\n    const kernelSource = context.webgl2\n      ? 
`${shaderGenHeader(context.webgl2)}\n\n#define innerProductLength ${innerProductLength}\n${shaderGenTensorOutputUniform(resultDims.length)}\n\nuniform sampler2D tex_input_a;\nuniform int tex_input_a_stride_0;\nuniform int tex_input_a_stride_1;\n\nivec2 get_coord_a(int d0) {\n int flat_index = d0 * tex_input_a_stride_0;\n int texture_w = textureSize(tex_input_a, 0).x;\n int y = flat_index / texture_w;\n int x = flat_index - y * texture_w;\n return ivec2(x, y);\n}\n\nuniform sampler2D tex_input_b;\nuniform int tex_input_b_stride_0;\nuniform int tex_input_b_stride_1;\n\nivec2 get_coord_b(int d1) {\n int flat_index = d1 * tex_input_b_stride_1;\n int texture_w = textureSize(tex_input_b, 0).x;\n int y = flat_index / texture_w;\n int x = flat_index - y * texture_w;\n return ivec2(x, y);\n}\n\nvoid main() {\n ${shaderGenTensorOutputCoordsWithReturn(resultDims.length)}\n float s = 0.0;\n ivec2 c_a = get_coord_a(tex_output_0);\n ivec2 c_b = get_coord_b(tex_output_1);\n int texture_w_a = textureSize(tex_input_a, 0).x;\n int texture_w_b = textureSize(tex_input_b, 0).x;\n for (int ip = 0; ip < innerProductLength; ip++) {\n s += texelFetch(tex_input_a, c_a, 0).r * texelFetch(tex_input_b, c_b, 0).r;\n c_a.x += tex_input_a_stride_1;\n if (c_a.x >= texture_w_a) {\n c_a = ivec2(c_a.x - texture_w_a, c_a.y + 1);\n }\n c_b.x += tex_input_b_stride_0;\n if (c_b.x >= texture_w_b) {\n c_b = ivec2(c_b.x - texture_w_b, c_b.y + 1);\n }\n }\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n}\n`\n : `${shaderGenHeader(context.webgl2)}\n\n#define innerProductLength ${innerProductLength}\n${shaderGenTensorOutputUniform(resultDims.length)}\n\n${shaderGenTensorNDGet(\"tex_input_a\", 2, context.webgl2)}\n${shaderGenTensorNDGet(\"tex_input_b\", 2, context.webgl2)}\n\nvoid main() {\n ${shaderGenTensorOutputCoordsWithReturn(resultDims.length)}\n float s = 0.0;\n for (int ip = 0; ip < innerProductLength; ip++) {\n s += get_tex_input_a(tex_output_0, ip) * get_tex_input_b(ip, tex_output_1);\n }\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n}\n`,\n kernelName = `matmul_2_${innerProductLength}`;\n context.addKernel(kernelName, kernelSource);\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_a\",\n stridesA,\n dA,\n context.webgl2\n ),\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_b\",\n stridesB,\n dB,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem(resultDims, dC, context.webgl2),\n ];\n await context.runKernel(\n kernelName,\n [\n { tensor: dA, name: \"tex_input_a\" },\n { tensor: dB, name: \"tex_input_b\" },\n ],\n dC,\n uniforms\n );\n }\n\n private async calcDim3(\n context: WebDNNWebGLContext,\n dA: WebGLTensor,\n dB: WebGLTensor,\n dC: WebGLTensor,\n resultDims: number[],\n resultStrides: number[],\n stridesA: ReadonlyArray<number>,\n stridesB: ReadonlyArray<number>,\n innerProductLength: number\n ) {\n const kernelSource = context.webgl2\n ? 
`${shaderGenHeader(context.webgl2)}\n\n#define innerProductLength ${innerProductLength}\n${shaderGenTensorOutputUniform(resultDims.length)}\n\nuniform sampler2D tex_input_a;\nuniform int tex_input_a_stride_0;\nuniform int tex_input_a_stride_1;\nuniform int tex_input_a_stride_2;\n\nivec2 get_coord_a(int d0, int d1) {\n int flat_index = d0 * tex_input_a_stride_0 + d1 * tex_input_a_stride_1;\n int texture_w = textureSize(tex_input_a, 0).x;\n int y = flat_index / texture_w;\n int x = flat_index - y * texture_w;\n return ivec2(x, y);\n}\n\nuniform sampler2D tex_input_b;\nuniform int tex_input_b_stride_0;\nuniform int tex_input_b_stride_1;\nuniform int tex_input_b_stride_2;\n\nivec2 get_coord_b(int d0, int d2) {\n int flat_index = d0 * tex_input_b_stride_0 + d2 * tex_input_b_stride_2;\n int texture_w = textureSize(tex_input_b, 0).x;\n int y = flat_index / texture_w;\n int x = flat_index - y * texture_w;\n return ivec2(x, y);\n}\n\nvoid main() {\n ${shaderGenTensorOutputCoordsWithReturn(resultDims.length)}\n float s = 0.0;\n ivec2 c_a = get_coord_a(tex_output_0, tex_output_1);\n ivec2 c_b = get_coord_b(tex_output_0, tex_output_2);\n int texture_w_a = textureSize(tex_input_a, 0).x;\n int texture_w_b = textureSize(tex_input_b, 0).x;\n for (int ip = 0; ip < innerProductLength; ip++) {\n s += texelFetch(tex_input_a, c_a, 0).r * texelFetch(tex_input_b, c_b, 0).r;\n c_a.x += tex_input_a_stride_2;\n if (c_a.x >= texture_w_a) {\n c_a = ivec2(c_a.x - texture_w_a, c_a.y + 1);\n }\n c_b.x += tex_input_b_stride_1;\n if (c_b.x >= texture_w_b) {\n c_b = ivec2(c_b.x - texture_w_b, c_b.y + 1);\n }\n }\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n}\n`\n : `${shaderGenHeader(context.webgl2)}\n\n#define innerProductLength ${innerProductLength}\n${shaderGenTensorOutputUniform(resultDims.length)}\n\n${shaderGenTensorNDGet(\"tex_input_a\", 3, context.webgl2)}\n${shaderGenTensorNDGet(\"tex_input_b\", 3, context.webgl2)}\n\nvoid main() {\n ${shaderGenTensorOutputCoordsWithReturn(resultDims.length)}\n float s = 0.0;\n for (int ip = 0; ip < innerProductLength; ip++) {\n s += get_tex_input_a(tex_output_0, tex_output_1, ip) * get_tex_input_b(tex_output_0, ip, tex_output_2);\n }\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n}\n`,\n kernelName = `matmul_3_${innerProductLength}`;\n context.addKernel(kernelName, kernelSource);\n\n if (stridesA[2] > dA.textureWidth || stridesB[1] > dB.textureWidth) {\n throw new Error(\"MatMul: kernel assumption does not hold\");\n }\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_a\",\n stridesA,\n dA,\n context.webgl2\n ),\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_b\",\n stridesB,\n dB,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem(resultDims, dC, context.webgl2),\n ];\n await context.runKernel(\n kernelName,\n [\n { tensor: dA, name: \"tex_input_a\" },\n { tensor: dB, name: \"tex_input_b\" },\n ],\n dC,\n uniforms\n );\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"MatMul\",\n backend: \"webgl\",\n opsetMin: 1,\n factory: () => new WebGLMatMul(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.7610192894935608, "alphanum_fraction": 0.7685950398445129, "avg_line_length": 38.24324417114258, "blob_id": "76b7a80858710a57c36666fc9fcd0d15596dbf1e", "content_id": "366991acd584e439be4a0eb876981b02f31a0cfb", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1452, "license_type": "permissive", 
"max_line_length": 292, "num_lines": 37, "path": "/example/custom_operator/README.md", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "# Example of custom operator implementation\n\nA sample that implements and runs a custom operator that does not exist in the ONNX specification.\n\nCreate a model that contains a custom operator named `Twice` that doubles the input and run it on WebDNN.\n\nPyTorch (`>=1.7`) is required to run the Python script.\n\n# Operation procedure\n## Generate ONNX models with custom operators\n```\npython make_model.py\n```\n\n## Install custom operator implementation\n\nCopy `twice.ts`, the implementation of the custom operator, to `<repository_root>/src\\descriptor_runner\\operators\\cpu\\operators\\custom\\twice.ts`.\n\n## Build an operator set with custom operators\n\n```\npython -m webdnn.optimize_model output/model.onnx output\n```\n\nOutput files `model-{cpu,wasm,webgl}.onnx`, `op-{cpu,wasm,webgl}.js`, `weight-{cpu,wasm,webgl}-0.bin` are generated. `op-{cpu,wasm,webgl}.js` includes standard and custom operator implementations.`model.onnx` is no longer needed.\n\n## Run on a web browser\n\nAt repository root, execute\n\n```\nyarn server\n```\n\nWith this running, open [http://localhost:8080/example/custom_operator/](http://localhost:8080/example/custom_operator/) with a web browser.\n\nIn this sample, the custom operator is an implementation running on the CPU; the standard operator, for which there is an implementation running on the GPU, runs on the GPU, and tensor data is automatically transferred between the CPU and GPU before and after the custom operator is executed.\n" }, { "alpha_fraction": 0.625, "alphanum_fraction": 0.6275510191917419, "avg_line_length": 26.034482955932617, "blob_id": "81b39fd415086d10ce532d0a8fcebfe58200b1a2", "content_id": "b779c935a1faf6ae89167aca54a5c9970fc9ece8", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 784, "license_type": "permissive", "max_line_length": 86, "num_lines": 29, "path": "/src/descriptor_runner/operators/webgl/operators/standard/flatten.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { WebDNNWebGLContext } from \"../../../../interface/backend/webgl/webglContext\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { Flatten } from \"../../../base/flatten\";\n\nclass WebGLFlatten extends Flatten {\n constructor() {\n super(\"webgl\");\n }\n\n async run(context: WebDNNWebGLContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsWebGLTensorArray(inputs);\n const input = inputs[0],\n computedShape = this.calcShape(input);\n\n return [input.alias(computedShape)];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Flatten\",\n backend: \"webgl\",\n opsetMin: 1,\n factory: () => new WebGLFlatten(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.7419354915618896, "alphanum_fraction": 0.7419354915618896, "avg_line_length": 14.5, "blob_id": "ce9d624c5339024ad8d83ca8803bbe38df613315", "content_id": "5fb795c5a896f358acf0cfe2e15ab8077134fc8e", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 31, "license_type": "permissive", "max_line_length": 21, "num_lines": 2, "path": "/src/graph_transpiler/webdnn/operator_shader.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": 
"UTF-8", "text": "class OperatorShader:\n pass\n" }, { "alpha_fraction": 0.6560472249984741, "alphanum_fraction": 0.6625368595123291, "avg_line_length": 37.08988952636719, "blob_id": "40dcac7f25a88837817ae8640275bf3c24f992c1", "content_id": "44df68c342ec2f0085befa7f98f892d85a918549", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3390, "license_type": "permissive", "max_line_length": 167, "num_lines": 89, "path": "/src/graph_transpiler/webdnn/tensor_export.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "from typing import Dict, List\nimport struct\nimport numpy as np\nimport onnx\nfrom webdnn.constant_codec_eightbit import compress_tensor_eightbit\nfrom webdnn.onnx_util import DATA_TYPE_TO_NUMPY, tensor_proto_to_numpy\n\nFILE_SIGNATURE = b\"WDN2\"\nTENSOR_SIGNATURE = b\"TENS\"\nCLOSE_SIGNATURE = b\"CLOS\"\n\n\ndef _data_type_from_numpy(np_dtype) -> int:\n # dict like {np.float32: 1} cannot be used due to key equality check\n for k, v in DATA_TYPE_TO_NUMPY.items():\n if v == np_dtype:\n return k\n raise ValueError\n\ndef _compress_tensor_raw(data: np.ndarray) -> bytes:\n return data.tobytes()\n\ndef _compress_tensor(data: np.ndarray, compression_algorithm: int) -> bytes:\n if compression_algorithm == 0:\n return _compress_tensor_raw(data)\n elif compression_algorithm == 1:\n return compress_tensor_eightbit(data)\n else:\n raise ValueError\n\ndef _select_compression_algorithm(data: np.ndarray, compression_algorithm: int) -> int:\n if data.dtype != np.float32:\n return 0\n return compression_algorithm\n\ndef _make_tensor_chunk(name: str, data: np.ndarray, compression_algorithm: int) -> bytes:\n data_type = _data_type_from_numpy(data.dtype)\n compression_algorithm = _select_compression_algorithm(data, compression_algorithm)\n compressed_body = _compress_tensor(data, compression_algorithm)\n compressed_body_size = len(compressed_body)\n ndim = data.ndim\n dims = data.shape\n name_bytes = name.encode(\"utf-8\")\n name_length = len(name_bytes)\n extra_bytes = b\"\"\n extra_length = len(extra_bytes)\n header = struct.pack(\"<BIBB\",\n compression_algorithm,\n compressed_body_size,\n data_type,\n ndim)\n header += struct.pack(\"<\" + \"I\" * len(dims), *dims)\n header += struct.pack(\"<I\", name_length)\n header += name_bytes\n header += struct.pack(\"<I\", extra_length)\n header += extra_bytes\n header += compressed_body\n header = TENSOR_SIGNATURE + struct.pack(\"<I\", len(header)) + header\n return header\n\ndef _make_close_chunk() -> bytes:\n return CLOSE_SIGNATURE + b\"\\0\\0\\0\\0\"\n\ndef serialize_tensors(path_template: str, tensors: Dict[str, np.ndarray], split_size: int=0, compression_algorithm: int=0) -> List[str]:\n chunks = [FILE_SIGNATURE]\n for name, data in tensors.items():\n chunks.append(_make_tensor_chunk(name, data, compression_algorithm))\n chunks.append(_make_close_chunk())\n full_data = b\"\".join(chunks)\n if split_size <= 0:\n with open(path_template, \"wb\") as f:\n f.write(full_data)\n return [path_template]\n else:\n file_paths = []\n for i in range((len(full_data) + split_size - 1) // split_size):\n file_path = path_template.format(i)\n file_paths.append(file_path)\n with open(file_path, \"wb\") as f:\n f.write(full_data[i*split_size:(i+1)*split_size])\n return file_paths\n\ndef export_initializers(path_template: str, model: onnx.ModelProto, initializers: Dict[str, np.ndarray], split_size: int=0, compression_algorithm: int=0) -> List[str]:\n tensors = 
initializers.copy()\n    initializers = model.graph.initializer\n    while len(initializers) > 0:\n        tensor_proto = initializers.pop()\n        tensors[tensor_proto.name] = tensor_proto_to_numpy(tensor_proto)\n    return serialize_tensors(path_template, tensors, split_size, compression_algorithm)\n" }, { "alpha_fraction": 0.579483687877655, "alphanum_fraction": 0.5910326242446899, "avg_line_length": 24.824562072753906, "blob_id": "6644a774e6df4154858b0d44f7f5ea774f5d9aa0", "content_id": "058517b3812c44f154858b0d44f7f5ea774f5d9a", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1480, "license_type": "permissive", "max_line_length": 86, "num_lines": 57, "path": "/src/descriptor_runner/operators/webgl/operators/standard/averagepool.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { AveragePool } from \"../../../base/averagepool\";\nimport { averagepool } from \"../../rawcomputation/averagepool\";\nimport { WebDNNWebGLContext } from \"../../../../interface/backend/webgl/webglContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\n// Version 1, 7, 10, 11+\nclass WebGLAveragePool extends AveragePool {\n  constructor() {\n    super(\"webgl\");\n  }\n\n  async run(context: WebDNNWebGLContext, inputs: Tensor[]): Promise<Tensor[]> {\n    context.assertsWebGLTensorArray(inputs);\n    const inputX = inputs[0];\n    // TODO: support dimensions other than 2D\n    if (inputX.ndim !== 4) {\n      throw new Error(\"AveragePool other than 2D is not yet supported\");\n    }\n    if (inputX.dimPerPixel !== 1) {\n      throw new Error();\n    }\n\n    const { batch, kernelShape, pads, strides, inShape, outShape, ch } =\n      this.calcShape(inputX.dims),\n      output = context.emptyTensor(\n        [batch, ch, outShape[0], outShape[1]],\n        \"float32\",\n        { dimPerPixel: 1 }\n      );\n    await averagepool(\n      context,\n      inputX,\n      output,\n      this.countIncludePad,\n      batch,\n      kernelShape,\n      pads,\n      strides,\n      inShape,\n      outShape,\n      ch\n    );\n    return [output];\n  }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n  return [\n    {\n      opType: \"AveragePool\",\n      backend: \"webgl\",\n      opsetMin: 1,\n      factory: () => new WebGLAveragePool(),\n    },\n  ];\n}\n" }, { "alpha_fraction": 0.6613253951072693, "alphanum_fraction": 0.6658753752708435, "avg_line_length": 38.08988952636719, "blob_id": "40dcac7f25a88837817ae8640275bf3c24f992c1", "content_id": "44df68c342ec2f0085befa7f98f892d85a918549", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5055, "license_type": "permissive", "max_line_length": 146, "num_lines": 128, "path": "/scripts/extract_subgraph.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "\"\"\"\nTool to extract subgraph of ONNX model for debug.\n\"\"\"\n\nimport argparse\nimport numpy as np\nimport onnx\nimport onnxruntime\nfrom webdnn.tensor_export import serialize_tensors\n\ndef parse_name_shapes(name_shapes: str):\n    result = {}\n    for name_shape in name_shapes.split(\",\"):\n        name, shape_str = name_shape.split(\"=\")\n        shape = tuple(map(int, shape_str.split(\"x\")))\n        result[name] = shape\n    return result\n\ndef find_node_which_output(graph, output_name):\n    for node in graph.node:\n        for output in node.output:\n            if output == output_name:\n                return node\n    return None\n\ndef collect_subgraph_nodes(graph, in_names, out_names, initializer_names):\n    unresolved_tensors = list(out_names)\n    resolved_tensors = set(in_names) | 
set(initializer_names)\n subgraph_nodes = []\n while len(unresolved_tensors) > 0:\n target = unresolved_tensors.pop()\n node = find_node_which_output(graph, target)\n if node is None:\n print(target)\n raise ValueError\n for input_name in node.input:\n if input_name not in resolved_tensors:\n unresolved_tensors.append(input_name)\n for output_name in node.output:\n resolved_tensors.add(output_name)\n if node not in subgraph_nodes:\n subgraph_nodes.append(node)\n return sort_subgraph_nodes(subgraph_nodes, in_names, initializer_names)\n\ndef sort_subgraph_nodes(nodes, in_names, initializer_names):\n resolved_tensors = set(in_names) | set(initializer_names)\n sorted_nodes = []\n unresolved_nodes = nodes.copy()\n while len(unresolved_nodes) > 0:\n for node in unresolved_nodes:\n if all(name in resolved_tensors for name in node.input):\n for name in node.output:\n resolved_tensors.add(name)\n sorted_nodes.append(node)\n unresolved_nodes.remove(node)\n break\n return sorted_nodes\n\ndef collect_initializers(graph, subgraph_nodes):\n input_names = set()\n for n in subgraph_nodes:\n for in_name in n.input:\n input_names.add(in_name)\n filtered_initializers = []\n for init in graph.initializer:\n if init.name in input_names:\n filtered_initializers.append(init)\n return filtered_initializers\n\ndef replace_list(protobuf_list, new_items):\n while len(protobuf_list) > 0:\n protobuf_list.pop()\n for item in new_items:\n protobuf_list.append(item)\n\ndef process_model(model, input_shapes, output_shapes):\n graph = model.graph\n initializer_names = [initializer.name for initializer in graph.initializer]\n input_names = list(input_shapes.keys())\n output_names = list(output_shapes.keys())\n s_nodes = collect_subgraph_nodes(graph, input_names, output_names, initializer_names)\n f_initializers = collect_initializers(graph, s_nodes)\n replace_list(graph.node, s_nodes)\n replace_list(graph.initializer, f_initializers)\n replace_list(graph.input, [onnx.helper.make_tensor_value_info(name, onnx.TensorProto.FLOAT, shape) for name, shape in input_shapes.items()])\n replace_list(graph.output, [onnx.helper.make_tensor_value_info(name, onnx.TensorProto.FLOAT, shape) for name, shape in output_shapes.items()])\n\ndef make_test_io(model_path, output_shapes, test_input_shapes):\n test_inputs = {}\n session = onnxruntime.InferenceSession(model_path)\n for name, shape in test_input_shapes.items():\n test_inputs[name] = np.random.random(shape).astype(np.float32)\n output_names = list(output_shapes.keys())\n output_arrays = session.run(output_names, test_inputs)\n test_io_case = test_inputs\n for name, array in zip(output_names, output_arrays):\n test_io_case[name] = array\n return test_io_case\n\ndef main():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"src\", help=\"Source onnx model\")\n parser.add_argument(\"dst\", help=\"Destination onnx model\")\n parser.add_argument(\"inputshapes\", help=\"Input tensor names and shapes of subgraph. Example: 'tensor3=10x256,tensor4=-1x-1'\")\n parser.add_argument(\"outputshapes\", help=\"Output tensor names and shapes of subgraph. Example: 'tensor3=10x256,tensor4=-1x-1'\")\n parser.add_argument(\"--test\", help=\"Outputs test case file. 
Also needs --test-shapes\")\n parser.add_argument(\"--test-shapes\")\n args = parser.parse_args()\n input_shapes = parse_name_shapes(args.inputshapes)\n output_shapes = parse_name_shapes(args.outputshapes)\n model = onnx.load_model(args.src)\n process_model(model, input_shapes, output_shapes)\n try:\n onnx.checker.check_model(model)\n except Exception as ex:\n print(\"Checker error\")\n print(ex)\n onnx.save_model(model, args.dst)\n print(\"Saved model\")\n\n if args.test:\n test_shapes = parse_name_shapes(args.test_shapes)\n test_io_case = make_test_io(args.dst, output_shapes, test_shapes)\n serialize_tensors(args.test, test_io_case)\n print(\"Saved test io case\")\n\nif __name__ == \"__main__\":\n main()\n" }, { "alpha_fraction": 0.6066945791244507, "alphanum_fraction": 0.6178521513938904, "avg_line_length": 28.46575355529785, "blob_id": "9157190348b226bdc146bcc4165b09541108f81f", "content_id": "aae99c999276b3202c16e3779ee156eda0c699f9", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 2151, "license_type": "permissive", "max_line_length": 83, "num_lines": 73, "path": "/src/descriptor_runner/operators/wasm/operators/standard/squeeze.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { Backend } from \"../../../../interface/core/constants\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport { WebDNNWasmContext } from \"../../../../interface/backend/wasm/wasmContext\";\nimport { Squeeze1, Squeeze13 } from \"../../../base/squeeze\";\n\nclass WasmSqueeze1 extends Squeeze1 {\n constructor() {\n super(\"wasm\");\n }\n\n async run(context: WebDNNWasmContext, inputs: Tensor[]): Promise<Tensor[]> {\n // TODO: avoid copy\n const input = inputs[0];\n context.assertsWasmTensor(input);\n const computedShape = this.calcShape(input),\n output = context.emptyTensor(computedShape, input.dataType);\n context.runKernel(\"kernel_copy\", [\n { type: \"tensor\", value: input },\n { type: \"tensor\", value: output },\n { type: \"int32\", value: output.length },\n ]);\n return [output];\n }\n}\n\nclass WasmSqueeze13 extends Squeeze13 {\n constructor() {\n super(\"wasm\");\n }\n\n getTensorBackendRequirement(\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n nInputs: number,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n nOutputs: number\n ): (Backend | null)[] {\n return [\"wasm\", \"cpu\"];\n }\n\n async run(context: WebDNNWasmContext, inputs: Tensor[]): Promise<Tensor[]> {\n // TODO: avoid copy\n const input = inputs[0],\n axes = inputs[1];\n context.assertsWasmTensor(input);\n context.cpuContext.assertsCPUTensor(axes);\n const computedShape = this.calcShape(input, axes),\n output = context.emptyTensor(computedShape, input.dataType);\n context.runKernel(\"kernel_copy\", [\n { type: \"tensor\", value: input },\n { type: \"tensor\", value: output },\n { type: \"int32\", value: output.length },\n ]);\n return [output];\n }\n}\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Squeeze\",\n backend: \"wasm\",\n opsetMin: 13,\n factory: () => new WasmSqueeze13(),\n },\n {\n opType: \"Squeeze\",\n backend: \"wasm\",\n opsetMin: 1,\n opsetMax: 13,\n factory: () => new WasmSqueeze1(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.3673139214515686, "alphanum_fraction": 0.39077669382095337, "avg_line_length": 25.869565963745117, "blob_id": 
"271e740efb8c04f18f00612b86fc455353180d53", "content_id": "a427680d45a0ea66e5f2ad83bd40a273afa9c386", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1236, "license_type": "permissive", "max_line_length": 73, "num_lines": 46, "path": "/src/descriptor_runner/operators/cpu/rawcomputation/averagepool.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "export function averagepool(\n dX: Float32Array,\n dI: Float32Array,\n countIncludePad: boolean,\n batch: number,\n kernelShape: number[],\n pads: number[],\n strides: number[],\n inShape: number[],\n outShape: number[],\n ch: number\n): void {\n let idx = 0;\n for (let b = 0; b < batch; b++) {\n for (let c = 0; c < ch; c++) {\n for (let oy = 0; oy < outShape[0]; oy++) {\n for (let ox = 0; ox < outShape[1]; ox++) {\n let ctr = 0,\n sum = 0;\n for (let ky = 0; ky < kernelShape[0]; ky++) {\n for (let kx = 0; kx < kernelShape[1]; kx++) {\n const iny = oy * strides[0] - pads[0] + ky,\n inx = ox * strides[1] - pads[1] + kx;\n if (\n iny >= 0 &&\n iny < inShape[0] &&\n inx >= 0 &&\n inx < inShape[1]\n ) {\n const xidx =\n ((b * ch + c) * inShape[0] + iny) * inShape[1] + inx,\n v = dX[xidx];\n sum += v;\n ctr++;\n }\n }\n }\n\n dI[idx++] = countIncludePad\n ? sum / (kernelShape[0] * kernelShape[1])\n : sum / ctr;\n }\n }\n }\n }\n}\n" }, { "alpha_fraction": 0.5474022626876831, "alphanum_fraction": 0.5549008846282959, "avg_line_length": 24.93055534362793, "blob_id": "bd2b6390a7ad629a7bc89002ff17ee7de71cc407", "content_id": "02ab42d63da637cede14655aff4cc6cb88fe399c", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1867, "license_type": "permissive", "max_line_length": 83, "num_lines": 72, "path": "/src/descriptor_runner/operators/wasm/operators/standard/unary.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { OperatorImpl } from \"../../../operatorImpl\";\nimport { WebDNNWasmContext } from \"../../../../interface/backend/wasm/wasmContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\nclass WasmUnary extends OperatorImpl {\n constructor(private kernelName: string) {\n super(\"wasm\");\n }\n\n async run(context: WebDNNWasmContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsWasmTensorArray(inputs);\n const input = inputs[0];\n if (input.dataType !== \"float32\") {\n throw new Error(\"Only float32 is supported\");\n }\n const output = context.emptyTensor(input.dims, input.dataType);\n context.runKernel(this.kernelName, [\n { type: \"tensor\", value: input },\n { type: \"tensor\", value: output },\n { type: \"int32\", value: input.length },\n ]);\n return [output];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Ceil\",\n backend: \"wasm\",\n opsetMin: 1,\n factory: () => new WasmUnary(\"kernel_ceil\"),\n },\n {\n opType: \"Exp\",\n backend: \"wasm\",\n opsetMin: 1,\n factory: () => new WasmUnary(\"kernel_exp\"),\n },\n {\n opType: \"Floor\",\n backend: \"wasm\",\n opsetMin: 1,\n factory: () => new WasmUnary(\"kernel_floor\"),\n },\n {\n opType: \"Relu\",\n backend: \"wasm\",\n opsetMin: 1,\n factory: () => new WasmUnary(\"kernel_relu\"),\n },\n {\n opType: \"Sigmoid\",\n backend: \"wasm\",\n opsetMin: 1,\n factory: () => new WasmUnary(\"kernel_sigmoid\"),\n },\n {\n opType: \"Sqrt\",\n backend: 
\"wasm\",\n opsetMin: 1,\n factory: () => new WasmUnary(\"kernel_sqrt\"),\n },\n {\n opType: \"Tanh\",\n backend: \"wasm\",\n opsetMin: 1,\n factory: () => new WasmUnary(\"kernel_tanh\"),\n },\n ];\n}\n" }, { "alpha_fraction": 0.5972913503646851, "alphanum_fraction": 0.6088999509811401, "avg_line_length": 25.799999237060547, "blob_id": "cd86956413f2cc055287527f36278d573813d7b6", "content_id": "52376a6b444fa6f3a31ad1606a9df3d06ca5b807", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 3640, "license_type": "permissive", "max_line_length": 80, "num_lines": 135, "path": "/src/descriptor_runner/operators/webgl/operators/standard/gemm.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import {\n WebDNNWebGLContext,\n WebGLUniformItem,\n} from \"../../../../interface/backend/webgl/webglContext\";\nimport { WebGLTensor } from \"../../../../interface/backend/webgl/webglTensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { Gemm } from \"../../../base/gemm\";\nimport { broadcastUni } from \"../../../operatorUtil\";\nimport {\n shaderGenHeader,\n shaderGenOutput,\n shaderGenTensorNDGet,\n shaderGenTensorNDGetUniformItem,\n shaderGenTensorOutputCoordsWithReturn,\n shaderGenTensorOutputUniform,\n shaderGenTensorOutputUniformItem,\n} from \"../../shaderHelper\";\n\nexport class WebGLGemm extends Gemm {\n constructor() {\n super(\"webgl\");\n }\n\n async run(context: WebDNNWebGLContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsWebGLTensorArray(inputs);\n const inputA = inputs[0],\n inputB = inputs[1],\n inputC = inputs[2];\n if (inputC) {\n return this.runWithC(context, inputA, inputB, inputC);\n }\n throw new Error();\n }\n\n private async runWithC(\n context: WebDNNWebGLContext,\n inputA: WebGLTensor,\n inputB: WebGLTensor,\n inputC: WebGLTensor\n ): Promise<WebGLTensor[]> {\n const {\n m,\n n,\n k,\n strideA: [strideA0, strideA1],\n strideB: [strideB0, strideB1],\n } = this.calcShape(inputA.dims, inputB.dims),\n [strideC0, strideC1] = broadcastUni([m, n], inputC.dims);\n\n if (\n inputA.dimPerPixel !== 1 ||\n inputB.dimPerPixel !== 1 ||\n inputC.dimPerPixel !== 1\n ) {\n throw new Error();\n }\n\n const outputTensor = context.emptyTensor([m, n], \"float32\"),\n // ループ回数は定数が必要\n kernelSource = `${shaderGenHeader(context.webgl2)}\n\n#define m ${m}\n#define n ${n}\n#define k ${k}\n${shaderGenTensorOutputUniform(2)}\nuniform float alpha;\nuniform float beta;\n\n${shaderGenTensorNDGet(\"tex_input_a\", 2, context.webgl2)}\n${shaderGenTensorNDGet(\"tex_input_b\", 2, context.webgl2)}\n${shaderGenTensorNDGet(\"tex_input_c\", 2, context.webgl2)}\n\nvoid main() {\n ${shaderGenTensorOutputCoordsWithReturn(2)}\n float s = 0.0;\n for (int ip = 0; ip < k; ip++) {\n s += get_tex_input_a(tex_output_0, ip) * get_tex_input_b(ip, tex_output_1);\n }\n s *= alpha;\n s += beta * get_tex_input_c(tex_output_0, tex_output_1);\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n}\n`,\n kernelName = `gemm_${m}_${n}_${k}`;\n context.addKernel(kernelName, kernelSource);\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_a\",\n [strideA0, strideA1],\n inputA,\n context.webgl2\n ),\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_b\",\n [strideB0, strideB1],\n inputB,\n context.webgl2\n ),\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_c\",\n [strideC0, 
strideC1],\n        inputC,\n        context.webgl2\n      ),\n      ...shaderGenTensorOutputUniformItem([m, n], outputTensor, context.webgl2),\n      { name: \"alpha\", type: \"float\", value: this.alpha },\n      { name: \"beta\", type: \"float\", value: this.beta },\n    ];\n    await context.runKernel(\n      kernelName,\n      [\n        { tensor: inputA, name: \"tex_input_a\" },\n        { tensor: inputB, name: \"tex_input_b\" },\n        { tensor: inputC, name: \"tex_input_c\" },\n      ],\n      outputTensor,\n      uniforms\n    );\n    return [outputTensor];\n  }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n  return [\n    {\n      opType: \"Gemm\",\n      backend: \"webgl\",\n      opsetMin: 1,\n      factory: () => new WebGLGemm(),\n    },\n  ];\n}\n" }, { "alpha_fraction": 0.7055888175964355, "alphanum_fraction": 0.7055888175964355, "avg_line_length": 31.322580337524414, "blob_id": "838d9620289071ca486aa4526ba2b2a490a13be4", "content_id": "23d4f4fa8f86c1bd7e1a7ab737f788d6123e04ac", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1002, "license_type": "permissive", "max_line_length": 77, "num_lines": 31, "path": "/src/graph_transpiler/webdnn/optimization_pass.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "from typing import Dict, Optional\nimport numpy as np\nimport onnx\n\nfrom webdnn.operator_shader import OperatorShader\n\nclass OptimizationPassResult:\n    operator_shaders: Dict[str, OperatorShader]\n    initializers: Dict[str, np.ndarray]\n    tensor_move_options: Dict[str, dict]\n\n    def __init__(self) -> None:\n        self.operator_shaders = {}\n        self.initializers = {}\n        self.tensor_move_options = {}\n    \n    def merge(self, other: \"OptimizationPassResult\"):\n        self.operator_shaders.update(other.operator_shaders)\n        self.initializers.update(other.initializers)\n        # TODO: check conflict for same tensor\n        self.tensor_move_options.update(other.tensor_move_options)\n\n    def write_code(self, root_directory: str):\n        raise NotImplementedError\n\n    def remove_code(self, root_directory: str):\n        raise NotImplementedError\n\nclass OptimizationPass:\n    def optimize(self, model: onnx.ModelProto) -> Optional[OptimizationPassResult]:\n        raise NotImplementedError\n" }, { "alpha_fraction": 0.6027722954750061, "alphanum_fraction": 0.6110891103744507, "avg_line_length": 25.86170196533203, "blob_id": "4bc4bf2ee341bc6ac5d082ff8220a9856c3de9d3", "content_id": "2bdaa5f17ee84d4d7d4298e33e38c71fe3aedae7", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 2525, "license_type": "permissive", "max_line_length": 78, "num_lines": 94, "path": "/src/descriptor_runner/operators/webgl/operators/standard/split.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import {\n  shaderGenHeader,\n  shaderGenOutput,\n  shaderGenTensorNDGet,\n  shaderGenTensorNDGetUniformItem,\n  shaderGenTensorOutputCoordsWithReturn,\n  shaderGenTensorOutputUniform,\n  shaderGenTensorOutputUniformItem,\n} from \"../../shaderHelper\";\nimport { Split2 } from \"../../../base/split\";\nimport {\n  WebDNNWebGLContext,\n  WebGLUniformItem,\n} from \"../../../../interface/backend/webgl/webglContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { WebGLTensor } from \"../../../../interface/backend/webgl/webglTensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\nexport class WebGLSplit2 extends Split2 {\n  constructor() {\n    super(\"webgl\");\n  }\n\n  async run(\n    context: WebDNNWebGLContext,\n    inputs: Tensor[],\n    nOutputs: 
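/* number of output tensors the Split node declares (node.output.length at the call site) */ 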
number\n ): Promise<Tensor[]> {\n context.assertsWebGLTensorArray(inputs);\n const input = inputs[0],\n {\n eachOutputParams,\n outerLength,\n innerLength,\n inOuterStride,\n inConcatStride,\n } = this.calcShape(input, nOutputs),\n outputs: WebGLTensor[] = [],\n kernelName = \"split\",\n kernelSource = `${shaderGenHeader(context.webgl2)}\n\n${shaderGenTensorOutputUniform(3)}\nuniform int offset;\n\n${shaderGenTensorNDGet(\"tex_input\", 3, context.webgl2)}\n\nvoid main() {\n ${shaderGenTensorOutputCoordsWithReturn(3)}\n float s = get_tex_input(tex_output_0, tex_output_1 + offset, tex_output_2);\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n}\n`;\n context.addKernel(kernelName, kernelSource);\n for (let i = 0; i < nOutputs; i++) {\n const { dim, offset, outShape } = eachOutputParams[i],\n ot = context.emptyTensor(outShape, input.dataType),\n uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input\",\n [inOuterStride, inConcatStride, 1],\n input,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem(\n [outerLength, dim, innerLength],\n ot,\n context.webgl2\n ),\n { name: \"offset\", type: \"int\", value: offset },\n ];\n await context.runKernel(\n kernelName,\n [{ tensor: input, name: \"tex_input\" }],\n ot,\n uniforms\n );\n outputs.push(ot);\n }\n return outputs;\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Split\",\n backend: \"webgl\",\n opsetMin: 1,\n opsetMax: 13,\n factory: () => new WebGLSplit2(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.6000000238418579, "alphanum_fraction": 0.6105263233184814, "avg_line_length": 22.75, "blob_id": "d0fcb9dcbd4ea7695581321dac967c11454138ce", "content_id": "e1f7966480af8259637d613ad4e6194c4e7dc15a", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 95, "license_type": "permissive", "max_line_length": 51, "num_lines": 4, "path": "/src/graph_transpiler/webdnn/util.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import uuid\n\ndef make_random_identifier():\n return \"_\" + str(uuid.uuid4()).replace(\"-\", \"\")\n" }, { "alpha_fraction": 0.6497461795806885, "alphanum_fraction": 0.6497461795806885, "avg_line_length": 20.88888931274414, "blob_id": "0205d9e360c47565ad6d1a97a3033e9b7ae550ad", "content_id": "351020cf9d87815f93d39585445546db6a797fd1", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 197, "license_type": "permissive", "max_line_length": 76, "num_lines": 9, "path": "/src/shader/wasm/src/kernels/standard/copy.cpp", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "#include <cstring>\n#include \"../../common/kernel.hpp\"\n\nextern \"C\"\n{\n void WEBDNN_KERNEL kernel_copy(const float *src, float *dst, int length) {\n memcpy(dst, src, length * sizeof(float));\n }\n}\n" }, { "alpha_fraction": 0.7764317393302917, "alphanum_fraction": 0.7896475791931152, "avg_line_length": 23.54054069519043, "blob_id": "ae1f63c82f1916c6400c3af630c694b8eee63a6e", "content_id": "6715d80fc9f2ade358637100d87df36940d7d893", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1622, "license_type": "permissive", "max_line_length": 175, "num_lines": 37, "path": "/example/custom_operator/README.ja.md", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "# 
Custom operator implementation example\n\nA sample that implements and runs a custom operator that does not exist in the ONNX specification.\n\nIt builds a model containing a custom operator named `Twice`, which doubles its input, and runs it on WebDNN.\n\nPyTorch (`>=1.7`) is required to run the Python scripts.\n\n# Steps\n## Generate an ONNX model containing the custom operator\n```\npython make_model.py\n```\n\n## Place the custom operator implementation\n\nCopy `twice.ts`, the implementation of the custom operator, to `<repository_root>/src\\descriptor_runner\\operators\\cpu\\operators\\custom\\twice.ts`.\n\n## Build the operator set containing the custom operator\n\n```\npython -m webdnn.optimize_model output/model.onnx output\n```\n\nThe output files `model-{cpu,wasm,webgl}.onnx`, `op-{cpu,wasm,webgl}.js`, and `weight-{cpu,wasm,webgl}-0.bin` are generated. `op-{cpu,wasm,webgl}.js` contains the implementations of both the standard operators and the custom operator. `model.onnx` is no longer needed.\n\n## Run in a web browser\n\nAt the repository root, run\n\n```\nyarn server\n```\n\nThen open [http://localhost:8080/example/custom_operator/](http://localhost:8080/example/custom_operator/) in a web browser.\n\nIn this sample, the custom operator runs on the CPU. Standard operators that have GPU implementations run on the GPU, and tensor data is transferred between CPU and GPU automatically before and after the custom operator runs.\n" }, { "alpha_fraction": 0.6245487332344055, "alphanum_fraction": 0.6357072591781616, "avg_line_length": 25.845815658569336, "blob_id": "3b9d81177dfcbdaaf8dddf2870ae126e6587624f", "content_id": "b9d73effb7ab789ab1839bc44c7a64a3e140169e", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 6144, "license_type": "permissive", "max_line_length": 79, "num_lines": 227, "path": "/src/descriptor_runner/operators/webgl/operators/standard/instancenormalization.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { onnx } from \"onnx-proto\";\nimport { OperatorImpl } from \"../../../operatorImpl\";\nimport { arrayProd, getAttrInt } from \"../../../operatorUtil\";\nimport {\n  shaderGenHeader,\n  shaderGenOutput,\n  shaderGenOutputVec4,\n  shaderGenTensorNDGet,\n  shaderGenTensorNDGetUniformItem,\n  shaderGenTensorNDGetVec4,\n  shaderGenTensorOutputCoordsWithReturn,\n  shaderGenTensorOutputUniform,\n  shaderGenTensorOutputUniformItem,\n} from \"../../shaderHelper\";\nimport {\n  WebDNNWebGLContext,\n  WebGLUniformItem,\n} from \"../../../../interface/backend/webgl/webglContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { WebGLTensor } from \"../../../../interface/backend/webgl/webglTensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\n// Opset 1\nexport class InstanceNormalization extends OperatorImpl {\n  epsilon!: number;\n\n  constructor() {\n    super(\"webgl\");\n  }\n\n  initialize(attribute: onnx.IAttributeProto[]): void {\n    super.initialize(attribute);\n    this.epsilon = getAttrInt(attribute, \"epsilon\", 1e-5);\n  }\n\n  async run(context: WebDNNWebGLContext, inputs: Tensor[]): Promise<Tensor[]> {\n    context.assertsWebGLTensorArray(inputs);\n    const [input, scale, bias] = inputs;\n    if (!context.webgl2) {\n      // because emitting the two statistics (mean, std) per texel is difficult in WebGL1\n      throw new Error(\"InstanceNormalization: WebGL1 is not supported\");\n    }\n\n    const reductionLength = arrayProd(input.dims.slice(2));\n    const [dimBatch, dimCh] = input.dims;\n\n    // Compute statistics\n    const maxSumExpTensor = context.emptyTensor(\n      [dimBatch * dimCh * 4],\n      \"float32\",\n      { dimPerPixel: 4 }\n    );\n    await this.calcStat(\n      context,\n      dimBatch,\n      dimCh,\n      reductionLength,\n      this.epsilon,\n      input,\n      scale,\n      bias,\n      maxSumExpTensor\n    );\n\n    // Compute the result\n    const output = context.emptyTensor(input.dims, input.dataType);\n    await this.calcOutput2(\n      context,\n      dimBatch,\n      dimCh,\n      reductionLength,\n      input,\n      maxSumExpTensor,\n      output\n    );\n    maxSumExpTensor.dispose();\n    return [output];\n  }\n\n  
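// [editor's sketch] The two passes below implement, per (batch, channel):\n  //   scale' = scale / sqrt(var + epsilon);  bias' = bias - mean * scale'\n  // calcStat packs (scale', bias') into one RGBA texel; calcOutput2 then applies\n  //   y = x * scale' + bias'  elementwise over the spatial axes.\n  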
private async calcStat(\n context: WebDNNWebGLContext,\n batchLength: number,\n chLength: number,\n reductionLength: number,\n epsilon: number,\n input: WebGLTensor,\n scale: WebGLTensor,\n bias: WebGLTensor,\n maxSumExpTensor: WebGLTensor\n ) {\n const kernelName = `instancenormalization_stats_${reductionLength}`,\n kernelSource = `${shaderGenHeader(context.webgl2)}\n\n#define reductionLength ${reductionLength}\nuniform float epsilon;\n${shaderGenTensorOutputUniform(2)}\n${shaderGenTensorNDGet(\"tex_input\", 3, context.webgl2)}\n${shaderGenTensorNDGet(\"tex_scale\", 1, context.webgl2)}\n${shaderGenTensorNDGet(\"tex_bias\", 1, context.webgl2)}\n\nvoid main() {\n ${shaderGenTensorOutputCoordsWithReturn(2)}\n float s_sum = 0.0;\n float s_sqsum = 0.0;\n for (int i = 0; i < reductionLength; i++) {\n float v = get_tex_input(tex_output_0, tex_output_1, i);\n s_sum += v;\n s_sqsum += v * v;\n }\n float s_mean = s_sum / float(reductionLength);\n float s_var = s_sqsum / float(reductionLength) - s_mean * s_mean + epsilon;\n float s_invstd = inversesqrt(s_var);\n float s_scale = get_tex_scale(tex_output_1) * s_invstd;\n float s_bias = -s_mean * s_scale + get_tex_bias(tex_output_1);\n\n vec4 s = vec4(s_scale, s_bias, 0.0, 0.0);\n ${shaderGenOutputVec4(\"s\", context.webgl2)}\n return;\n}\n`;\n context.addKernel(kernelName, kernelSource);\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input\",\n [chLength * reductionLength, reductionLength, 1],\n input,\n context.webgl2\n ),\n ...shaderGenTensorNDGetUniformItem(\n \"tex_scale\",\n scale.strides,\n scale,\n context.webgl2\n ),\n ...shaderGenTensorNDGetUniformItem(\n \"tex_bias\",\n bias.strides,\n bias,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem(\n [batchLength, chLength],\n maxSumExpTensor,\n context.webgl2\n ),\n { name: \"epsilon\", value: epsilon, type: \"float\" },\n ];\n await context.runKernel(\n kernelName,\n [\n { tensor: input, name: \"tex_input\" },\n { tensor: scale, name: \"tex_scale\" },\n { tensor: bias, name: \"tex_bias\" },\n ],\n maxSumExpTensor,\n uniforms\n );\n }\n\n private async calcOutput2(\n context: WebDNNWebGLContext,\n batchLength: number,\n chLength: number,\n reductionLength: number,\n input: WebGLTensor,\n maxSumExpTensor: WebGLTensor,\n output: WebGLTensor\n ) {\n const kernelName = `instancenormalization_output`,\n kernelSource = `${shaderGenHeader(context.webgl2)}\n\n${shaderGenTensorOutputUniform(3)}\n\n${shaderGenTensorNDGet(\"tex_input\", 3, context.webgl2)}\n${shaderGenTensorNDGetVec4(\"tex_stats\", 2, context.webgl2)}\n\nvoid main() {\n ${shaderGenTensorOutputCoordsWithReturn(3)}\n vec4 m = get_vec4_tex_stats(tex_output_0, tex_output_1);\n float v = get_tex_input(tex_output_0, tex_output_1, tex_output_2);\n float s = v * m.r + m.g;\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n}\n`;\n context.addKernel(kernelName, kernelSource);\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input\",\n [chLength * reductionLength, reductionLength, 1],\n input,\n context.webgl2\n ),\n ...shaderGenTensorNDGetUniformItem(\n \"tex_stats\",\n [chLength, 1],\n maxSumExpTensor,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem(\n [batchLength, chLength, reductionLength],\n output,\n context.webgl2\n ),\n ];\n await context.runKernel(\n kernelName,\n [\n { tensor: input, name: \"tex_input\" },\n { tensor: maxSumExpTensor, name: \"tex_stats\" },\n ],\n output,\n uniforms\n );\n }\n}\n\nexport function getOpEntries(): 
OperatorEntry[] {\n return [\n {\n opType: \"InstanceNormalization\",\n backend: \"webgl\",\n opsetMin: 1,\n factory: () => new InstanceNormalization(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.6661486029624939, "alphanum_fraction": 0.6663039922714233, "avg_line_length": 29.93269157409668, "blob_id": "59b3f44663ba156d7c3c6921b7eddb7202f3f3c1", "content_id": "cf7f8f8ddd12dc03f370734775a3350a8c9790b0", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 6434, "license_type": "permissive", "max_line_length": 142, "num_lines": 208, "path": "/src/descriptor_runner/separateBuild/coreOnly.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { Backend } from \"../interface/core/constants\";\nimport * as Image from \"../image\";\nimport * as Math from \"../math\";\nexport { Image, Math };\nimport { WebDNNLogging } from \"../logging\";\nexport { WebDNNLogging as Logging };\nimport { BackendContexts, RunnerImpl } from \"../core/runnerImpl\";\nimport { WebDNNCPUContextImpl } from \"../backend/cpu/cpuContextImpl\";\nimport { WebDNNWebGLContextImpl } from \"../backend/webgl/webglContextImpl\";\nimport { WebDNNWasmContextImpl } from \"../backend/wasm/wasmContextImpl\";\nimport { WebDNNWebGPUContextImpl } from \"../backend/webgpu/webgpuContextImpl\";\nimport { registerOperators } from \"../core/operatorTable\";\nimport { Runner } from \"../interface/core/runner\";\nimport { OperatorEntry } from \"../interface/core/operator\";\nimport { WebDNNWebGLContextOption } from \"../interface/backend/webgl/webglContext\";\nimport { WebDNNWebGPUContextOption } from \"../interface/backend/webgpu/webgpuContext\";\nexport { CPUTensorImpl as CPUTensor } from \"../backend/cpu/cpuTensorImpl\";\n\nexport interface InitOption {\n backendOrder?: Backend[];\n optimized?: boolean;\n backendOptions?: {\n wasm?: WebDNNWebGLContextOption;\n webgl?: WebDNNWebGLContextOption;\n webgpu?: WebDNNWebGPUContextOption;\n };\n progressCallback?: (loaded: number, total: number) => unknown;\n}\n\nconst defaultContexts = {\n cpu: null as WebDNNCPUContextImpl | null,\n};\n\ninterface InjectionParams {\n operatorEntries: OperatorEntry[];\n wasmWorkerSrcUrl?: string;\n}\n\nlet injectionCallback: ((params: InjectionParams) => void) | null = null;\n\nexport function injectOperators(params: InjectionParams): void {\n if (injectionCallback) {\n injectionCallback(params);\n injectionCallback = null;\n }\n}\n\nfunction loadJS(url: string): Promise<InjectionParams> {\n return new Promise((resolve) => {\n injectionCallback = resolve;\n const tag = document.createElement(\"script\");\n tag.type = \"text/javascript\";\n tag.src = url;\n document.body.appendChild(tag);\n });\n}\n\nasync function loadCPU(\n directory: string,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n options: InitOption,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n cpuContext: WebDNNCPUContextImpl\n): Promise<void> {\n const injectionParams = await loadJS(`${directory}op-cpu.js`);\n registerOperators(injectionParams.operatorEntries);\n}\n\nasync function loadWasm(\n directory: string,\n options: InitOption,\n cpuContext: WebDNNCPUContextImpl\n): Promise<WebDNNWasmContextImpl> {\n const ctx = new WebDNNWasmContextImpl(\n cpuContext,\n options.backendOptions?.wasm || {}\n ),\n injectionParams = await loadJS(`${directory}op-wasm.js`);\n if (typeof injectionParams.wasmWorkerSrcUrl !== \"string\") {\n throw new Error(\"Invalid injection 
parameter\");\n }\n await ctx.initialize(injectionParams.wasmWorkerSrcUrl);\n registerOperators(injectionParams.operatorEntries);\n return ctx;\n}\n\nasync function loadWebGL(\n directory: string,\n options: InitOption,\n cpuContext: WebDNNCPUContextImpl\n): Promise<WebDNNWebGLContextImpl> {\n const ctx = new WebDNNWebGLContextImpl(\n cpuContext,\n options.backendOptions?.webgl || {}\n ),\n injectionParams = await loadJS(`${directory}op-${ctx.version}.js`);\n await ctx.initialize();\n registerOperators(injectionParams.operatorEntries);\n return ctx;\n}\n\nasync function loadWebGPU(\n directory: string,\n options: InitOption,\n cpuContext: WebDNNCPUContextImpl\n): Promise<WebDNNWebGPUContextImpl> {\n const ctx = new WebDNNWebGPUContextImpl(\n cpuContext,\n options.backendOptions?.webgpu || {}\n ),\n injectionParams = await loadJS(`${directory}op-webgpu.js`);\n await ctx.initialize();\n registerOperators(injectionParams.operatorEntries);\n return ctx;\n}\n\nexport async function load(\n directory: string,\n options: InitOption = {}\n): Promise<Runner> {\n const { backendOrder = [\"webgl\", \"wasm\", \"cpu\"], optimized } = options;\n if (!optimized) {\n throw new Error(\n \"webdnn-core.js only accepts optimized model. Specify directory which contains model-cpu.onnx and specify {optimized: true} in options.\"\n );\n }\n if (!defaultContexts.cpu) {\n defaultContexts.cpu = new WebDNNCPUContextImpl();\n await defaultContexts.cpu.initialize();\n }\n const cpuContext: WebDNNCPUContextImpl = defaultContexts.cpu,\n backendContexts: BackendContexts = { cpu: cpuContext };\n let succeedBackend: Backend | null = null;\n const opDirectory = directory;\n for (const tryBackend of backendOrder) {\n switch (tryBackend) {\n case \"cpu\":\n try {\n await loadCPU(opDirectory, options, cpuContext);\n succeedBackend = \"cpu\";\n // eslint-disable-next-line no-empty\n } catch {}\n break;\n case \"wasm\":\n {\n try {\n backendContexts.wasm = await loadWasm(\n opDirectory,\n options,\n cpuContext\n );\n succeedBackend = \"wasm\";\n // eslint-disable-next-line no-empty\n } catch {}\n }\n break;\n case \"webgl\":\n {\n try {\n backendContexts.webgl = await loadWebGL(\n opDirectory,\n options,\n cpuContext\n );\n succeedBackend = \"webgl\";\n // eslint-disable-next-line no-empty\n } catch {}\n }\n break;\n case \"webgpu\":\n {\n try {\n backendContexts.webgpu = await loadWebGPU(\n opDirectory,\n options,\n cpuContext\n );\n succeedBackend = \"webgpu\";\n // eslint-disable-next-line no-empty\n } catch {}\n }\n break;\n default:\n throw new Error(`Unknown backend ${tryBackend}`);\n }\n\n if (succeedBackend) {\n break;\n }\n }\n if (!succeedBackend) {\n throw new Error(\"No backend available\");\n }\n const actualBackendOrder: Backend[] =\n succeedBackend === \"cpu\" ? 
[\"cpu\"] : [succeedBackend, \"cpu\"],\n runner = new RunnerImpl(actualBackendOrder, backendContexts);\n let modelNameBackendPart: string = actualBackendOrder[0];\n if (modelNameBackendPart === \"webgl\") {\n // eslint-disable-next-line @typescript-eslint/no-non-null-assertion\n modelNameBackendPart = backendContexts.webgl!.version;\n }\n await runner.loadModel(\n directory,\n `model-${modelNameBackendPart}.onnx`,\n options.progressCallback\n );\n return runner;\n}\n" }, { "alpha_fraction": 0.6153846383094788, "alphanum_fraction": 0.6358428597450256, "avg_line_length": 28.095237731933594, "blob_id": "2e1de6f7e9d3f638dccce9b56a92e3c76a5fc4b5", "content_id": "dcc08dd2fbc0c89fa85d74f626645c2c1c505a56", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1222, "license_type": "permissive", "max_line_length": 72, "num_lines": 42, "path": "/src/descriptor_runner/core/tensorDecoder/decodeTensorRaw.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import Long from \"long\";\nimport { onnx } from \"onnx-proto\";\nimport { DataArrayTypes } from \"../../interface/core/constants\";\nimport { clipLong } from \"../../util\";\n\nexport function decodeTensorRaw(\n buf: ArrayBuffer,\n bodyByteOffset: number,\n bodyCompressedLength: number,\n dataType: number,\n numel: number\n): DataArrayTypes {\n let data: DataArrayTypes;\n switch (dataType) {\n case onnx.TensorProto.DataType.FLOAT:\n data = new Float32Array(numel);\n break;\n case onnx.TensorProto.DataType.INT32:\n data = new Int32Array(numel);\n break;\n case onnx.TensorProto.DataType.INT64: {\n data = new Int32Array(numel);\n const view = new DataView(buf, bodyByteOffset, numel * 8);\n for (let idx = 0; idx < numel; idx++) {\n data[idx] = clipLong(\n new Long(\n view.getUint32(idx * 8, true),\n view.getUint32(idx * 8 + 4, true)\n )\n );\n }\n return data;\n }\n default:\n throw new Error(\"Unsupported DataType\");\n }\n // Buf may not be aligned\n const dataUint8View = new Uint8Array(data.buffer),\n srcUint8View = new Uint8Array(buf, bodyByteOffset, data.byteLength);\n dataUint8View.set(srcUint8View);\n return data;\n}\n" }, { "alpha_fraction": 0.7213333249092102, "alphanum_fraction": 0.7213333249092102, "avg_line_length": 38.47368240356445, "blob_id": "3ed64ba36bfe5d9239bf45c4139c8139f8074d6d", "content_id": "98f03ee1ee46eb5794f54a2dcc50d4dda3610d91", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 750, "license_type": "permissive", "max_line_length": 75, "num_lines": 19, "path": "/src/descriptor_runner/interface/backend/cpu/cpuContext.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { BackendContext } from \"../../core/backendContext\";\nimport { DataArrayTypes, DataType } from \"../../core/constants\";\nimport { Tensor } from \"../../core/tensor\";\nimport { CPUTensor } from \"./cpuTensor\";\n\nexport interface WebDNNCPUContext extends BackendContext {\n backend: \"cpu\";\n initialize(): Promise<void>;\n isCPUTensor(tensor: Tensor): tensor is CPUTensor;\n assertsCPUTensor(tensor: Tensor): asserts tensor is CPUTensor;\n assertsCPUTensorArray(tensors: Tensor[]): asserts tensors is CPUTensor[];\n emptyTensor(\n dims: ReadonlyArray<number>,\n dataType?: DataType,\n data?: DataArrayTypes\n ): CPUTensor;\n // eslint-disable-next-line @typescript-eslint/ban-types\n moveTensor(tensor: Tensor, option: {}): Promise<CPUTensor>;\n}\n" }, { 
"alpha_fraction": 0.5942515730857849, "alphanum_fraction": 0.6017170548439026, "avg_line_length": 25.790000915527344, "blob_id": "2b31dac5fa142df1cff6814dbe13deafd41e989a", "content_id": "7434f0a5b85631bac2a98eeaf9c8ce97ae8c0ca5", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 2679, "license_type": "permissive", "max_line_length": 79, "num_lines": 100, "path": "/src/descriptor_runner/operators/base/split.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { onnx } from \"onnx-proto\";\nimport { OperatorImpl } from \"../operatorImpl\";\nimport { arrayProd, getAttrInt, getAttrInts } from \"../operatorUtil\";\nimport { Tensor } from \"../../interface/core/tensor\";\nimport { CPUTensor } from \"../..\";\n\nabstract class Split extends OperatorImpl {\n axis!: number;\n\n protected calcShapeBase(\n input: Tensor,\n nOutputs: number,\n splitSrc: ReadonlyArray<number>\n ) {\n let { axis } = this;\n if (axis < 0) {\n axis += input.ndim;\n }\n if (axis < 0 || axis >= input.ndim) {\n throw new Error(`Split: axis ${axis} out of range`);\n }\n const axisLength = input.dims[axis],\n split =\n splitSrc.length > 0\n ? splitSrc\n : Array.from({ length: nOutputs }, () =>\n Math.floor(axisLength / nOutputs)\n ),\n outerLength = arrayProd(input.dims.slice(0, axis)),\n innerLength = arrayProd(input.dims.slice(axis + 1)),\n inOuterStride = input.strides[Math.max(axis - 1, 0)],\n inConcatStride = input.strides[axis];\n let offset = 0;\n const eachOutputParams: {\n dim: number;\n offset: number;\n outShape: number[];\n outerStride: number;\n splitStride: number;\n }[] = [];\n for (let i = 0; i < nOutputs; i++) {\n const dim = split[i],\n outShape = input.dims.slice();\n outShape[axis] = dim;\n // Stride of output axis=Math.max(axis-1, 0)\n const outerStride = arrayProd(outShape.slice(Math.max(axis - 1, 0) + 1)),\n // Stride of output axis=axis\n splitStride = arrayProd(outShape.slice(axis + 1));\n eachOutputParams.push({\n dim,\n offset,\n outShape,\n outerStride,\n splitStride,\n });\n\n offset += dim;\n }\n return {\n eachOutputParams,\n outerLength,\n innerLength,\n inOuterStride,\n inConcatStride,\n };\n }\n}\n\nexport abstract class Split2 extends Split {\n split!: number[];\n\n initialize(attribute: onnx.IAttributeProto[]): void {\n super.initialize(attribute);\n this.axis = getAttrInt(attribute, \"axis\", 0);\n this.split = getAttrInts(attribute, \"split\", []);\n }\n\n protected calcShape(input: Tensor, nOutputs: number) {\n return this.calcShapeBase(input, nOutputs, this.split);\n }\n}\n\nexport abstract class Split13 extends Split {\n initialize(attribute: onnx.IAttributeProto[]): void {\n super.initialize(attribute);\n this.axis = getAttrInt(attribute, \"axis\", 0);\n }\n\n protected calcShape(\n input: Tensor,\n nOutputs: number,\n splitTensor?: CPUTensor\n ) {\n return this.calcShapeBase(\n input,\n nOutputs,\n splitTensor ? 
Array.from(splitTensor.data) : []\n );\n }\n}\n" }, { "alpha_fraction": 0.6075035929679871, "alphanum_fraction": 0.62193363904953, "avg_line_length": 26.176469802856445, "blob_id": "977fc47af5fe75d53f05ed16a082ed7076ac873a", "content_id": "7f45803fc8c7955ac5e926f7600459ce43f26a53", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1386, "license_type": "permissive", "max_line_length": 80, "num_lines": 51, "path": "/src/descriptor_runner/operators/cpu/operators/standard/squeeze.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport { Squeeze1, Squeeze13 } from \"../../../base/squeeze\";\n\nexport class CPUSqueeze1 extends Squeeze1 {\n constructor() {\n super(\"cpu\");\n }\n\n async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsCPUTensorArray(inputs);\n const input = inputs[0],\n newShape = this.calcShape(input),\n output = context.emptyTensor(newShape, input.dataType, input.data);\n return [output];\n }\n}\n\nexport class CPUSqueeze13 extends Squeeze13 {\n constructor() {\n super(\"cpu\");\n }\n\n async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsCPUTensorArray(inputs);\n const input = inputs[0],\n axes = inputs[1],\n newShape = this.calcShape(input, axes),\n output = context.emptyTensor(newShape, input.dataType, input.data);\n return [output];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Squeeze\",\n backend: \"cpu\",\n opsetMin: 13,\n factory: () => new CPUSqueeze13(),\n },\n {\n opType: \"Squeeze\",\n backend: \"cpu\",\n opsetMin: 1,\n opsetMax: 13,\n factory: () => new CPUSqueeze1(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.6578378081321716, "alphanum_fraction": 0.6729729771614075, "avg_line_length": 38.36170196533203, "blob_id": "654450f5503e25054b5342e4078914fc97b64ea2", "content_id": "1ddface170466e79fd7bf527988be444878c69ac", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1850, "license_type": "permissive", "max_line_length": 198, "num_lines": 47, "path": "/src/shader/wasm/compile.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "\"\"\"\ncompile operator kernels of c++ into wasm, then embed them in single ts file, to distribute single webdnn.js\n\"\"\"\n\nimport base64\nimport glob\nimport os\nimport subprocess\nimport sys\n\nCPP_SRC_DIR = \"src\"\nDST_DIR = \"../../descriptor_runner/operators/wasm/worker\"\nOPTIMIZATION = \"-O3\"\n\n# change current directory to where this file is\nos.chdir(os.path.dirname(os.path.abspath(__file__)))\n\n# dependency C++ library\nif not os.path.exists(\"./lib/eigen-3.3.9\"):\n sys.stderr.write(f\"downloading eigen library into {os.path.join(os.getcwd(), 'lib')}\\n\")\n os.makedirs(\"./lib\", exist_ok=True)\n import urllib.request\n import tarfile\n thetarfile = \"https://gitlab.com/libeigen/eigen/-/archive/3.3.9/eigen-3.3.9.tar.bz2\"\n ftpstream = urllib.request.urlopen(thetarfile)\n thetarfile = tarfile.open(fileobj=ftpstream, mode=\"r|bz2\")\n thetarfile.extractall(\"./lib\")\n\nsrcs = glob.glob(CPP_SRC_DIR + \"/**/*.cpp\", recursive=True)\n\nsubprocess.check_call([\"emcc\", \"-std=c++11\", 
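# editor note: the flags that follow inject pre.js, add the Eigen include path, and enable ALLOW_MEMORY_GROWTH\n        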
\"--pre-js\", \"pre.js\", \"-I\", \"lib/eigen-3.3.9\", \"-o\", f\"{DST_DIR}/workerRaw.js\", OPTIMIZATION, \"-s\", \"ALLOW_MEMORY_GROWTH=1\", *srcs], shell=os.name=='nt')\n\n# embed wasm into worker js\nwith open(f\"{DST_DIR}/workerRaw.wasm\", \"rb\") as f:\n worker_wasm = f.read()\nwith open(f\"{DST_DIR}/workerRaw.js\", \"rt\", encoding=\"utf-8\") as f:\n worker_js = f.read()\n\nworker_js_with_wasm = worker_js.replace(\"WASM_WORKER_WASM_BINARY_BASE64\", base64.b64encode(worker_wasm).decode(\"ascii\"))\nworker_js_with_wasm_escaped = worker_js_with_wasm.replace(\"\\\\\", \"\\\\\\\\\").replace(\"`\", \"\\\\`\")\n\nworker_data_url_src = f\"\"\"/* eslint-disable */\nexport const wasmWorkerSrcUrl = URL.createObjectURL(new File([`{worker_js_with_wasm_escaped}`], \"worker.js\", {{type: \"text/javascript\"}}));\n\"\"\"\n\nwith open(f\"{DST_DIR}/worker.ts\", \"wt\", encoding=\"utf-8\", newline=\"\\n\") as f:\n f.write(worker_data_url_src)\n" }, { "alpha_fraction": 0.615646243095398, "alphanum_fraction": 0.6198034882545471, "avg_line_length": 36.26760482788086, "blob_id": "316d288988d770ca6604aff953ba00ca9c2aa149", "content_id": "7ac3f97374389454d1fddb295bf3280652b2b7bb", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2646, "license_type": "permissive", "max_line_length": 108, "num_lines": 71, "path": "/src/graph_transpiler/webdnn/pass_fusion_unary_cpu.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "from typing import List\nimport onnx\nfrom webdnn.pass_fusion_unary import PassFusionUnary\nfrom webdnn.operator_shader_cpu import OperatorShaderCPU\nfrom webdnn.optimization_pass_result_cpu import OptimizationPassResultCPU\n\nSHADER_TEMPLATE = \"\"\"\nimport { DataArrayConstructor, DataType } from \"../../../../interface/core/constants\";\nimport { OperatorImpl } from \"../../../operatorImpl\";\nimport { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\nclass CPUUnary extends OperatorImpl {\n constructor(\n private op: (value: number) => number,\n private allowDataTypes: DataType[]\n ) {\n super(\"cpu\");\n }\n\n async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsCPUTensorArray(inputs);\n const input = inputs[0];\n if (!this.allowDataTypes.includes(input.dataType)) {\n throw new Error(`Unary: DataType ${input.dataType} not supported`);\n }\n const newData = new DataArrayConstructor[input.dataType](input.data.length);\n const op = this.op;\n for (let i = 0; i < newData.length; i++) {\n newData[i] = op(input.data[i]);\n }\n const output = context.emptyTensor(input.dims, input.dataType, newData);\n return [output];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"%%OP_TYPE%%\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new CPUUnary((v0) => {%%FUNC_BODY%%}, [\"float32\"]),\n },\n ];\n}\n\"\"\"\n\nFUNC_TEMPLATES = {\n \"Ceil\": \"const %%VAR_OUT%% = Math.ceil(%%VAR_IN%%);\",\n \"Exp\": \"const %%VAR_OUT%% = Math.exp(%%VAR_IN%%);\",\n \"Floor\": \"const %%VAR_OUT%% = Math.floor(%%VAR_IN%%);\",\n \"Relu\": \"const %%VAR_OUT%% = Math.max(%%VAR_IN%%, 0);\",\n \"Sigmoid\": \"const %%VAR_OUT%% = (Math.tanh(%%VAR_IN%% / 2) + 1) / 2;\",\n \"Sqrt\": \"const %%VAR_OUT%% = Math.sqrt(%%VAR_IN%%);\",\n \"Tanh\": \"const %%VAR_OUT%% = Math.tanh(%%VAR_IN%%);\",\n}\n\nclass 
PassFusionUnaryCPU(PassFusionUnary):\n    def _make_shader(self, custom_op_type: str, nodes: List[onnx.NodeProto]) -> OperatorShaderCPU:\n        func_body = \"\"\n        for i, node in enumerate(nodes):\n            tmpl = FUNC_TEMPLATES[node.op_type]\n            func_body += tmpl.replace(\"%%VAR_IN%%\", f\"v{i}\").replace(\"%%VAR_OUT%%\", f\"v{i+1}\")\n        func_body += f\"return v{len(nodes)};\"\n        ts_code = SHADER_TEMPLATE.replace(\"%%OP_TYPE%%\", custom_op_type).replace(\"%%FUNC_BODY%%\", func_body)\n        return OperatorShaderCPU(ts_code)\n\n    def _construct_result(self):\n        return OptimizationPassResultCPU()\n" }, { "alpha_fraction": 0.6006528735160828, "alphanum_fraction": 0.61915123462677, "avg_line_length": 28.967391967773438, "blob_id": "0756e3cc4189d7ca2b209e8978041b6489a583d8", "content_id": "1f96d30b8a407d24edc0a03225ad2432bbb21d5b", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 3245, "license_type": "permissive", "max_line_length": 74, "num_lines": 92, "path": "/src/descriptor_runner/operators/base/matmul.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { OperatorImpl } from \"../operatorImpl\";\nimport { calcStrides } from \"../operatorUtil\";\n\n// Version 13\nexport abstract class MatMul extends OperatorImpl {\n  protected calcShape(\n    dimsA: ReadonlyArray<number>,\n    dimsB: ReadonlyArray<number>\n  ) {\n    /*\n     * Computes the MatMul output shape and the input strides.\n     * matmul((a,b,m,k), (a,b,k,n)) => (a,b,m,n)\n     *\n     * The a, b part is not limited to two axes; any number (zero or more) of batch axes is allowed.\n     * Each batch axis size must match between the two matrices.\n     * Broadcasting applies: the input with fewer axes gets size-1 axes prepended,\n     * and where one size is 1 and the other is not, the size-1 axis is broadcast to the other size.\n     *\n     * Special cases when one input is 1-D:\n     * (k), (a,b,k,n) => (a,b,n)\n     * (k) is broadcast to (a,b,1,k), the product (a,b,1,n) is computed, and the size-1 axis is dropped to give (a,b,n).\n     *\n     * (a,b,m,k), (k) => (a,b,m)\n     * (k) is broadcast to (a,b,k,1), the product (a,b,m,1) is computed, and the size-1 axis is dropped to give (a,b,m).\n     *\n     * If both inputs are 1-D, a plain inner product yields (1,1); both size-1 axes are dropped, leaving a 0-D scalar.\n     */\n\n    // Number of output axes (before the 1-D special case is applied)\n    const totalNDims = Math.max(dimsA.length, dimsB.length, 2),\n      expandedDimsA = dimsA.slice();\n    if (expandedDimsA.length === 0) {\n      throw new Error();\n    } else if (expandedDimsA.length === 1) {\n      expandedDimsA.unshift(1);\n    }\n    while (expandedDimsA.length < totalNDims) {\n      expandedDimsA.unshift(1);\n    }\n    const expandedDimsB = dimsB.slice();\n    if (expandedDimsB.length === 0) {\n      throw new Error();\n    } else if (expandedDimsB.length === 1) {\n      expandedDimsB.push(1);\n    }\n    while (expandedDimsB.length < totalNDims) {\n      expandedDimsB.unshift(1);\n    }\n\n    const resultDims = [\n      expandedDimsA[expandedDimsA.length - 2],\n      expandedDimsB[expandedDimsB.length - 1],\n    ],\n      innerProductLength = expandedDimsA[expandedDimsA.length - 1];\n    if (innerProductLength !== expandedDimsB[expandedDimsB.length - 2]) {\n      throw new Error();\n    }\n    const stridesA = calcStrides(expandedDimsA),\n      stridesB = calcStrides(expandedDimsB);\n    for (let i = expandedDimsA.length - 3; i >= 0; i--) {\n      const resultDim = Math.max(expandedDimsA[i], expandedDimsB[i]);\n      // Broadcast axes get stride 0 (using 0 is also fine for output axes of size 1)\n      if (expandedDimsA[i] === 1) {\n        stridesA[i] = 0;\n      }\n      if (expandedDimsB[i] === 1) {\n        stridesB[i] = 0;\n      }\n      resultDims.unshift(resultDim);\n    }\n\n    const resultStrides = calcStrides(resultDims),\n      resultLength = resultStrides[0] * resultDims[0],\n      resultDimsAfterSqueeze = resultDims.slice();\n    if (dimsA.length === 1) {\n      resultDimsAfterSqueeze.splice(resultDimsAfterSqueeze.length - 2, 1);\n    }\n    if (dimsB.length === 1) {\n      resultDimsAfterSqueeze.splice(resultDimsAfterSqueeze.length - 1, 1);\n    }\n\n    return {\n      resultLength,\n      
resultDims,\n resultStrides,\n resultDimsAfterSqueeze,\n stridesA,\n stridesB,\n innerProductLength,\n };\n }\n}\n" }, { "alpha_fraction": 0.5636040568351746, "alphanum_fraction": 0.568324863910675, "avg_line_length": 31.888147354125977, "blob_id": "3586c36aa48cb21dea4ddc3da7b6accf4fc4f8cb", "content_id": "5c32b0d7080fa8f692d66d61a01f4d8980ef2e64", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 19854, "license_type": "permissive", "max_line_length": 90, "num_lines": 599, "path": "/src/descriptor_runner/core/runnerImpl.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "/* eslint-disable @typescript-eslint/no-explicit-any */\n/* eslint-disable @typescript-eslint/no-non-null-assertion */\nimport { onnx } from \"onnx-proto\";\nimport {\n Backend,\n DataType,\n backendsWithoutCPU,\n backends,\n} from \"../interface/core/constants\";\nimport {\n clipLong,\n intOrLongToInt,\n intOrLongToIntVector,\n nonnull,\n} from \"../util\";\nimport Long from \"long\";\nimport { InputProxy } from \"./inputProxy\";\nimport { OutputProxy } from \"./outputProxy\";\nimport { findTensorReleaseTiming, modelTransform } from \"./modelTransform\";\nimport { CPUTensor } from \"../interface/backend/cpu/cpuTensor\";\nimport { Tensor } from \"../interface/core/tensor\";\nimport { WebGPUTensor } from \"../interface/backend/webgpu/webgpuTensor\";\nimport { WebGLTensor } from \"../interface/backend/webgl/webglTensor\";\nimport { WasmTensor } from \"../interface/backend/wasm/wasmTensor\";\nimport { instantiateOperator } from \"./operatorTable\";\nimport { Runner } from \"../interface/core/runner\";\nimport { WebDNNCPUContext } from \"../interface/backend/cpu/cpuContext\";\nimport { WebDNNWasmContext } from \"../interface/backend/wasm/wasmContext\";\nimport { WebDNNWebGLContext } from \"../interface/backend/webgl/webglContext\";\nimport { WebDNNWebGPUContext } from \"../interface/backend/webgpu/webgpuContext\";\nimport { TensorLoaderImpl } from \"./tensorLoaderImpl\";\nimport { TensorLoader } from \"../interface/core/tensorLoader\";\nimport { WebDNNLogging } from \"../logging\";\n\nconst logger = WebDNNLogging.getLogger(\"WebDNN.runner\");\n\nexport interface BackendContexts {\n cpu: WebDNNCPUContext;\n wasm?: WebDNNWasmContext;\n webgl?: WebDNNWebGLContext;\n webgpu?: WebDNNWebGPUContext;\n}\n\nexport class RunnerImpl implements Runner {\n model?: onnx.ModelProto;\n\n loaded: boolean;\n\n initializerTensors!: Map<string, CPUTensor>;\n\n copiedInitializerTensors!: Map<Backend, Map<string, Tensor>>;\n\n useCompatibilityProxy: boolean;\n\n private inputsWithoutInitializer!: onnx.IValueInfoProto[];\n\n inputs!: InputProxy[];\n\n outputs!: OutputProxy[];\n\n opset!: number;\n\n tensorMoveOptions: { [key: string]: Record<string, any> };\n\n /**\n * key: operator name\n */\n forceOperatorBackendOrder: { [key: string]: Backend[] };\n\n /**\n * Primary backend\n */\n readonly backendName: Backend;\n\n constructor(\n public backendOrder: Backend[],\n private backendContexts: BackendContexts\n ) {\n this.backendName = this.backendOrder[0];\n this.loaded = false;\n this.useCompatibilityProxy = false;\n this.tensorMoveOptions = {};\n this.forceOperatorBackendOrder = {};\n }\n\n getTensorLoader(path: string[] | string): TensorLoader {\n return new TensorLoaderImpl(path, this.backendContexts.cpu);\n }\n\n async loadModel(\n directory: string,\n onnxBasename: string,\n progressCallback?: (loaded: number, total: number) => unknown\n ): 
Promise<void> {\n    const f = await fetch(directory + onnxBasename),\n      b = await f.arrayBuffer();\n    this.model = onnx.ModelProto.decode(new Uint8Array(b));\n    modelTransform(this.model, this.backendOrder);\n    if (this.model!.opsetImport.length !== 1) {\n      logger.warn(\n        `Specifying multiple opset_import is not supported. Using first one.`\n      );\n    }\n    this.opset = intOrLongToInt(this.model!.opsetImport[0].version!);\n    this.initializerTensors = new Map();\n    for (const [name, tensor] of this.extractInitializerTensor().entries()) {\n      this.initializerTensors.set(name, tensor);\n    }\n    for (const [name, tensor] of (\n      await this.loadExternalInitializerTensor(directory, progressCallback)\n    ).entries()) {\n      this.initializerTensors.set(name, tensor);\n    }\n    if (this.useCompatibilityProxy) {\n      this.initInputProxy();\n      this.initOutputProxy();\n    }\n    this.copiedInitializerTensors = new Map();\n    for (const backend of this.backendOrder) {\n      if (backend !== \"cpu\") {\n        this.copiedInitializerTensors.set(backend, new Map());\n      }\n    }\n    this.inputsWithoutInitializer = this.model!.graph!.input!.filter(\n      (v) => v.name && !this.initializerTensors.has(v.name)\n    );\n    for (const md of this.model!.metadataProps) {\n      if (md.key === \"WebDNN2.TensorMoveOptions\") {\n        this.tensorMoveOptions = JSON.parse(md.value!);\n      }\n      if (md.key === \"WebDNN2.ForceOperatorBackendOrder\") {\n        this.forceOperatorBackendOrder = JSON.parse(md.value!);\n      }\n    }\n    this.loaded = true;\n  }\n\n  private extractInitializerTensor(): Map<string, CPUTensor> {\n    const tensors = new Map<string, CPUTensor>();\n    for (const initializer of this.model!.graph!.initializer!) {\n      const dims = intOrLongToIntVector(initializer.dims!);\n      if (initializer.dataType === onnx.TensorProto.DataType.FLOAT) {\n        if (initializer.rawData?.byteLength) {\n          // Float32Array(initializer.rawData!.buffer) cannot be used (the data may not be 4-byte aligned)\n          const newBuffer = new Uint8Array(initializer.rawData!.byteLength);\n          newBuffer.set(initializer.rawData!);\n          tensors.set(\n            initializer.name!,\n            this.backendContexts.cpu.emptyTensor(\n              dims,\n              \"float32\",\n              new Float32Array(\n                newBuffer.buffer,\n                0,\n                newBuffer.byteLength / Float32Array.BYTES_PER_ELEMENT\n              )\n            )\n          );\n        } else if (initializer.floatData) {\n          tensors.set(\n            initializer.name!,\n            this.backendContexts.cpu.emptyTensor(\n              dims,\n              \"float32\",\n              new Float32Array(initializer.floatData)\n            )\n          );\n        }\n      } else if (initializer.dataType === onnx.TensorProto.DataType.INT64) {\n        // 8 bytes per element (int64)\n        if (initializer.rawData?.byteLength) {\n          const rawData = initializer.rawData!,\n            view = new DataView(\n              rawData.buffer,\n              rawData.byteOffset,\n              rawData.byteLength\n            ),\n            ab = new Int32Array(view.byteLength / 8);\n          for (let idx = 0; idx < ab.length; idx++) {\n            ab[idx] = clipLong(\n              new Long(\n                view.getUint32(idx * 8, true),\n                view.getUint32(idx * 8 + 4, true)\n              )\n            );\n          }\n          tensors.set(\n            initializer.name!,\n            this.backendContexts.cpu.emptyTensor(dims, \"int32\", ab)\n          );\n        } else if (initializer.int64Data) {\n          tensors.set(\n            initializer.name!,\n            this.backendContexts.cpu.emptyTensor(\n              dims,\n              \"int32\",\n              new Int32Array(intOrLongToIntVector(initializer.int64Data))\n            )\n          );\n        }\n      } else if (initializer.dataType === onnx.TensorProto.DataType.INT32) {\n        if (initializer.rawData?.byteLength) {\n          // 4 bytes per element (int32)\n          const rawData = initializer.rawData!,\n            view = new DataView(\n              rawData.buffer,\n              rawData.byteOffset,\n              rawData.byteLength\n            ),\n            ab = new Int32Array(view.byteLength / 4);\n          for (let idx = 0; idx < ab.length; idx++) {\n            ab[idx] = view.getInt32(idx * 
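/* 4 bytes per int32 element, little-endian */ 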
4, true);\n }\n tensors.set(\n initializer.name!,\n this.backendContexts.cpu.emptyTensor(dims, \"int32\", ab)\n );\n } else if (initializer.int32Data) {\n tensors.set(\n initializer.name!,\n this.backendContexts.cpu.emptyTensor(\n dims,\n \"int32\",\n new Int32Array(initializer.int32Data)\n )\n );\n }\n } else {\n throw new Error(\n `Unsupported initializer dataType ${initializer.dataType}`\n );\n }\n }\n return tensors;\n }\n\n private async loadExternalInitializerTensor(\n directory: string,\n progressCallback?: (loaded: number, total: number) => unknown\n ): Promise<Map<string, CPUTensor>> {\n let totalExpectedSize: number | null = null;\n for (const md of this.model!.metadataProps) {\n if (md.key === \"WebDNN2.WeightSizes\") {\n totalExpectedSize = 0;\n for (const sizeStr of md.value!.split(\":\")) {\n totalExpectedSize += Number(sizeStr);\n }\n }\n }\n for (const md of this.model!.metadataProps) {\n if (md.key === \"WebDNN2.WeightPaths\") {\n const paths = md.value!.split(\":\").map((bn) => directory + bn);\n const loader = this.getTensorLoader(paths);\n let cb: ((loadedBytes: number) => unknown) | undefined = undefined;\n if (totalExpectedSize && progressCallback) {\n const ex = totalExpectedSize;\n cb = (loadedBytes: number) => {\n progressCallback(loadedBytes, ex);\n };\n }\n return loader.loadAll(cb);\n }\n }\n if (progressCallback) {\n logger.warn(\n `progressCallback is currently supported when loading optimized model.`\n );\n }\n return new Map();\n }\n\n private getIOProxyShape(vi: onnx.IValueInfoProto) {\n const shape = nonnull(\n vi.type?.tensorType?.shape?.dim?.map((d) =>\n intOrLongToInt(nonnull(d.dimValue))\n )\n );\n let dataType: DataType;\n switch (vi.type?.tensorType?.elemType) {\n case onnx.TensorProto.DataType.FLOAT:\n dataType = \"float32\";\n break;\n case onnx.TensorProto.DataType.INT32:\n case onnx.TensorProto.DataType.INT64:\n dataType = \"int32\";\n break;\n default:\n throw new Error();\n }\n return { shape, dataType };\n }\n\n private initInputProxy() {\n this.inputs = this.inputsWithoutInitializer.map((input) => {\n const { shape, dataType } = this.getIOProxyShape(input);\n return new InputProxy(shape, dataType);\n });\n }\n\n private initOutputProxy() {\n const graph = nonnull(this.model?.graph);\n this.outputs = graph.output!.map((input) => {\n const { shape, dataType } = this.getIOProxyShape(input);\n return new OutputProxy(shape, dataType);\n });\n }\n\n getInputNames(): string[] {\n return this.inputsWithoutInitializer.map((gi) => gi.name!);\n }\n\n getOutputNames(): string[] {\n const graph = nonnull(this.model?.graph);\n return graph.output!.map((gi) => gi.name!);\n }\n\n async run(\n inputs?: CPUTensor[],\n options: { measurePerformance?: boolean } = {}\n ): Promise<CPUTensor[]> {\n if (!this.model || !this.loaded) {\n throw new Error(\"not initialized\");\n }\n const graph = nonnull(this.model.graph),\n tensorsForBackends = {\n cpu: new Map<string, CPUTensor>(),\n wasm: new Map<string, WasmTensor>(),\n webgl: new Map<string, WebGLTensor>(),\n webgpu: new Map<string, WebGPUTensor>(),\n };\n\n for (const [name, tensor] of this.initializerTensors.entries()) {\n tensorsForBackends.cpu.set(name, tensor);\n }\n for (const [backend, kv] of this.copiedInitializerTensors.entries()) {\n for (const [name, tensor] of kv.entries()) {\n tensorsForBackends[backend].set(name, tensor as any);\n }\n }\n\n if (!inputs) {\n // From inputProxy\n if (this.useCompatibilityProxy) {\n inputs = this.inputs.map((v) => {\n const t = 
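/* allocate a CPU tensor; the proxy's data is copied into it just below */ 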
this.backendContexts.cpu.emptyTensor(v.dims, v.dataType);\n          t.data.set(v);\n          return t;\n        });\n      } else {\n        throw new Error(\"inputs are not given and useCompatibilityProxy is disabled\");\n      }\n    }\n\n    // Set up graph inputs\n    if (this.inputsWithoutInitializer.length !== inputs.length) {\n      throw new Error(\"length of inputs mismatch\");\n    }\n    for (let i = 0; i < inputs.length; i++) {\n      const graphInput = this.inputsWithoutInitializer[i];\n      // if (graphInput.type!.tensorType!.elemType !== 1) {\n      //   throw new Error(\"graph input type must be float32\");\n      // }\n      tensorsForBackends.cpu.set(graphInput.name!, inputs[i]);\n    }\n\n    const tensorReleaseTiming = findTensorReleaseTiming(\n        this.model!,\n        new Set(this.initializerTensors.keys())\n      ),\n      nodePerformances: {\n        opType: string;\n        name: string;\n        backend: Backend;\n        inputDims: ReadonlyArray<number>[];\n        outputDims: ReadonlyArray<number>[];\n        elapsed: number;\n      }[] = [];\n\n    for (let i = 0; i < graph.node!.length; i++) {\n      const nodeStartTime = Date.now(),\n        node = graph.node![i],\n        opType = nonnull(node.opType);\n      let actualBackend: Backend,\n        actualInputDims: ReadonlyArray<number>[],\n        actualOutputDims: ReadonlyArray<number>[],\n        backendOrderForNode =\n          this.forceOperatorBackendOrder[node.name!] || this.backendOrder;\n      let firstTry = true;\n      // eslint-disable-next-line no-constant-condition\n      while (true) {\n        try {\n          // Find out which backends currently hold each input tensor\n          const currentTensorsBackends: Backend[][] = [];\n          for (let j = 0; j < node.input!.length; j++) {\n            const inputName = node.input![j],\n              bs: Backend[] = [];\n            for (const backend of backendOrderForNode) {\n              if (tensorsForBackends[backend].has(inputName)) {\n                bs.push(backend);\n              }\n            }\n            if (bs.length === 0) {\n              // When forceOperatorBackendOrder is e.g. [\"webgl\"], also scan for tensors that reside on cpu\n              for (const backend of backends) {\n                if (tensorsForBackends[backend].has(inputName)) {\n                  bs.push(backend);\n                }\n              }\n            }\n            currentTensorsBackends.push(bs);\n          }\n          const operator = instantiateOperator(\n            opType,\n            this.opset,\n            backendOrderForNode,\n            currentTensorsBackends\n          );\n          if (!operator) {\n            throw new Error(\n              `Operator implementation for ${opType}, opset=${this.opset} does not exist.`\n            );\n          }\n          operator.initialize(nonnull(node.attribute));\n          const tensorBackendRequirement = operator.getTensorBackendRequirement(\n              node.input!.length,\n              node.output!.length\n            ),\n            // Gather the operator inputs\n            operatorInputs: Tensor[] = [];\n          for (let j = 0; j < node.input!.length; j++) {\n            const inputName = node.input![j],\n              reqBackend = tensorBackendRequirement[j];\n            if (!reqBackend) {\n              // Any backend will do\n              const t =\n                tensorsForBackends[currentTensorsBackends[j][0]].get(inputName);\n              if (!t) {\n                throw new Error(`Input ${inputName} not found`);\n              }\n              operatorInputs.push(t);\n            } else {\n              const t = tensorsForBackends[reqBackend].get(inputName);\n              if (t) {\n                operatorInputs.push(t);\n              } else {\n                let found = false;\n                for (const otherBackend of this.backendOrder) {\n                  const otherT =\n                    tensorsForBackends[otherBackend].get(inputName);\n                  if (otherT) {\n                    const tensorMoveOption =\n                        this.tensorMoveOptions[inputName] || {},\n                      movedT = await this.backendContexts[\n                        reqBackend\n                      ]!.moveTensor(otherT, tensorMoveOption);\n                    tensorsForBackends[reqBackend].set(\n                      inputName,\n                      movedT as any\n                    );\n                    operatorInputs.push(movedT);\n                    found = true;\n                    break;\n                  }\n                }\n                if (!found) {\n                  throw new Error(`Input ${inputName} not found`);\n                }\n              }\n            }\n          }\n\n          let context: any = {};\n          switch (operator.backend) {\n            case \"wasm\":\n              context = this.backendContexts.wasm;\n              break;\n            case \"webgpu\":\n              context = this.backendContexts.webgpu;\n              break;\n            case \"webgl\":\n              context = this.backendContexts.webgl;\n              break;\n            case 
\"cpu\":\n context = this.backendContexts.cpu;\n break;\n default:\n throw new Error();\n }\n logger.debug(\n `Running ${node.name!}(${opType}) on ${operator.backend}`\n );\n if (options.measurePerformance && operator.backend === \"webgl\") {\n this.backendContexts[\"webgl\"]?.enablePerformanceQuery(\n `${node.name}(${opType})`\n );\n }\n const operatorOutputs = await operator.run(\n context,\n operatorInputs,\n node.output!.length\n );\n actualInputDims = operatorInputs.map((t) => t.dims);\n actualOutputDims = operatorOutputs.map((t) => t.dims);\n for (let j = 0; j < node.output!.length; j++) {\n const outputName = node.output![j];\n tensorsForBackends[operatorOutputs[j].backend].set(\n outputName,\n operatorOutputs[j] as any\n );\n }\n actualBackend = operator.backend;\n break;\n } catch (error) {\n if (firstTry) {\n logger.warn(`Failed to run ${node.name}. Retrying on cpu.`, error);\n firstTry = false;\n backendOrderForNode = [\"cpu\"];\n continue;\n } else {\n throw error;\n }\n }\n }\n\n const tensorNamesToRelease = tensorReleaseTiming.get(node.name!) || [];\n for (const name of tensorNamesToRelease) {\n for (const backend of Object.keys(tensorsForBackends) as Backend[]) {\n const t = tensorsForBackends[backend].get(name);\n if (t) {\n t.dispose();\n tensorsForBackends[backend].delete(name);\n }\n }\n }\n const nodeEndTime = Date.now();\n nodePerformances.push({\n opType: node.opType!,\n name: node.name!,\n backend: actualBackend,\n inputDims: actualInputDims,\n outputDims: actualOutputDims,\n elapsed: nodeEndTime - nodeStartTime,\n });\n }\n\n const outputs = [];\n for (let j = 0; j < graph.output!.length; j++) {\n const outputInfo = graph.output![j];\n let outputTensor = tensorsForBackends.cpu.get(outputInfo.name!);\n if (!outputTensor) {\n for (const otherBackend of this.backendOrder) {\n const otherT = tensorsForBackends[otherBackend].get(outputInfo.name!);\n if (otherT) {\n const movedT = await this.backendContexts.cpu.moveTensor(\n otherT,\n {}\n );\n tensorsForBackends.cpu.set(outputInfo.name!, movedT as any);\n outputTensor = movedT;\n break;\n }\n }\n }\n if (!outputTensor) {\n throw new Error(`Output ${outputInfo.name} not found`);\n }\n\n if (this.useCompatibilityProxy) {\n // Copy value to output proxy\n this.outputs[j].set(outputTensor.data);\n }\n\n outputs.push(outputTensor);\n }\n\n for (const backend of backendsWithoutCPU) {\n for (const [name, t] of tensorsForBackends[backend].entries()) {\n if (this.initializerTensors.has(name)) {\n this.copiedInitializerTensors.get(backend)!.set(name, t);\n } else {\n t.dispose();\n }\n }\n }\n\n if (options.measurePerformance) {\n logger.debug(\"Performance\", nodePerformances);\n try {\n const webglPerformance =\n await this.backendContexts.webgl?.gatherPerformanceQueryResult();\n logger.debug(\"WebGL Performance\", webglPerformance);\n } catch {\n logger.warn(\"Failed to get WebGL Performance\");\n }\n }\n\n return outputs;\n }\n}\n" }, { "alpha_fraction": 0.6280360817909241, "alphanum_fraction": 0.6301179528236389, "avg_line_length": 35.025001525878906, "blob_id": "b97e19d0116cb888c4188a5a22e9bfc252420916", "content_id": "261bd1b9f1b8393603f19d8ca0fb2f7741b8a2b9", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1441, "license_type": "permissive", "max_line_length": 122, "num_lines": 40, "path": "/scripts/make_operator_entries.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import glob\nimport os\nimport re\n\ndef 
make_operator_entries(base_dir, standard):\n    os.chdir(base_dir)\n    import_lines = []\n    push_lines = []\n    glob_path = \"./operators/standard/**/*.ts\" if standard else \"./operators/**/*.ts\"\n    for ts_path in sorted(glob.glob(glob_path, recursive=True)):\n        ts_relative_path = ts_path.replace(\"\\\\\", \"/\")[:-3]\n        func_name = \"getOpEntries\" + re.sub(\"[^a-zA-Z0-9]\", \"\", ts_relative_path)\n        import_lines.append(f\"import {{ getOpEntries as {func_name} }} from \\\"{ts_relative_path}\\\";\")\n        push_lines.append(f\"  entries.push(...{func_name}());\")\n    entry_src = \"\"\"// auto-generated by scripts/make_operator_entries.py\nimport { OperatorEntry } from \"../../interface/core/operator\";\n\n\"\"\"+\"\\n\".join(import_lines)+\"\"\"\n\nexport function getOpEntries(): OperatorEntry[] {\n  const entries: OperatorEntry[] = [];\n\"\"\"+\"\\n\".join(push_lines)+\"\"\"\n  return entries;\n}\n\"\"\"\n    with open(f\"{base_dir}/opEntries{'Standard' if standard else 'All'}.ts\", \"w\", newline=\"\\n\") as f:\n        f.write(entry_src)\n\ndef make_operator_entries_all_backend():\n    operators_root_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + \"/src/descriptor_runner/operators/\"\n    for standard in [False, True]:\n        for backend in [\"cpu\", \"wasm\", \"webgl\", \"webgpu\"]:\n            make_operator_entries(operators_root_dir + backend, standard)\n\n\ndef main():\n    make_operator_entries_all_backend()\n\n\nmain()\n" }, { "alpha_fraction": 0.6770161390304565, "alphanum_fraction": 0.6850806474685669, "avg_line_length": 32.06666564941406, "blob_id": "fc93b3553441f0c7f927b5ddf65349dd2ba02721", "content_id": "9ee938e66182f7edcb0343f7c2272cf7aa7b9960", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 2480, "license_type": "permissive", "max_line_length": 80, "num_lines": 75, "path": "/src/descriptor_runner/backend/webgpu/webgpuTensorImpl.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { DataArrayTypes, DataType } from \"../../interface/core/constants\";\nimport { TensorImpl } from \"../../core/tensorImpl\";\nimport { WebDNNWebGPUContextImpl } from \"./webgpuContextImpl\";\nimport { WebGPUTensor } from \"../../interface/backend/webgpu/webgpuTensor\";\n\nexport class WebGPUTensorImpl extends TensorImpl implements WebGPUTensor {\n  buffer: GPUBuffer;\n\n  private mappedForWriteFromCPU: boolean;\n\n  bufferSize: number; // Unit: byte\n\n  constructor(\n    private context: WebDNNWebGPUContextImpl,\n    dims: ReadonlyArray<number>,\n    dataType: DataType = \"float32\",\n    public readonly forWriteFromCPU: boolean = false,\n    public readonly forReadToCPU: boolean = true\n  ) {\n    super(dims, dataType, \"webgpu\");\n    if (dataType !== \"float32\") {\n      throw new Error(\"WebGPUTensor only supports float32\");\n    }\n    if (forWriteFromCPU && forReadToCPU) {\n      throw new Error(\"WebGPUTensor cannot be both for read and write\");\n    }\n\n    this.bufferSize = Math.max(this.length * Float32Array.BYTES_PER_ELEMENT, 4);\n    let usage = GPUBufferUsage.STORAGE;\n    if (forReadToCPU) {\n      usage |= GPUBufferUsage.COPY_SRC;\n    }\n    this.buffer = this.context.device.createBuffer({\n      mappedAtCreation: forWriteFromCPU,\n      size: this.bufferSize,\n      usage,\n    });\n    this.mappedForWriteFromCPU = forWriteFromCPU;\n  }\n\n  async getData(): Promise<DataArrayTypes> {\n    const data: Float32Array = new Float32Array(this.length),\n      dst = this.context.device.createBuffer({\n        size: this.bufferSize,\n        usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.MAP_READ,\n      }),\n      
commandEncoder = this.context.device.createCommandEncoder();\n    commandEncoder.copyBufferToBuffer(this.buffer, 0, dst, 0, this.bufferSize);\n    this.context.device.queue.submit([commandEncoder.finish()]);\n    await dst.mapAsync(GPUMapMode.READ);\n    const arrayBuffer = dst.getMappedRange(),\n      buffer_mapped_array = new Float32Array(arrayBuffer, 0, this.length);\n    data.set(buffer_mapped_array);\n    dst.unmap();\n    dst.destroy();\n    return data;\n  }\n\n  async setData(data: DataArrayTypes): Promise<void> {\n    if (!this.mappedForWriteFromCPU) {\n      throw new Error(\"The buffer is not mapped\");\n    }\n    const ab = this.buffer.getMappedRange(),\n      mappedArray = new Float32Array(ab);\n    mappedArray.set(data);\n    this.buffer.unmap();\n    this.mappedForWriteFromCPU = false;\n  }\n\n  dispose(): void {\n    if (this.buffer) {\n      this.buffer.destroy();\n    }\n  }\n}\n" }, { "alpha_fraction": 0.5664194226264954, "alphanum_fraction": 0.582602858543396, "avg_line_length": 24.53676414489746, "blob_id": "c8986f5c38e9882632d41d61a500c431013a8013", "content_id": "0196fe5d875503d4d30ee236b05e8b7645d2cc0a", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1511, "license_type": "permissive", "max_line_length": 86, "num_lines": 58, "path": "/src/descriptor_runner/operators/webgl/operators/standard/globalaveragepool.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { OperatorImpl } from \"../../../operatorImpl\";\nimport { WebDNNWebGLContext } from \"../../../../interface/backend/webgl/webglContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { averagepool } from \"../../rawcomputation/averagepool\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\n// Version 11\nexport class WebGLGlobalAveragePool extends OperatorImpl {\n  constructor() {\n    super(\"webgl\");\n  }\n\n  async run(context: WebDNNWebGLContext, inputs: Tensor[]): Promise<Tensor[]> {\n    context.assertsWebGLTensorArray(inputs);\n    const inputX = inputs[0];\n    // TODO: support dimensions other than 2D\n    if (inputX.ndim !== 4) {\n      throw new Error(\"GlobalAveragePool other than 2D is not yet supported\");\n    }\n    if (inputX.dimPerPixel !== 1) {\n      throw new Error();\n    }\n\n    const batch = inputX.dims[0],\n      ch = inputX.dims[1],\n      inShape = [inputX.dims[2], inputX.dims[3]],\n      outShape = [1, 1],\n      output = context.emptyTensor(\n        [batch, ch, outShape[0], outShape[1]],\n        \"float32\"\n      );\n    await averagepool(\n      context,\n      inputX,\n      output,\n      true, // slightly reduces the amount of computation\n      batch,\n      inShape,\n      [0, 0, 0, 0],\n      [1, 1],\n      inShape,\n      outShape,\n      ch\n    );\n    return [output];\n  }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n  return [\n    {\n      opType: \"GlobalAveragePool\",\n      backend: \"webgl\",\n      opsetMin: 1,\n      factory: () => new WebGLGlobalAveragePool(),\n    },\n  ];\n}\n" }, { "alpha_fraction": 0.6435202360153198, "alphanum_fraction": 0.6442629098892212, "avg_line_length": 27.19895362854004, "blob_id": "cc378cefacf9be991007b74e4b050dc22625133e", "content_id": "944ec1cf62662f2f3060ceb09dc6ba2b813cc65e", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 5386, "license_type": "permissive", "max_line_length": 87, "num_lines": 191, "path": "/src/descriptor_runner/backend/webgpu/webgpuContextImpl.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { WebDNNCPUContext } from \"../../interface/backend/cpu/cpuContext\";\nimport {\n  WebDNNWebGPUContext,\n  WebDNNWebGPUContextOption,\n  
WebGPURunnerRequest,\n} from \"../../interface/backend/webgpu/webgpuContext\";\nimport { WebGPUTensor } from \"../../interface/backend/webgpu/webgpuTensor\";\nimport { DataType } from \"../../interface/core/constants\";\nimport { Tensor } from \"../../interface/core/tensor\";\nimport { WebGPUMetaBuffer } from \"./webgpuMetaBuffer\";\nimport { WebGPUTensorImpl } from \"./webgpuTensorImpl\";\n\ninterface WebGPURunnerPipeline {\n bindGroupLayout: GPUBindGroupLayout;\n pipeline: GPUComputePipeline;\n}\n\nexport class WebDNNWebGPUContextImpl implements WebDNNWebGPUContext {\n backend = \"webgpu\" as const;\n\n initialized: boolean;\n\n isSupported: boolean;\n\n device!: GPUDevice;\n\n private pipelines: Map<string, WebGPURunnerPipeline>;\n\n pooledMetaBuffer: WebGPUMetaBuffer[] = [];\n\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n constructor(\n public cpuContext: WebDNNCPUContext,\n option: WebDNNWebGPUContextOption\n ) {\n if (\n typeof navigator.gpu !== \"object\" ||\n typeof navigator.gpu.requestAdapter !== \"function\"\n ) {\n throw new Error(\"WebGPU is not supported on this browser\");\n }\n this.initialized = false;\n this.isSupported = false;\n this.pipelines = new Map();\n }\n\n async initialize(): Promise<void> {\n if (this.initialized) {\n return;\n }\n // eslint-disable-next-line @typescript-eslint/no-non-null-assertion\n const adapter = await navigator.gpu!.requestAdapter();\n // eslint-disable-next-line @typescript-eslint/no-non-null-assertion\n this.device = (await adapter!.requestDevice()) as GPUDevice;\n if (!this.device) {\n throw new Error(\"GPUAdapter.requestDevice() returned null\");\n }\n this.isSupported = true;\n this.initialized = true;\n }\n\n isWebGLTensor(tensor: Tensor): tensor is WebGPUTensor {\n return tensor.backend === this.backend;\n }\n\n assertsWebGPUTensor(tensor: Tensor): asserts tensor is WebGPUTensor {\n if (tensor.backend !== this.backend) {\n throw new Error(\n `Tensor backend ${this.backend} is expected, but ${tensor.backend} is given.`\n );\n }\n }\n\n assertsWebGPUTensorArray(\n tensors: Tensor[]\n ): asserts tensors is WebGPUTensor[] {\n for (const tensor of tensors) {\n if (tensor.backend !== this.backend) {\n throw new Error(\n `Tensor backend ${this.backend} is expected, but ${tensor.backend} is given.`\n );\n }\n }\n }\n\n emptyTensor(\n dims: ReadonlyArray<number>,\n dataType?: DataType,\n forWriteFromCPU?: boolean,\n forReadToCPU?: boolean\n ): WebGPUTensor {\n return new WebGPUTensorImpl(\n this,\n dims,\n dataType,\n forWriteFromCPU,\n forReadToCPU\n );\n }\n\n async moveTensor(tensor: Tensor): Promise<WebGPUTensor> {\n const dst = new WebGPUTensorImpl(\n this,\n tensor.dims,\n tensor.dataType,\n true,\n false\n );\n await dst.setData(await tensor.getData());\n return dst;\n }\n\n hasPipeline(name: string): boolean {\n return this.pipelines.has(name);\n }\n\n createPipeline(name: string, shader: Uint32Array, nBuffers: number): void {\n if (this.hasPipeline(name)) {\n return;\n }\n const { device } = this,\n bindings: GPUBindGroupLayoutEntry[] = [];\n for (let i = 0; i < nBuffers; i++) {\n bindings.push({\n binding: i,\n visibility: GPUShaderStage.COMPUTE,\n buffer: { type: \"storage\" },\n });\n }\n const bindGroupLayout = device.createBindGroupLayout({\n entries: bindings,\n }),\n pipelineLayout = device.createPipelineLayout({\n bindGroupLayouts: [bindGroupLayout],\n }),\n shaderModule = device.createShaderModule({ code: shader }),\n pipeline = device.createComputePipeline({\n layout: pipelineLayout,\n computeStage: {\n 
module: shaderModule,\n          entryPoint: \"main\",\n        },\n      });\n\n    this.pipelines.set(name, { bindGroupLayout, pipeline });\n  }\n\n  async run(request: WebGPURunnerRequest): Promise<void> {\n    const pipeline = this.pipelines.get(request.pipelineName);\n    if (!pipeline) {\n      throw new Error(`Pipeline ${request.pipelineName} not found`);\n    }\n    const { device } = this,\n      entries: GPUBindGroupEntry[] = request.tensors.map((t, i) => ({\n        binding: i,\n        resource: {\n          buffer: (t as WebGPUTensorImpl).buffer,\n          size: (t as WebGPUTensorImpl).bufferSize,\n        },\n      }));\n    let meta: WebGPUMetaBuffer | null = null;\n    if (request.meta) {\n      meta = await WebGPUMetaBuffer.createBuffer(this, request.meta);\n      entries.push({\n        binding: entries.length,\n        resource: {\n          buffer: meta.tensor.buffer,\n          size: meta.tensor.bufferSize,\n        },\n      });\n    }\n    const bindGroup = device.createBindGroup({\n        layout: pipeline.bindGroupLayout,\n        entries,\n      }),\n      commandEncoder = device.createCommandEncoder(),\n      passEncoder = commandEncoder.beginComputePass();\n    passEncoder.setBindGroup(0, bindGroup);\n    passEncoder.setPipeline(pipeline.pipeline);\n    passEncoder.dispatch(\n      request.workGroups.x,\n      request.workGroups.y,\n      request.workGroups.z\n    );\n    passEncoder.endPass();\n\n    device.queue.submit([commandEncoder.finish()]);\n\n    meta?.pushToPool();\n  }\n}\n" }, { "alpha_fraction": 0.7452307939529419, "alphanum_fraction": 0.7692307829856873, "avg_line_length": 19.56962013244629, "blob_id": "b8f2dafde97fac9e25246eb517e91de6a82d8569", "content_id": "5e92fe0bcb22a3551628a08f9e93ceebdb6fe5c4", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2630, "license_type": "permissive", "max_line_length": 142, "num_lines": 79, "path": "/README.ja.md", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "# WebDNN\n\n[English](README.md)\n\nThis is an alpha version of WebDNN version 2. The major difference from WebDNN 1.x is that it accepts only models in ONNX format as input. ONNX models can be loaded directly in a web browser, without any preprocessing in Python. In addition, offline model optimization is also possible.\n\n[Version 1.x](https://github.com/mil-tokyo/webdnn/tree/v1.2.11)\n\n# Supported backends (acceleration technologies)\n\nWebGL can be used on most modern browsers.\n\n- WebGPU\n  - The version shipped with Chrome Canary.\n  - The WebGPU shipped with iOS 13 is not supported, because it requires shaders written in the WSL language, which is planned to be deprecated.\n- WebGL\n  - WebGL2 is used when available. Safari, which supports only WebGL1, is also supported.\n- WebAssembly\n\n# Development environment setup\n\nAn environment where node.js 14, python 3.6+ and emscripten 2.0+ run is required.\n\n```\nyarn\npython setup.py develop\n```\n\n# Build\n```\nyarn build:all\n```\n\nBuild artifacts\n- `dist/webdnn.js`\n  - Library that can load non-optimized ONNX models\n- `dist/webdnn-core.js`\n  - Library that can load models optimized by WebDNN\n\n# Basic usage\n\nLoading `dist/webdnn.js` with a `<script>` tag adds a global `WebDNN` object. Assuming an ONNX model `model_directory/model.onnx` exists, run it with a tensor of shape `[1, 2]` as input.\n\n```javascript\nconst runner = await WebDNN.load(\"model_directory/\");\nconst inputDataArray = new Float32Array([5.1, -2.3]);\nconst inputTensor = new WebDNN.CPUTensor([1, 2], \"float32\", inputDataArray);\nconst [outputTensor] = await runner.run([inputTensor]);\n\nconsole.log(outputTensor.data); // Float32Array\n```\n\nSee `example/minimum` for minimal complete working code.\n\n# Test\n\nGenerate the ONNX models under test and their input/output tensors\n\n```\npython test/model_test/make_models.py\n```\n\nRun in a web browser\n\n```\nyarn server\n```\n\nOpen <http://localhost:8080/test/model_test/runner/standard.html> in a web browser, check the backends you want to test, and click the Test button to run the tests.\n\nTo run tests that include model optimization, use\n\n```\npython test/model_test/make_models.py --optimize\n```\n\n<http://localhost:8080/test/model_test/runner/optimized.html>\n\ninstead. Note that `make_models.py` takes a long time to run.\n" }, {
"alpha_fraction": 0.622266411781311, "alphanum_fraction": 0.636845588684082, "avg_line_length": 30.768421173095703, "blob_id": "bd1b8d639a17a449dc26fe3c7a97ffa86ca39051", "content_id": "4cdb004c1d8c69034018a78dcae3e0661c530efc", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3062, "license_type": "permissive", "max_line_length": 97, "num_lines": 95, "path": "/example/benchmark/make_mobilenetv2.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import argparse\nimport json\nimport os\nimport shutil\nimport subprocess\nimport sys\nimport numpy as np\nimport torch\nimport torch.onnx\nimport torch.nn as nn\nimport torch.nn.functional as F\nfrom webdnn.tensor_export import serialize_tensors\n# do not import torchvision\n\n\nsys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), \"models\"))\n\ntorch.manual_seed(0)\n\nOUTPUT_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"runner\", \"external_model\")\n\ndef dump_expected(directory, arrays_dict):\n serialize_tensors(directory + \"/expected.bin\", arrays_dict)\n\ndef scalar(value):\n return torch.Tensor([value]).squeeze()\n\n\ndef rand_scalar():\n return torch.rand(1).squeeze()\n\n\ndef randn_scalar():\n return torch.randn(1).squeeze()\n\ndef dump(name, model, input_shapes):\n output_dir = f\"{OUTPUT_DIR}/{name}\"\n os.makedirs(output_dir, exist_ok=True)\n inputs = []\n model.eval()\n for shape in input_shapes:\n if isinstance(shape, torch.Tensor):\n # 特定の入力をしたい場合はtorch.Tensor自体を与える\n inputs.append(shape)\n else:\n if len(shape) == 0:\n inputs.append(randn_scalar()) # スカラー\n else:\n inputs.append(torch.randn(*shape))\n input_names = [f\"input_{i}\" for i in range(len(inputs))]\n onnx_path = f\"{output_dir}/model.onnx\"\n with torch.no_grad():\n example_output = model(*inputs)\n if isinstance(example_output, tuple):\n output_names = [f\"output_{i}\" for i in range(len(example_output))]\n else:\n output_names = [\"output_0\"]\n torch.onnx.export(model, tuple(inputs), onnx_path,\n verbose=True,\n input_names=input_names,\n output_names=output_names, opset_version=10)\n dumps = {}\n for tensor, name in zip(inputs, input_names):\n dumps[name] = tensor.numpy()\n if isinstance(example_output, tuple):\n for i, eo in enumerate(example_output):\n dumps[f\"output_{i}\"] = eo.numpy()\n else:\n dumps[\"output_0\"] = example_output.numpy()\n dump_expected(output_dir, dumps)\n\ndef make_input_data():\n from torchvision import transforms\n from PIL import Image\n import skimage\n\n preprocess = transforms.Compose([\n transforms.Resize(256),\n transforms.CenterCrop(224),\n transforms.ToTensor(),\n transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),\n ])\n input_tensor = preprocess(Image.fromarray(skimage.data.coffee()))\n input_batch = input_tensor.unsqueeze(0)\n return input_batch\n\ndef main():\n # torch.hub.load imports special torchvision from temporary directory.\n # It is impossible to load other model which uses standard torchvision in the same process.\n model = torch.hub.load('pytorch/vision:v0.9.0', 'mobilenet_v2', pretrained=True)\n preprocessed_image = make_input_data()\n dump(\"mobilenet_v2\", model, [preprocessed_image])\n\nif __name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.6873188614845276, "alphanum_fraction": 0.7003623247146606, "avg_line_length": 56.5, "blob_id": "8000929199c16086a5d549f6754fd373f8adab3a", "content_id": "23cf166eed9aa2df3d79bff0a108c552c1ed8b5c", 
"detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2760, "license_type": "permissive", "max_line_length": 187, "num_lines": 48, "path": "/src/graph_transpiler/webdnn/optimize_model.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import argparse\nimport json\nimport os\nfrom os.path import abspath\nimport subprocess\nimport onnx\nfrom webdnn.passes import run_passes\nfrom webdnn.tensor_export import export_initializers\n\nALL_BACKENDS = [\"webgl2-16384\", \"webgl2-4096\", \"webgl1-16384\", \"webgl1-4096\", \"wasm\", \"cpu\"] # \"webgpu\" is not yet supported\nSUBPROCESS_SHELL = os.name=='nt'\nROOT_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))\n\ndef main():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"onnx_model\")\n parser.add_argument(\"dst_dir\")\n parser.add_argument(\"--compression\", type=int, default=0, help=\"compression algorithm number (0=no compression)\")\n parser.add_argument(\"--backends\", default=\",\".join(ALL_BACKENDS))\n args = parser.parse_args()\n backends = args.backends.split(\",\")\n os.makedirs(args.dst_dir, exist_ok=True)\n for backend in backends:\n src_model = onnx.load_model(args.onnx_model)\n optimization_result = run_passes(src_model, backend)\n # print(optimization_result)\n optimization_result.write_code(ROOT_DIR)\n optimized_model = src_model\n tensor_pathes = export_initializers(os.path.join(args.dst_dir, f\"weight-{backend}-{{}}.bin\"), optimized_model, optimization_result.initializers, 4 * 1024 * 1024, args.compression)\n weight_paths = \":\".join([os.path.basename(tensor_path) for tensor_path in tensor_pathes])\n optimized_model.metadata_props.append(onnx.StringStringEntryProto(key=\"WebDNN2.WeightPaths\", value=weight_paths))\n weight_sizes = \":\".join([str(os.stat(tensor_path).st_size) for tensor_path in tensor_pathes])\n optimized_model.metadata_props.append(onnx.StringStringEntryProto(key=\"WebDNN2.WeightSizes\", value=weight_sizes))\n optimized_model.metadata_props.append(onnx.StringStringEntryProto(key=\"WebDNN2.TensorMoveOptions\", value=json.dumps(optimization_result.tensor_move_options)))\n onnx.save_model(optimized_model, os.path.join(args.dst_dir, f\"model-{backend}.onnx\"))\n if backend == \"wasm\":\n subprocess.check_call([\"yarn\", \"shader:wasm\"], shell=SUBPROCESS_SHELL, cwd=ROOT_DIR)\n if backend == \"webgpu\":\n subprocess.check_call([\"yarn\", \"shader:webgpu\"], shell=SUBPROCESS_SHELL, cwd=ROOT_DIR)\n subprocess.check_call([\"yarn\", \"makeShaderList\"], shell=SUBPROCESS_SHELL, cwd=ROOT_DIR)\n subprocess.check_call([\"yarn\", f\"build:{backend}\", \"-o\", os.path.abspath(args.dst_dir)], shell=SUBPROCESS_SHELL, cwd=ROOT_DIR)\n optimization_result.remove_code(ROOT_DIR)\n # reset shader list file (remove autogen entry)\n subprocess.check_call([\"yarn\", \"makeShaderList\"], shell=SUBPROCESS_SHELL, cwd=ROOT_DIR)\n\n\nif __name__ == \"__main__\":\n main()\n" }, { "alpha_fraction": 0.6261950135231018, "alphanum_fraction": 0.6357552409172058, "avg_line_length": 31.6875, "blob_id": "23a31cf8c40cc3a18f544676505ef996d9a4f349", "content_id": "bb50091ff6908d56d17b58ef43e6c9eb3eb5003a", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1046, "license_type": "permissive", "max_line_length": 75, "num_lines": 32, "path": "/setup.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": 
"import sys\n\nif sys.version_info < (3, 6):\n sys.stderr.write(\"Sorry, this library only works with python >= 3.6\\n\")\n sys.exit(1)\n\nimport json\nfrom setuptools import setup, find_packages\n\nwith open(\"./package.json\") as f:\n package_info = json.load(f)\n\nsetup(\n name=\"webdnn\",\n version=package_info[\"version\"],\n python_requires=\">=3.6\",\n package_dir={\"\": \"src/graph_transpiler\"},\n packages=find_packages(\"src/graph_transpiler\"),\n package_data={\"\": [\"*.js\"]}, install_requires=['numpy'],\n url=\"https://github.com/mil-tokyo/webdnn\",\n description=package_info[\"description\"],\n author=package_info[\"author\"][\"name\"],\n author_email=package_info[\"author\"][\"email\"],\n keywords=\" \".join(package_info[\"keywords\"]),\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Topic :: Scientific/Engineering :: Artificial Intelligence\",\n \"Intended Audience :: Science/Research\",\n \"License :: OSI Approved :: MIT License\",\n \"Programming Language :: Python :: 3.6\"\n ]\n)\n" }, { "alpha_fraction": 0.4825434386730194, "alphanum_fraction": 0.5029516220092773, "avg_line_length": 26.705608367919922, "blob_id": "8c578cfcda4239228bfdc5ac6b20bec674cb12bb", "content_id": "47ac5fc9d0d6f3e20ffeb8040f1e607d1946206d", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 5967, "license_type": "permissive", "max_line_length": 98, "num_lines": 214, "path": "/src/descriptor_runner/operators/cpu/operators/standard/eachelementwise.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { DataArrayTypes, DataType } from \"../../../../interface/core/constants\";\nimport { OperatorImpl } from \"../../../operatorImpl\";\nimport { broadcastMulti } from \"../../../operatorUtil\";\nimport { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\nclass EachElementwise extends OperatorImpl {\n constructor(\n private opType: string,\n private op: (values: number[]) => number,\n private allowDataTypes: DataType[]\n ) {\n super(\"cpu\");\n }\n\n async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsCPUTensorArray(inputs);\n for (const inputX of inputs) {\n if (inputX.dataType !== inputs[0].dataType) {\n throw new Error(\n `${this.opType}: input dataTypes mismatch: ${inputX.dataType} !== ${inputs[0].dataType}`\n );\n }\n }\n if (!this.allowDataTypes.includes(inputs[0].dataType)) {\n throw new Error(\n `${this.opType}: input dataType ${inputs[0].dataType} is not supported`\n );\n }\n // TODO: broadcast不要の場合に特化したパフォーマンス向上\n\n const { dims: outShape, allStrides: inAllStrides } = broadcastMulti(\n inputs.map((input) => input.dims)\n ),\n output = context.emptyTensor(outShape, inputs[0].dataType),\n { op } = this;\n const inputDataList = inputs.map((input) => input.data);\n switch (outShape.length) {\n case 0:\n this.op0d(inputDataList, output.data, op, outShape, inAllStrides);\n break;\n case 1:\n this.op1d(inputDataList, output.data, op, outShape, inAllStrides);\n break;\n case 2:\n this.op2d(inputDataList, output.data, op, outShape, inAllStrides);\n break;\n case 3:\n this.op3d(inputDataList, output.data, op, outShape, inAllStrides);\n break;\n case 4:\n this.op4d(inputDataList, output.data, op, outShape, inAllStrides);\n break;\n default:\n throw new Error(`Binary: input ndim > 
4 is not yet supported`);\n }\n return [output];\n }\n\n private op0d(\n dIs: DataArrayTypes[],\n dO: DataArrayTypes,\n op: (values: number[]) => number,\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n outShape: number[],\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n inAllStrides: number[][]\n ) {\n dO[0] = op(dIs.map((dI) => dI[0]));\n }\n\n private op1d(\n dIs: DataArrayTypes[],\n dO: DataArrayTypes,\n op: (values: number[]) => number,\n outShape: number[],\n inAllStrides: number[][]\n ) {\n let idx = 0;\n for (let a0 = 0; a0 < outShape[0]; a0++) {\n const values: number[] = [];\n for (let i = 0; i < dIs.length; i++) {\n values.push(dIs[i][a0 * inAllStrides[i][0]]);\n }\n dO[idx++] = op(values);\n }\n }\n\n private op2d(\n dIs: DataArrayTypes[],\n dO: DataArrayTypes,\n op: (values: number[]) => number,\n outShape: number[],\n inAllStrides: number[][]\n ) {\n let idx = 0;\n for (let a0 = 0; a0 < outShape[0]; a0++) {\n for (let a1 = 0; a1 < outShape[1]; a1++) {\n const values: number[] = [];\n for (let i = 0; i < dIs.length; i++) {\n values.push(\n dIs[i][a0 * inAllStrides[i][0] + a1 * inAllStrides[i][1]]\n );\n }\n dO[idx++] = op(values);\n }\n }\n }\n\n private op3d(\n dIs: DataArrayTypes[],\n dO: DataArrayTypes,\n op: (values: number[]) => number,\n outShape: number[],\n inAllStrides: number[][]\n ) {\n let idx = 0;\n for (let a0 = 0; a0 < outShape[0]; a0++) {\n for (let a1 = 0; a1 < outShape[1]; a1++) {\n for (let a2 = 0; a2 < outShape[2]; a2++) {\n const values: number[] = [];\n for (let i = 0; i < dIs.length; i++) {\n values.push(\n dIs[i][\n a0 * inAllStrides[i][0] +\n a1 * inAllStrides[i][1] +\n a2 * inAllStrides[i][2]\n ]\n );\n }\n dO[idx++] = op(values);\n }\n }\n }\n }\n\n private op4d(\n dIs: DataArrayTypes[],\n dO: DataArrayTypes,\n op: (values: number[]) => number,\n outShape: number[],\n inAllStrides: number[][]\n ) {\n let idx = 0;\n for (let a0 = 0; a0 < outShape[0]; a0++) {\n for (let a1 = 0; a1 < outShape[1]; a1++) {\n for (let a2 = 0; a2 < outShape[2]; a2++) {\n for (let a3 = 0; a3 < outShape[3]; a3++) {\n const values: number[] = [];\n for (let i = 0; i < dIs.length; i++) {\n values.push(\n dIs[i][\n a0 * inAllStrides[i][0] +\n a1 * inAllStrides[i][1] +\n a2 * inAllStrides[i][2] +\n a3 * inAllStrides[i][3]\n ]\n );\n }\n dO[idx++] = op(values);\n }\n }\n }\n }\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Max\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () =>\n new EachElementwise(\"Max\", (values) => Math.max(...values), [\n \"float32\",\n \"int32\",\n ]),\n },\n {\n opType: \"Mean\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () =>\n new EachElementwise(\n \"Mean\",\n (values) => values.reduce((s, v) => s + v, 0) / values.length,\n [\"float32\"]\n ),\n },\n {\n opType: \"Min\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () =>\n new EachElementwise(\"Min\", (values) => Math.min(...values), [\n \"float32\",\n \"int32\",\n ]),\n },\n {\n opType: \"Sum\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () =>\n new EachElementwise(\n \"Sum\",\n (values) => values.reduce((s, v) => s + v, 0),\n [\"float32\", \"int32\"]\n ),\n },\n ];\n}\n" }, { "alpha_fraction": 0.5167815685272217, "alphanum_fraction": 0.5371591448783875, "avg_line_length": 23.53676414489746, "blob_id": "c8986f5c38e9882632d41d61a500c431013a8013", "content_id": "0196fe5d875503d4d30ee236b05e8b7645d2cc0a", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", 
"length_bytes": 6682, "license_type": "permissive", "max_line_length": 89, "num_lines": 272, "path": "/src/descriptor_runner/operators/webgpu/operators/standard/conv.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { WebDNNWebGPUContext } from \"../../../../interface/backend/webgpu/webgpuContext\";\nimport { WebGPUTensor } from \"../../../../interface/backend/webgpu/webgpuTensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { Conv } from \"../../../base/conv\";\nimport { webgpuShaders } from \"../../shaders\";\n\nclass WebGPUConv extends Conv {\n constructor() {\n super(\"webgpu\");\n }\n\n async run(context: WebDNNWebGPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsWebGPUTensorArray(inputs);\n const inputX = inputs[0],\n inputW = inputs[1],\n inputB = inputs[2];\n // TODO: 2D以外対応\n if (inputX.ndim !== 4) {\n throw new Error(\"Conv other than 2D is not yet supported\");\n }\n const {\n batch,\n dilations,\n group,\n kernelShape,\n pads,\n strides,\n inShape,\n outShape,\n chIn,\n chInPerGroup,\n chOut,\n chOutPerGroup,\n } = this.calcShape(inputX.dims, inputW.dims),\n im2colData = context.emptyTensor([\n group *\n batch *\n outShape[0] *\n outShape[1] *\n chInPerGroup *\n kernelShape[0] *\n kernelShape[1],\n ]);\n await this.im2col(\n context,\n inputX,\n im2colData,\n batch,\n dilations,\n group,\n kernelShape,\n pads,\n strides,\n inShape,\n outShape,\n chIn,\n chInPerGroup,\n chOut,\n chOutPerGroup\n );\n const matmulData = context.emptyTensor([\n group * batch * outShape[0] * outShape[1] * chOutPerGroup,\n ]);\n await this.matmul(\n context,\n im2colData,\n inputW,\n matmulData,\n group,\n batch * outShape[0] * outShape[1],\n chInPerGroup * kernelShape[0] * kernelShape[1],\n chOutPerGroup\n );\n im2colData.dispose();\n const output = context.emptyTensor([\n batch,\n chOut,\n outShape[0],\n outShape[1],\n ]);\n if (inputB) {\n const transposeData = context.emptyTensor([\n batch * chOut * outShape[0] * outShape[1],\n ]);\n\n await this.transpose(\n context,\n matmulData,\n transposeData,\n group,\n batch,\n outShape[0] * outShape[1],\n chOutPerGroup\n );\n matmulData.dispose();\n await this.bias(\n context,\n transposeData,\n inputB,\n output,\n batch,\n chOut,\n outShape[0] * outShape[1]\n );\n transposeData.dispose();\n } else {\n await this.transpose(\n context,\n matmulData,\n output,\n group,\n batch,\n outShape[0] * outShape[1],\n chOutPerGroup\n );\n matmulData.dispose();\n }\n return [output];\n }\n\n private async im2col(\n context: WebDNNWebGPUContext,\n dX: WebGPUTensor,\n dI: WebGPUTensor,\n batch: number,\n dilations: number[],\n group: number,\n kernelShape: number[],\n pads: number[],\n strides: number[],\n inShape: number[],\n outShape: number[],\n chIn: number,\n chInPerGroup: number,\n chOut: number,\n chOutPerGroup: number\n ) {\n const shaderName = \"conv_im2col\";\n\n if (!context.hasPipeline(shaderName)) {\n context.createPipeline(shaderName, webgpuShaders[shaderName], 3);\n }\n\n await context.run({\n pipelineName: shaderName,\n tensors: [dX, dI],\n meta: {\n elements: [\n { value: group, type: \"int32\" },\n { value: batch, type: \"int32\" },\n { value: outShape[0], type: \"int32\" },\n { value: outShape[1], type: \"int32\" },\n { value: chInPerGroup, type: \"int32\" },\n { value: kernelShape[0], type: \"int32\" },\n { value: kernelShape[1], type: \"int32\" },\n { value: strides[0], type: \"int32\" },\n { 
value: strides[1], type: \"int32\" },\n { value: pads[0], type: \"int32\" },\n { value: pads[1], type: \"int32\" },\n { value: dilations[0], type: \"int32\" },\n { value: dilations[1], type: \"int32\" },\n { value: inShape[0], type: \"int32\" },\n { value: inShape[1], type: \"int32\" },\n { value: chIn, type: \"int32\" },\n ],\n },\n workGroups: { x: 4096 / 64, y: 1, z: 1 },\n });\n }\n\n private async matmul(\n context: WebDNNWebGPUContext,\n dI: WebGPUTensor,\n dW: WebGPUTensor,\n dT: WebGPUTensor,\n group: number,\n bout: number,\n cinkhkw: number,\n chOutPerGroup: number\n ) {\n const shaderName = \"conv_matmul\";\n\n if (!context.hasPipeline(shaderName)) {\n context.createPipeline(shaderName, webgpuShaders[shaderName], 4);\n }\n\n await context.run({\n pipelineName: shaderName,\n tensors: [dI, dW, dT],\n meta: {\n elements: [\n { value: group, type: \"int32\" },\n { value: bout, type: \"int32\" },\n { value: chOutPerGroup, type: \"int32\" },\n { value: cinkhkw, type: \"int32\" },\n ],\n },\n workGroups: { x: 4096 / 64, y: 1, z: 1 },\n });\n }\n\n private async transpose(\n context: WebDNNWebGPUContext,\n dT: WebGPUTensor,\n dO: WebGPUTensor,\n group: number,\n batch: number,\n outarea: number,\n chOutPerGroup: number\n ) {\n const shaderName = \"conv_transpose\";\n\n if (!context.hasPipeline(shaderName)) {\n context.createPipeline(shaderName, webgpuShaders[shaderName], 3);\n }\n\n await context.run({\n pipelineName: shaderName,\n tensors: [dT, dO],\n meta: {\n elements: [\n { value: group, type: \"int32\" },\n { value: batch, type: \"int32\" },\n { value: outarea, type: \"int32\" },\n { value: chOutPerGroup, type: \"int32\" },\n ],\n },\n workGroups: { x: 4096 / 64, y: 1, z: 1 },\n });\n }\n\n private async bias(\n context: WebDNNWebGPUContext,\n dI: WebGPUTensor,\n dB: WebGPUTensor,\n dO: WebGPUTensor,\n batch: number,\n chOut: number,\n outarea: number\n ) {\n const shaderName = \"conv_bias\";\n\n if (!context.hasPipeline(shaderName)) {\n context.createPipeline(shaderName, webgpuShaders[shaderName], 4);\n }\n\n await context.run({\n pipelineName: shaderName,\n tensors: [dI, dB, dO],\n meta: {\n elements: [\n { value: batch, type: \"int32\" },\n { value: chOut, type: \"int32\" },\n { value: outarea, type: \"int32\" },\n ],\n },\n workGroups: { x: 4096 / 64, y: 1, z: 1 },\n });\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Conv\",\n backend: \"webgpu\",\n opsetMin: 1,\n factory: () => new WebGPUConv(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.5754082798957825, "alphanum_fraction": 0.6037464141845703, "avg_line_length": 21.630434036254883, "blob_id": "59b3521bada21656dac81f55cf07711b947fd830", "content_id": "2517bd01da598def51c7ce06158f69535664a780", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 2118, "license_type": "permissive", "max_line_length": 76, "num_lines": 92, "path": "/src/descriptor_runner/util.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import Long from \"long\";\n\nexport function nonnull<T>(value: T | null | undefined): T {\n if (value != null) {\n return value;\n }\n throw new Error(\"value is null\");\n}\n\nexport function arange(stop: number): number[];\nexport function arange(start: number, stop: number): number[];\nexport function arange(start: number, stop: number, step: number): number[];\nexport function arange(start: number, stop?: number, step = 1): number[] {\n if (stop == null) {\n const len = start;\n 
const array = new Array(len);\n    for (let i = 0; i < len; i++) {\n      array[i] = i;\n    }\n    return array;\n  } else {\n    const array: number[] = [];\n    if (step > 0) {\n      for (let i = start; i < stop; i += step) {\n        array.push(i);\n      }\n    } else {\n      for (let i = start; i > stop; i += step) {\n        array.push(i);\n      }\n    }\n    return array;\n  }\n}\n\nexport function arraySum(vec: ArrayLike<number>): number {\n  let x = 0;\n  for (let i = 0; i < vec.length; i++) {\n    x += vec[i];\n  }\n  return x;\n}\n\nexport function arrayProd(vec: ArrayLike<number>): number {\n  let x = 1;\n  for (let i = 0; i < vec.length; i++) {\n    x *= vec[i];\n  }\n  return x;\n}\n\nexport function arrayEqual(\n  vec1: ArrayLike<number>,\n  vec2: ArrayLike<number>\n): boolean {\n  if (vec1.length !== vec2.length) {\n    return false;\n  }\n\n  for (let i = 0; i < vec1.length; i++) {\n    if (vec1[i] !== vec2[i]) {\n      return false;\n    }\n  }\n\n  return true;\n}\n\nconst longPositive32BitMax = new Long(0x7fffffff, 0),\n  longPositive32BitMin = new Long(0x80000000, 0xffffffff);\n\n// Clamp a signed Long and return a number in the range -2^31 to 2^31-1\nexport function clipLong(v: Long): number {\n  // Long(0xfffffff6, 0xffffffff) => -10\n  if (v.lessThan(longPositive32BitMin)) {\n    return -0x80000000;\n  } else if (v.greaterThan(longPositive32BitMax)) {\n    return 0x7fffffff;\n  }\n  return v.toNumber();\n}\n\nexport function intOrLongToInt(v: number | Long): number {\n  if (v instanceof Long) {\n    return clipLong(v);\n  }\n  return v;\n}\n\nexport function intOrLongToIntVector(v: (number | Long)[]): number[] {\n  return v.map(intOrLongToInt);\n}\n" }, { "alpha_fraction": 0.6940298676490784, "alphanum_fraction": 0.6940298676490784, "avg_line_length": 25.799999237060547, "blob_id": "7070a453f0a39fc4d98ee4b718bc6a8f624f6b6f", "content_id": "021215066159dcf74f52a4fed9212b96a340b740", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 134, "license_type": "permissive", "max_line_length": 53, "num_lines": 5, "path": "/src/descriptor_runner/interface/core/runner.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { CPUTensor } from \"../backend/cpu/cpuTensor\";\n\nexport interface Runner {\n  run(inputs?: CPUTensor[]): Promise<CPUTensor[]>;\n}\n" }, { "alpha_fraction": 0.691919207572937, "alphanum_fraction": 0.691919207572937, "avg_line_length": 27.285715103149414, "blob_id": "6ee26169d00feae280bef03840cb2d2303a13cb1", "content_id": "3a0e5dfd085b0161f166b7251c6fd5345bae9e97", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 198, "license_type": "permissive", "max_line_length": 55, "num_lines": 7, "path": "/src/descriptor_runner/interface/core/tensorLoader.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { CPUTensor } from \"../backend/cpu/cpuTensor\";\n\nexport interface TensorLoader {\n  loadAll: (\n    progressCallback?: (loadedBytes: number) => unknown\n  ) => Promise<Map<string, CPUTensor>>;\n}\n" }, { "alpha_fraction": 0.7051070928573608, "alphanum_fraction": 0.7166392207145691, "avg_line_length": 30.532466888427734, "blob_id": "4bf21cf6766188da10a11ed67166d97dfc558428", "content_id": "5f1157a8b6e6496a1114083ab26a98ce59f7b398", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 2428, "license_type": "permissive", "max_line_length": 116, "num_lines": 77, "path": 
"/src/descriptor_runner/interface/backend/webgl/webglContext.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { BackendContext } from \"../../core/backendContext\";\nimport { DataType } from \"../../core/constants\";\nimport { Tensor } from \"../../core/tensor\";\nimport { WebDNNCPUContext } from \"../cpu/cpuContext\";\nimport { WebGLTensor } from \"./webglTensor\";\n\nexport interface WebGLUniformItem {\n name: string;\n value: number;\n type: \"float\" | \"int\";\n}\n\nexport type WebDNNWebGLVersion =\n | \"webgl2-16384\"\n | \"webgl2-4096\"\n | \"webgl1-16384\"\n | \"webgl1-4096\";\n\nexport interface WebDNNWebGLContextOption {\n /**\n * Version order in which initialization is attempted.\n * The version means the combination of webgl version and max texture size.\n */\n versionOrder?: WebDNNWebGLVersion[];\n /**\n * Maximum GPU memory allocation in the context.\n * Pool deleted textures for future use until this capacity is exceeded.\n */\n maxAllocationBytes?: number;\n /**\n * When memory deletion is needed, the deallocation occurs until total memory allocation becomes below this value.\n */\n deallocateToBytes?: number;\n}\n\nexport interface WebDNNWebGLContextPerformance {\n key: string;\n kernelName: string;\n inputs: { name: string; dims: number[] }[];\n output: { dims: number[] };\n elapsedNanoSecond: number;\n gpuDisjoint: boolean;\n}\n\nexport interface WebDNNWebGLContext extends BackendContext {\n backend: \"webgl\";\n cpuContext: WebDNNCPUContext;\n canOnlyReadRGBA: boolean;\n gl: WebGLRenderingContext | WebGL2RenderingContext;\n webgl2: boolean;\n maxTextureSize: number;\n version: WebDNNWebGLVersion;\n\n initialize(): Promise<void>;\n isWebGLTensor(tensor: Tensor): tensor is WebGLTensor;\n assertsWebGLTensor(tensor: Tensor): asserts tensor is WebGLTensor;\n assertsWebGLTensorArray(tensors: Tensor[]): asserts tensors is WebGLTensor[];\n emptyTensor(\n dims: ReadonlyArray<number>,\n dataType?: DataType,\n option?: { dimPerPixel?: 1 | 4; textureShape?: ReadonlyArray<number> }\n ): WebGLTensor;\n moveTensor(\n tensor: Tensor,\n option: { dimPerPixel?: 1 | 4; textureShape?: ReadonlyArray<number> }\n ): Promise<WebGLTensor>;\n addKernel(name: string, sourceCode: string): void;\n hasKernel(name: string): boolean;\n runKernel(\n name: string,\n inputs: { tensor: WebGLTensor; name: string }[],\n output: WebGLTensor,\n uniforms: WebGLUniformItem[]\n ): Promise<void>;\n enablePerformanceQuery(key: string | null): void;\n gatherPerformanceQueryResult(): Promise<WebDNNWebGLContextPerformance[]>;\n}\n" }, { "alpha_fraction": 0.41180387139320374, "alphanum_fraction": 0.45041048526763916, "avg_line_length": 24.038888931274414, "blob_id": "adb794fa7805a22a87685aa5e52691776c2f1a91", "content_id": "e89c12f27d95dadb05aba62408e16981e623d715", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 4507, "license_type": "permissive", "max_line_length": 80, "num_lines": 180, "path": "/src/descriptor_runner/operators/cpu/operators/standard/transpose.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { DataArrayTypes } from \"../../../../interface/core/constants\";\nimport { Transpose } from \"../../../base/transpose\";\nimport { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\nclass CPUTranspose 
extends Transpose {\n  constructor() {\n    super(\"cpu\");\n  }\n\n  async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n    context.assertsCPUTensorArray(inputs);\n    const input = inputs[0],\n      { outShape, inStrides } = this.calcShape(input),\n      output = context.emptyTensor(outShape, input.dataType);\n    let func;\n    switch (input.ndim) {\n      case 1:\n        func = this.copy1d;\n        break;\n      case 2:\n        func = this.copy2d;\n        break;\n      case 3:\n        func = this.copy3d;\n        break;\n      case 4:\n        func = this.copy4d;\n        break;\n      case 5:\n        func = this.copy5d;\n        break;\n      case 6:\n        func = this.copy6d;\n        break;\n      default:\n        throw new Error(\n          `Transpose: ndim ${input.ndim} > 6 is not yet supported`\n        );\n    }\n    func(input.data, output.data, outShape, inStrides);\n    return [output];\n  }\n\n  copy1d(\n    dI: DataArrayTypes,\n    dO: DataArrayTypes,\n    outShape: number[],\n    inStrides: number[]\n  ) {\n    let idx = 0;\n    for (let a0 = 0; a0 < outShape[0]; a0++) {\n      dO[idx++] = dI[a0 * inStrides[0]];\n    }\n  }\n\n  copy2d(\n    dI: DataArrayTypes,\n    dO: DataArrayTypes,\n    outShape: number[],\n    inStrides: number[]\n  ) {\n    let idx = 0;\n    for (let a0 = 0; a0 < outShape[0]; a0++) {\n      for (let a1 = 0; a1 < outShape[1]; a1++) {\n        dO[idx++] = dI[a0 * inStrides[0] + a1 * inStrides[1]];\n      }\n    }\n  }\n\n  copy3d(\n    dI: DataArrayTypes,\n    dO: DataArrayTypes,\n    outShape: number[],\n    inStrides: number[]\n  ) {\n    let idx = 0;\n    for (let a0 = 0; a0 < outShape[0]; a0++) {\n      for (let a1 = 0; a1 < outShape[1]; a1++) {\n        for (let a2 = 0; a2 < outShape[2]; a2++) {\n          dO[idx++] =\n            dI[a0 * inStrides[0] + a1 * inStrides[1] + a2 * inStrides[2]];\n        }\n      }\n    }\n  }\n\n  copy4d(\n    dI: DataArrayTypes,\n    dO: DataArrayTypes,\n    outShape: number[],\n    inStrides: number[]\n  ) {\n    let idx = 0;\n    for (let a0 = 0; a0 < outShape[0]; a0++) {\n      for (let a1 = 0; a1 < outShape[1]; a1++) {\n        for (let a2 = 0; a2 < outShape[2]; a2++) {\n          for (let a3 = 0; a3 < outShape[3]; a3++) {\n            dO[idx++] =\n              dI[\n                a0 * inStrides[0] +\n                  a1 * inStrides[1] +\n                  a2 * inStrides[2] +\n                  a3 * inStrides[3]\n              ];\n          }\n        }\n      }\n    }\n  }\n\n  copy5d(\n    dI: DataArrayTypes,\n    dO: DataArrayTypes,\n    outShape: number[],\n    inStrides: number[]\n  ) {\n    let idx = 0;\n    for (let a0 = 0; a0 < outShape[0]; a0++) {\n      for (let a1 = 0; a1 < outShape[1]; a1++) {\n        for (let a2 = 0; a2 < outShape[2]; a2++) {\n          for (let a3 = 0; a3 < outShape[3]; a3++) {\n            for (let a4 = 0; a4 < outShape[4]; a4++) {\n              dO[idx++] =\n                dI[\n                  a0 * inStrides[0] +\n                    a1 * inStrides[1] +\n                    a2 * inStrides[2] +\n                    a3 * inStrides[3] +\n                    a4 * inStrides[4]\n                ];\n            }\n          }\n        }\n      }\n    }\n  }\n\n  copy6d(\n    dI: DataArrayTypes,\n    dO: DataArrayTypes,\n    outShape: number[],\n    inStrides: number[]\n  ) {\n    let idx = 0;\n    for (let a0 = 0; a0 < outShape[0]; a0++) {\n      for (let a1 = 0; a1 < outShape[1]; a1++) {\n        for (let a2 = 0; a2 < outShape[2]; a2++) {\n          for (let a3 = 0; a3 < outShape[3]; a3++) {\n            for (let a4 = 0; a4 < outShape[4]; a4++) {\n              for (let a5 = 0; a5 < outShape[5]; a5++) {\n                dO[idx++] =\n                  dI[\n                    a0 * inStrides[0] +\n                      a1 * inStrides[1] +\n                      a2 * inStrides[2] +\n                      a3 * inStrides[3] +\n                      a4 * inStrides[4] +\n                      a5 * inStrides[5]\n                  ];\n              }\n            }\n          }\n        }\n      }\n    }\n  }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n  return [\n    {\n      opType: \"Transpose\",\n      backend: \"cpu\",\n      opsetMin: 1,\n      factory: () => new CPUTranspose(),\n    },\n  ];\n}\n" }, { "alpha_fraction": 0.6175386905670166, "alphanum_fraction": 0.6285924911499023, "avg_line_length": 26.139999389648438, "blob_id": "583b812a93ded3c6e93f3c2118fd25a0d3c96abd", "content_id": "4a660dd61b237606a93000789257b95b958380d3", 
"detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 2714, "license_type": "permissive", "max_line_length": 79, "num_lines": 100, "path": "/src/descriptor_runner/operators/webgl/operators/standard/transpose.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import {\n shaderGenHeader,\n shaderGenOutput,\n shaderGenTensorNDGet,\n shaderGenTensorNDGetUniformItem,\n shaderGenTensorOutputCoordsWithReturn,\n shaderGenTensorOutputUniform,\n shaderGenTensorOutputUniformItem,\n} from \"../../shaderHelper\";\nimport { Transpose } from \"../../../base/transpose\";\nimport {\n WebDNNWebGLContext,\n WebGLUniformItem,\n} from \"../../../../interface/backend/webgl/webglContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\n// Version 11\nexport class WebGLTranspose extends Transpose {\n constructor() {\n super(\"webgl\");\n }\n\n async run(context: WebDNNWebGLContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsWebGLTensorArray(inputs);\n const input = inputs[0];\n if (input.dataType !== \"float32\") {\n throw new Error();\n }\n if (input.dimPerPixel !== 1) {\n throw new Error();\n }\n const { outShape, inStrides } = this.calcShape(input),\n output = context.emptyTensor(outShape, \"float32\"),\n kernelName = `transpose_${outShape.length}`;\n let tex_input_idxs: string;\n switch (inStrides.length) {\n case 0:\n tex_input_idxs = \"\";\n break;\n case 1:\n tex_input_idxs = \"tex_output_0\";\n break;\n case 2:\n tex_input_idxs = \"tex_output_0, tex_output_1\";\n break;\n case 3:\n tex_input_idxs = \"tex_output_0, tex_output_1, tex_output_2\";\n break;\n case 4:\n tex_input_idxs =\n \"tex_output_0, tex_output_1, tex_output_2, tex_output_3\";\n break;\n default:\n throw new Error(\"Input with more than 4 dimensions is not supported\");\n }\n const kernelSource = `${shaderGenHeader(context.webgl2)}\n\n${shaderGenTensorOutputUniform(outShape.length)}\n\n${shaderGenTensorNDGet(\"tex_input\", inStrides.length, context.webgl2)}\n\nvoid main() {\n ${shaderGenTensorOutputCoordsWithReturn(outShape.length)}\n float s = get_tex_input(${tex_input_idxs});\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n}\n`;\n context.addKernel(kernelName, kernelSource);\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input\",\n inStrides,\n input,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem(outShape, output, context.webgl2),\n ];\n await context.runKernel(\n kernelName,\n [{ tensor: input, name: \"tex_input\" }],\n output,\n uniforms\n );\n return [output];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Transpose\",\n backend: \"webgl\",\n opsetMin: 1,\n factory: () => new WebGLTranspose(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.5983319878578186, "alphanum_fraction": 0.6096314191818237, "avg_line_length": 34.74038314819336, "blob_id": "26938db0cad7c0b9d03c14f3e3038bd7f70e30f5", "content_id": "3f3842a03519157bb1d755b3a81e6f67aa151d15", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3717, "license_type": "permissive", "max_line_length": 111, "num_lines": 104, "path": "/src/shader/wasm/src/kernels/standard/gemm.cpp", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "#include <algorithm>\n#include <cmath>\n#include <Eigen/Dense>\n#include 
\"../../common/kernel.hpp\"\n#include \"../../common/unary.hpp\"\n\nextern \"C\"\n{\n static void add_bias(float *c, float *y, int m, int n) {\n for (int row = 0; row < m; row++)\n {\n for (int col = 0; col < n; col++)\n {\n y[row * n + col] += c[col];\n }\n }\n }\n\n static void do_gemm_transa0_transb0(float *a, float *b, float *y, int m, int n, int k)\n {\n // 'const' float *a raises compile error\n Eigen::Map<Eigen::Matrix<float, Eigen::Dynamic, Eigen::Dynamic, Eigen::RowMajor> > a_mat(a, m, k);\n Eigen::Map<Eigen::Matrix<float, Eigen::Dynamic, Eigen::Dynamic, Eigen::RowMajor> > b_mat(b, k, n);\n Eigen::Map<Eigen::Matrix<float, Eigen::Dynamic, Eigen::Dynamic, Eigen::RowMajor> > y_mat(y, m, n);\n\n y_mat.noalias() = a_mat * b_mat;\n }\n\n void WEBDNN_KERNEL kernel_gemm_transa0_transb0(float *a, float *b, float *y, int m, int n, int k)\n {\n do_gemm_transa0_transb0(a, b, y, m, n, k);\n }\n\n void WEBDNN_KERNEL kernel_gemm_transa0_transb0_c(float *a, float *b, float *c, float *y, int m, int n, int k)\n {\n do_gemm_transa0_transb0(a, b, y, m, n, k);\n add_bias(c, y, m, n);\n }\n\n static void do_gemm_transa0_transb1(float *a, float *b, float *y, int m, int n, int k)\n {\n // 'const' float *a raises compile error\n Eigen::Map<Eigen::Matrix<float, Eigen::Dynamic, Eigen::Dynamic, Eigen::RowMajor> > a_mat(a, m, k);\n Eigen::Map<Eigen::Matrix<float, Eigen::Dynamic, Eigen::Dynamic, Eigen::ColMajor> > b_mat(b, k, n);\n Eigen::Map<Eigen::Matrix<float, Eigen::Dynamic, Eigen::Dynamic, Eigen::RowMajor> > y_mat(y, m, n);\n\n y_mat.noalias() = a_mat * b_mat;\n }\n\n void WEBDNN_KERNEL kernel_gemm_transa0_transb1(float *a, float *b, float *y, int m, int n, int k)\n {\n do_gemm_transa0_transb1(a, b, y, m, n, k);\n }\n\n void WEBDNN_KERNEL kernel_gemm_transa0_transb1_c(float *a, float *b, float *c, float *y, int m, int n, int k)\n {\n do_gemm_transa0_transb1(a, b, y, m, n, k);\n add_bias(c, y, m, n);\n }\n\n \n static void do_gemm_transa1_transb0(float *a, float *b, float *y, int m, int n, int k)\n {\n // 'const' float *a raises compile error\n Eigen::Map<Eigen::Matrix<float, Eigen::Dynamic, Eigen::Dynamic, Eigen::ColMajor> > a_mat(a, m, k);\n Eigen::Map<Eigen::Matrix<float, Eigen::Dynamic, Eigen::Dynamic, Eigen::RowMajor> > b_mat(b, k, n);\n Eigen::Map<Eigen::Matrix<float, Eigen::Dynamic, Eigen::Dynamic, Eigen::RowMajor> > y_mat(y, m, n);\n\n y_mat.noalias() = a_mat * b_mat;\n }\n\n void WEBDNN_KERNEL kernel_gemm_transa1_transb0(float *a, float *b, float *y, int m, int n, int k)\n {\n do_gemm_transa1_transb0(a, b, y, m, n, k);\n }\n\n void WEBDNN_KERNEL kernel_gemm_transa1_transb0_c(float *a, float *b, float *c, float *y, int m, int n, int k)\n {\n do_gemm_transa1_transb0(a, b, y, m, n, k);\n add_bias(c, y, m, n);\n }\n\n \n static void do_gemm_transa1_transb1(float *a, float *b, float *y, int m, int n, int k)\n {\n // 'const' float *a raises compile error\n Eigen::Map<Eigen::Matrix<float, Eigen::Dynamic, Eigen::Dynamic, Eigen::ColMajor> > a_mat(a, m, k);\n Eigen::Map<Eigen::Matrix<float, Eigen::Dynamic, Eigen::Dynamic, Eigen::ColMajor> > b_mat(b, k, n);\n Eigen::Map<Eigen::Matrix<float, Eigen::Dynamic, Eigen::Dynamic, Eigen::RowMajor> > y_mat(y, m, n);\n\n y_mat.noalias() = a_mat * b_mat;\n }\n\n void WEBDNN_KERNEL kernel_gemm_transa1_transb1(float *a, float *b, float *y, int m, int n, int k)\n {\n do_gemm_transa1_transb1(a, b, y, m, n, k);\n }\n\n void WEBDNN_KERNEL kernel_gemm_transa1_transb1_c(float *a, float *b, float *c, float *y, int m, int n, int k)\n {\n do_gemm_transa1_transb1(a, b, y, m, n, 
k);\n add_bias(c, y, m, n);\n }\n}\n" }, { "alpha_fraction": 0.6474226713180542, "alphanum_fraction": 0.6494845151901245, "avg_line_length": 29.3125, "blob_id": "a76f24f9b2e5a8345c1e503567d88c9cf4c81604", "content_id": "21950aff3be736dd3a5e2ee388f906c7d0924e46", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1455, "license_type": "permissive", "max_line_length": 80, "num_lines": 48, "path": "/src/descriptor_runner/operators/cpu/operators/standard/constantofshape.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { onnx } from \"onnx-proto\";\nimport { DataArrayTypes, DataType } from \"../../../../interface/core/constants\";\nimport { OperatorImpl } from \"../../../operatorImpl\";\nimport { getAttrTensor } from \"../../../operatorUtil\";\nimport { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\nclass ConstantOfShape extends OperatorImpl {\n constant!: {\n data: DataArrayTypes;\n dataType: DataType;\n dims: number[];\n };\n\n constructor() {\n super(\"cpu\");\n }\n\n initialize(attribute: onnx.IAttributeProto[]): void {\n super.initialize(attribute);\n const constant = getAttrTensor(attribute, \"value\");\n if (!constant) {\n throw new Error(\"value not exist in ConstantOfShape\");\n }\n this.constant = constant;\n }\n\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsCPUTensorArray(inputs);\n const input = Array.from(inputs[0].data);\n const output = context.emptyTensor(input, this.constant.dataType);\n output.data.fill(this.constant.data[0]);\n return [output];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"ConstantOfShape\",\n backend: \"cpu\",\n opsetMin: 9,\n factory: () => new ConstantOfShape(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.5942965149879456, "alphanum_fraction": 0.6239168047904968, "avg_line_length": 38.422359466552734, "blob_id": "359271428a5041e9fa0d88de58df652794d116a6", "content_id": "02b89bd8040117b456fe628049a8d7835c8cc0c5", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6419, "license_type": "permissive", "max_line_length": 129, "num_lines": 161, "path": "/example/detr/conversion.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "from PIL import Image\nimport os\nimport numpy as np\nimport json\nimport requests\nimport subprocess\n\nimport torch\nfrom torch import nn\nfrom torchvision.models import resnet50\nimport torchvision.transforms as T\nfrom webdnn.tensor_export import serialize_tensors\n\n\nclass DETROnnx(nn.Module):\n \"\"\"\n Demo DETR implementation.\n\n Demo implementation of DETR in minimal number of lines, with the\n following differences wrt DETR in the paper:\n * learned positional encoding (instead of sine)\n * positional encoding is passed at input (instead of attention)\n * fc bbox predictor (instead of MLP)\n The model achieves ~40 AP on COCO val5k and runs at ~28 FPS on Tesla V100.\n Only batch size 1 supported.\n \"\"\"\n def __init__(self, num_classes, hidden_dim=256, nheads=8,\n num_encoder_layers=6, num_decoder_layers=6):\n super().__init__()\n\n # create ResNet-50 backbone\n self.backbone = resnet50()\n del 
self.backbone.fc\n\n        # create conversion layer\n        self.conv = nn.Conv2d(2048, hidden_dim, 1)\n\n        # create a default PyTorch transformer\n        self.transformer = nn.Transformer(\n            hidden_dim, nheads, num_encoder_layers, num_decoder_layers)\n\n        # prediction heads, one extra class for predicting non-empty slots\n        # note that in baseline DETR linear_bbox layer is 3-layer MLP\n        self.linear_class = nn.Linear(hidden_dim, num_classes + 1)\n        self.linear_bbox = nn.Linear(hidden_dim, 4)\n\n        # output positional encodings (object queries)\n        # self.query_pos_us = nn.Parameter(torch.rand(100, 1, hidden_dim))\n\n        # spatial positional encodings\n        # note that in baseline DETR we use sine positional encodings\n        # self.embed_const = nn.Parameter(torch.rand(850, 1, 256)) # depends on shape of output of self.conv(x)\n\n    def forward(self, inputs, embed_const, query_pos_us):\n        # propagate inputs through ResNet-50 up to avg-pool layer\n        x = self.backbone.conv1(inputs)\n        x = self.backbone.bn1(x)\n        x = self.backbone.relu(x)\n        x = self.backbone.maxpool(x)\n\n        x = self.backbone.layer1(x)\n        x = self.backbone.layer2(x)\n        x = self.backbone.layer3(x)\n        x = self.backbone.layer4(x)\n\n        # convert from 2048 to 256 feature planes for the transformer\n        h = self.conv(x)\n\n        # propagate through the transformer\n        #h = self.transformer(self.embed_const + 0.1 * h.flatten(2).permute(2, 0, 1),\n        #                     self.query_pos_us).transpose(0, 1)\n        # If query_pos_us is held as a model parameter, ONNX conversion fails:\n        # RuntimeError: Tensors must have same number of dimensions: got 1 and 2\n        # This appears to be a bug in the export mechanism\n        h = self.transformer(embed_const + 0.1 * h.flatten(2).permute(2, 0, 1),\n                             query_pos_us).transpose(0, 1)\n        \n        # finally project transformer outputs to class labels and bounding boxes\n        # pred_logits, pred_boxes\n        return self.linear_class(h), self.linear_bbox(h).sigmoid()\n\ndef download_sample_image(path, url):\n    if os.path.exists(path):\n        return\n    img = requests.get(url).content\n    with open(path, \"wb\") as f:\n        f.write(img)\n\ndef export_test_data(output_dir, model, embed_const, query_pos_us):\n    im = Image.open(os.path.join(output_dir, \"000000039769.jpg\"))\n    \n    # standard PyTorch mean-std input image normalization\n    transform = T.Compose([\n        T.Resize(800),\n        T.ToTensor(),\n        T.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n    ])\n\n    # for output bounding box post-processing\n    def box_cxcywh_to_xyxy(x):\n        x_c, y_c, w, h = x.unbind(1)\n        b = [(x_c - 0.5 * w), (y_c - 0.5 * h),\n             (x_c + 0.5 * w), (y_c + 0.5 * h)]\n        return torch.stack(b, dim=1)\n\n    def rescale_bboxes(out_bbox, size):\n        img_w, img_h = size\n        b = box_cxcywh_to_xyxy(out_bbox)\n        b = b * torch.tensor([img_w, img_h, img_w, img_h], dtype=torch.float32)\n        return b\n    \n    img_preprocessed = transform(im).unsqueeze(0)\n    \n    pred_logits, pred_boxes = model(img_preprocessed, embed_const, query_pos_us)\n    serialize_tensors(os.path.join(output_dir, \"test.bin\"), {\n        \"input_0\": img_preprocessed.numpy(),\n        \"input_embed_const\": embed_const.numpy(),\n        \"output_logits\": pred_logits.numpy(),\n        \"output_boxes\": pred_boxes.numpy()})\n\ndef main():\n    output_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"output\")\n    os.makedirs(output_dir, exist_ok=True)\n\n    torch.set_grad_enabled(False)\n    download_sample_image(os.path.join(output_dir, \"000000039769.jpg\"), 'http://images.cocodataset.org/val2017/000000039769.jpg')\n    state_dict = torch.hub.load_state_dict_from_url(\n        url='https://dl.fbaipublicfiles.com/detr/detr_demo-da2a99e9.pth',\n        map_location='cpu', check_hash=True)\n\n    state_dict_onnx = {k:v.float() for 
k, v in state_dict.items()}\n H, W = 25, 34\n query_pos_us = state_dict_onnx[\"query_pos\"].unsqueeze(1)\n embed_const = torch.cat([\n state_dict_onnx[\"col_embed\"][:W].unsqueeze(0).repeat(H, 1, 1),\n state_dict_onnx[\"row_embed\"][:H].unsqueeze(1).repeat(1, W, 1),\n ], dim=-1).flatten(0, 1).unsqueeze(1)\n del state_dict_onnx[\"row_embed\"]\n del state_dict_onnx[\"col_embed\"]\n del state_dict_onnx[\"query_pos\"]\n\n detr_onnx = DETROnnx(num_classes=91)\n detr_onnx.load_state_dict(state_dict_onnx)\n detr_onnx.eval()\n\n img_preprocessed = torch.zeros(1, 3, 800, 1066)\n onnx_path = f\"{output_dir}/unoptimized_model.onnx\"\n torch.onnx.export(detr_onnx, (img_preprocessed, embed_const, query_pos_us), onnx_path,\n verbose=True,\n input_names=[\"input_0\", \"input_embed_const\", \"input_query_pos_us\"],\n output_names=[\"output_logits\", \"output_boxes\"], opset_version=10)\n subprocess.check_call([\"python\", \"-m\", \"webdnn.optimize_model\", onnx_path, os.path.join(output_dir)])\n serialize_tensors(os.path.join(output_dir, \"embedding.bin\"), {\n \"col_embed\": state_dict[\"col_embed\"].float().numpy(),\n \"row_embed\": state_dict[\"row_embed\"].float().numpy(),\n \"query_pos_us\": query_pos_us.numpy()})\n\n export_test_data(output_dir, detr_onnx, embed_const, query_pos_us)\n\nif __name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.7261146306991577, "alphanum_fraction": 0.7388535141944885, "avg_line_length": 23.789474487304688, "blob_id": "f1c8a7edb744ddc8bca7fd6f5fd73c344f725bca", "content_id": "7047ff0f97705899d67a551cde860e98da5f202f", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 471, "license_type": "permissive", "max_line_length": 78, "num_lines": 19, "path": "/src/descriptor_runner/image/canvas.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "/**\n * @module webdnn/image\n */\n/** Don't Remove This comment block */\n\n/**\n * Get canvas rendering context and check whether it is nonnull value.\n *\n * @param {CanvasRenderingContext2D} canvas\n * @protected\n */\nexport function getContext2D(\n canvas: HTMLCanvasElement\n): CanvasRenderingContext2D {\n const context = canvas.getContext(\"2d\");\n if (!context) throw Error(\"CanvasRenderingContext2D initialization failed\");\n\n return context as CanvasRenderingContext2D;\n}\n" }, { "alpha_fraction": 0.5861538648605347, "alphanum_fraction": 0.5953845977783203, "avg_line_length": 27.88888931274414, "blob_id": "56a1c8973c00073d0e4b7a8ab5e3d4ec30d4cf62", "content_id": "7429dc880bedaef5e1b2a78184b4369e951a8c67", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1300, "license_type": "permissive", "max_line_length": 88, "num_lines": 45, "path": "/src/descriptor_runner/operators/cpu/operators/standard/gather.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { onnx } from \"onnx-proto\";\nimport { OperatorImpl } from \"../../../operatorImpl\";\nimport { getAttrInt } from \"../../../operatorUtil\";\nimport { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\nclass Gather extends OperatorImpl {\n axis!: number;\n\n constructor() {\n super(\"cpu\");\n }\n\n initialize(attribute: onnx.IAttributeProto[]): void {\n super.initialize(attribute);\n this.axis = 
getAttrInt(attribute, \"axis\", 0);\n  }\n\n  async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n    context.assertsCPUTensorArray(inputs);\n    const data = inputs[0],\n      indices = inputs[1],\n      { axis } = this;\n    if (!(data.ndim === 1 && indices.ndim === 0 && axis === 0)) {\n      throw new Error(\n        \"Gather: currently supports data.ndim === 1 && indices.ndim === 0 && axis === 0\"\n      );\n    }\n    const output = context.emptyTensor([], data.dataType);\n    output.data[0] = data.data[indices.data[0]];\n    return [output];\n  }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n  return [\n    {\n      opType: \"Gather\",\n      backend: \"cpu\",\n      opsetMin: 1,\n      factory: () => new Gather(),\n    },\n  ];\n}\n" }, { "alpha_fraction": 0.6149466037750244, "alphanum_fraction": 0.6252669095993042, "avg_line_length": 27.383838653564453, "blob_id": "06b04d862ab61a93584b154fd40b05d072f632da", "content_id": "b21dc2766e4d9bb45d243190cc5359dd93c95e84", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 2880, "license_type": "permissive", "max_line_length": 87, "num_lines": 99, "path": "/src/descriptor_runner/backend/webgpu/webgpuMetaBuffer.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { WebGPUMetaBufferContent } from \"../../interface/backend/webgpu/webgpuContext\";\nimport { WebDNNWebGPUContextImpl } from \"./webgpuContextImpl\";\nimport { WebGPUTensorImpl } from \"./webgpuTensorImpl\";\n\nexport class WebGPUMetaBuffer {\n  constructor(\n    public context: WebDNNWebGPUContextImpl,\n    public tensor: WebGPUTensorImpl,\n    private cpuBuffer: Uint8Array,\n    private cpuBufferHash: number\n  ) {}\n\n  private static buildCPUBuffer(content: WebGPUMetaBufferContent) {\n    const byteLength = content.elements.length * 4,\n      cpuBuffer = new Uint8Array(byteLength),\n      cpuBufferView = new DataView(cpuBuffer.buffer);\n    let ofs = 0;\n    for (const element of content.elements) {\n      switch (element.type) {\n        case \"int32\":\n          cpuBufferView.setInt32(ofs, element.value, true);\n          break;\n        case \"uint32\":\n          cpuBufferView.setUint32(ofs, element.value, true);\n          break;\n        case \"float32\":\n          cpuBufferView.setFloat32(ofs, element.value, true);\n          break;\n        default:\n          throw new Error();\n      }\n      ofs += 4;\n    }\n\n    return cpuBuffer;\n  }\n\n  private static calcBufferHash(cpuBuffer: Uint8Array): number {\n    let v = 0;\n    for (let i = 0; i < cpuBuffer.length; i++) {\n      v += cpuBuffer[i];\n    }\n    return v;\n  }\n\n  private static findPooled(\n    context: WebDNNWebGPUContextImpl,\n    cpuBuffer: Uint8Array,\n    cpuBufferHash: number\n  ): WebGPUMetaBuffer | null {\n    const pooled = context.pooledMetaBuffer;\n    for (let i = 0; i < pooled.length; i++) {\n      const item = pooled[i];\n      if (\n        item.cpuBuffer.length === cpuBuffer.length &&\n        item.cpuBufferHash === cpuBufferHash\n      ) {\n        let diff = false;\n        for (let j = 0; j < cpuBuffer.length; j++) {\n          if (cpuBuffer[j] !== item.cpuBuffer[j]) {\n            diff = true;\n            break;\n          }\n        }\n        if (!diff) {\n          pooled.splice(i, 1);\n          return item;\n        }\n      }\n    }\n    return null;\n  }\n\n  static async createBuffer(\n    context: WebDNNWebGPUContextImpl,\n    content: WebGPUMetaBufferContent\n  ): Promise<WebGPUMetaBuffer> {\n    const cpuBuffer = WebGPUMetaBuffer.buildCPUBuffer(content),\n      cpuBufferHash = WebGPUMetaBuffer.calcBufferHash(cpuBuffer),\n      // If a buffer with exactly the same contents exists in the pool, reuse it; otherwise create a buffer and transfer it to the GPU\n      found = WebGPUMetaBuffer.findPooled(context, cpuBuffer, cpuBufferHash);\n    if (found) {\n      return found;\n    }\n    const tensor = new WebGPUTensorImpl(\n      context,\n      
[cpuBuffer.length / 4],\n \"float32\",\n true,\n false\n );\n await tensor.setData(new Float32Array(cpuBuffer.buffer));\n return new WebGPUMetaBuffer(context, tensor, cpuBuffer, cpuBufferHash);\n }\n\n pushToPool(): void {\n this.context.pooledMetaBuffer.push(this);\n }\n}\n" }, { "alpha_fraction": 0.5518900156021118, "alphanum_fraction": 0.5670102834701538, "avg_line_length": 22.467741012573242, "blob_id": "645bd6d31bd1234011b26cda867bd6f60e51c700", "content_id": "3c7ffbc006fa24ae51739e641db545200628b16f", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1455, "license_type": "permissive", "max_line_length": 73, "num_lines": 62, "path": "/src/descriptor_runner/operators/base/gemm.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { onnx } from \"onnx-proto\";\nimport { Backend } from \"../../interface/core/constants\";\nimport { OperatorImpl } from \"../operatorImpl\";\nimport { getAttrFloat, getAttrInt } from \"../operatorUtil\";\n\n// Version 13\nexport abstract class Gemm extends OperatorImpl {\n alpha!: number;\n\n beta!: number;\n\n transA!: number;\n\n transB!: number;\n\n constructor(backend: Backend) {\n super(backend);\n }\n\n initialize(attribute: onnx.IAttributeProto[]): void {\n super.initialize(attribute);\n this.alpha = getAttrFloat(attribute, \"alpha\", 1.0);\n this.beta = getAttrFloat(attribute, \"beta\", 1.0);\n this.transA = getAttrInt(attribute, \"transA\", 0);\n this.transB = getAttrInt(attribute, \"transB\", 0);\n }\n\n calcShape(dimsA: ReadonlyArray<number>, dimsB: ReadonlyArray<number>) {\n let k: number,\n kcheck: number,\n m: number,\n n: number,\n strideA: number[],\n strideB: number[];\n if (dimsA.length !== 2 || dimsB.length !== 2) {\n throw new Error();\n }\n if (this.transA) {\n k = dimsA[0];\n m = dimsA[1];\n strideA = [1, m];\n } else {\n m = dimsA[0];\n k = dimsA[1];\n strideA = [k, 1];\n }\n if (this.transB) {\n n = dimsB[0];\n kcheck = dimsB[1];\n strideB = [1, kcheck];\n } else {\n kcheck = dimsB[0];\n n = dimsB[1];\n strideB = [n, 1];\n }\n if (k !== kcheck) {\n throw new Error();\n }\n\n return { m, n, k, strideA, strideB };\n }\n}\n" }, { "alpha_fraction": 0.5958486199378967, "alphanum_fraction": 0.5982906222343445, "avg_line_length": 24.59375, "blob_id": "5068a9e2ec3789f90a3352f8adeaeddb59f42b39", "content_id": "8d92421d0e60b2ada803e99a4bbbf3dfe1b267ac", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 819, "license_type": "permissive", "max_line_length": 80, "num_lines": 32, "path": "/src/descriptor_runner/operators/cpu/operators/standard/flatten.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { Flatten } from \"../../../base/flatten\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\nclass CPUFlatten extends Flatten {\n constructor() {\n super(\"cpu\");\n }\n\n async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsCPUTensorArray(inputs);\n const input = inputs[0],\n output = context.emptyTensor(\n this.calcShape(input),\n input.dataType,\n input.data\n );\n return [output];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Flatten\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: 
() => new CPUFlatten(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.5481002330780029, "alphanum_fraction": 0.569118857383728, "avg_line_length": 28.807228088378906, "blob_id": "5c522630dd18d299c365c847688e841ebeb2b174", "content_id": "ae8661ef4f413435a7de6d447a452821245f49cb", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 2474, "license_type": "permissive", "max_line_length": 89, "num_lines": 83, "path": "/src/descriptor_runner/operators/webgpu/operators/standard/gemm.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { WebDNNWebGPUContext } from \"../../../../interface/backend/webgpu/webgpuContext\";\nimport { WebGPUTensor } from \"../../../../interface/backend/webgpu/webgpuTensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { Gemm } from \"../../../base/gemm\";\nimport { broadcastUni } from \"../../../operatorUtil\";\nimport { webgpuShaders } from \"../../shaders\";\n\nexport class WebGPUGemm extends Gemm {\n constructor() {\n super(\"webgpu\");\n }\n\n async run(context: WebDNNWebGPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsWebGPUTensorArray(inputs);\n const inputA = inputs[0],\n inputB = inputs[1],\n inputC = inputs[2];\n if (inputC) {\n return this.runWithC(context, inputA, inputB, inputC);\n }\n throw new Error();\n }\n\n async runWithC(\n context: WebDNNWebGPUContext,\n inputA: WebGPUTensor,\n inputB: WebGPUTensor,\n inputC: WebGPUTensor\n ): Promise<WebGPUTensor[]> {\n if (inputA.dataType !== \"float32\") {\n throw new Error();\n }\n const {\n m,\n n,\n k,\n strideA: [strideA0, strideA1],\n strideB: [strideB0, strideB1],\n } = this.calcShape(inputA.dims, inputB.dims),\n [strideC0, strideC1] = broadcastUni([m, n], inputC.dims),\n outputTensor = context.emptyTensor([m, n], \"float32\"),\n shaderName = \"gemm\";\n\n if (!context.hasPipeline(shaderName)) {\n context.createPipeline(shaderName, webgpuShaders.gemm, 5);\n }\n\n await context.run({\n pipelineName: shaderName,\n tensors: [inputA, inputB, inputC, outputTensor],\n meta: {\n elements: [\n { value: m, type: \"uint32\" },\n { value: n, type: \"uint32\" },\n { value: k, type: \"uint32\" },\n { value: strideA0, type: \"uint32\" },\n { value: strideA1, type: \"uint32\" },\n { value: strideB0, type: \"uint32\" },\n { value: strideB1, type: \"uint32\" },\n { value: strideC0, type: \"uint32\" },\n { value: strideC1, type: \"uint32\" },\n { value: this.alpha, type: \"float32\" },\n { value: this.beta, type: \"float32\" },\n ],\n },\n workGroups: { x: 256 / 8, y: 256 / 8, z: 1 },\n });\n\n return [outputTensor];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Gemm\",\n backend: \"webgpu\",\n opsetMin: 1,\n factory: () => new WebGPUGemm(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.6743362545967102, "alphanum_fraction": 0.6743362545967102, "avg_line_length": 30.38888931274414, "blob_id": "576bd147efcb23b4d3a79ed252c607e2721cd664", "content_id": "3ce1613814875378b4908ead684ded621b2ad8c5", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 565, "license_type": "permissive", "max_line_length": 84, "num_lines": 18, "path": "/src/descriptor_runner/image.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "/**\n * @module webdnn/image\n * @preferred\n *\n * Module 
`WebDNN.Image` provides basic image processing operations like follows.\n *\n * - Load image by various way (File picker dialog, url string, canvas, video, etc.)\n * - Pack image data into TypedArray\n * - Crop and resize.\n * - Show result on canvas element\n *\n */\n/** Don't Remove This comment block */\n// Export * from \"./image/canvas\" // internal API\nexport * from \"./image/enums\";\nexport * from \"./image/image_array\";\n// Export * from \"./image/image_data\" // internal API\nexport * from \"./image/image_source\";\n" }, { "alpha_fraction": 0.4937736988067627, "alphanum_fraction": 0.508933424949646, "avg_line_length": 24.30137062072754, "blob_id": "51d77987bc7b6333a85f7d83743f657eff1c6968", "content_id": "fd72c59b5d4e4a8fd19c7c97a90a7e44c13e19cb", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1847, "license_type": "permissive", "max_line_length": 80, "num_lines": 73, "path": "/src/descriptor_runner/operators/cpu/operators/standard/gemm.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { broadcastUni } from \"../../../operatorUtil\";\nimport { Gemm } from \"../../../base/gemm\";\nimport { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\n// Version 13\nclass CpuGemm extends Gemm {\n alpha!: number;\n\n beta!: number;\n\n transA!: number;\n\n transB!: number;\n\n constructor() {\n super(\"cpu\");\n }\n\n async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsCPUTensorArray(inputs);\n const inputA = inputs[0],\n inputB = inputs[1],\n inputC = inputs[2],\n {\n m,\n n,\n k,\n strideA: [strideA0, strideA1],\n strideB: [strideB0, strideB1],\n } = this.calcShape(inputA.dims, inputB.dims),\n newData = new Float32Array(m * n),\n dA = inputA.data,\n dB = inputB.data,\n { alpha } = this;\n for (let i = 0; i < m; i++) {\n for (let j = 0; j < n; j++) {\n let sum = 0;\n for (let x = 0; x < k; x++) {\n sum +=\n dA[i * strideA0 + x * strideA1] * dB[x * strideB0 + j * strideB1];\n }\n sum *= alpha;\n newData[i * n + j] = sum;\n }\n }\n\n if (inputC) {\n const [strideC0, strideC1] = broadcastUni([m, n], inputC.dims),\n dC = inputC.data,\n { beta } = this;\n for (let i = 0; i < m; i++) {\n for (let j = 0; j < n; j++) {\n newData[i * n + j] += dC[i * strideC0 + j * strideC1] * beta;\n }\n }\n }\n const output = context.emptyTensor([m, n], \"float32\", newData);\n return [output];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Gemm\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new CpuGemm(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.5673688054084778, "alphanum_fraction": 0.5840674042701721, "avg_line_length": 25.32056427001953, "blob_id": "baebb794365a374d02f0457b89f6f1bdae9f9e21", "content_id": "85279e7b9ece6f709cdb2c1cacb91358a7f67250", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 13085, "license_type": "permissive", "max_line_length": 164, "num_lines": 496, "path": "/src/descriptor_runner/operators/webgl/operators/standard/convtranspose.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import {\n WebDNNWebGLContext,\n WebGLUniformItem,\n} from \"../../../../interface/backend/webgl/webglContext\";\nimport { WebGLTensor } from 
\"../../../../interface/backend/webgl/webglTensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { ConvTranspose } from \"../../../base/convtranspose\";\nimport {\n  shaderGenHeader,\n  shaderGenOutput,\n  shaderGenTensorNDGet,\n  shaderGenTensorNDGetUniformItem,\n  shaderGenTensorOutputCoordsWithReturn,\n  shaderGenTensorOutputUniform,\n  shaderGenTensorOutputUniformItem,\n} from \"../../shaderHelper\";\n\nexport class WebGLConvTranspose extends ConvTranspose {\n  constructor() {\n    super(\"webgl\");\n  }\n\n  async run(context: WebDNNWebGLContext, inputs: Tensor[]): Promise<Tensor[]> {\n    context.assertsWebGLTensorArray(inputs);\n    const inputX = inputs[0],\n      inputW = inputs[1],\n      inputB = inputs[2];\n    // TODO: support cases other than 2D\n    if (inputX.ndim !== 4) {\n      throw new Error(\"ConvTranspose other than 2D is not yet supported\");\n    }\n    const {\n      batch,\n      dilations,\n      group,\n      kernelShape,\n      pads,\n      strides,\n      inShape,\n      outShape,\n      chIn,\n      chInPerGroup,\n      chOut,\n      chOutPerGroup,\n    } = this.calcShape(inputX.dims, inputW.dims);\n    if (\n      inputX.dimPerPixel !== 1 ||\n      inputW.dimPerPixel !== 1 ||\n      (inputB && inputB.dimPerPixel !== 1)\n    ) {\n      throw new Error();\n    }\n    const inputTransposeData = context.emptyTensor([\n      chIn * batch * inShape[0] * inShape[1],\n    ]);\n    await this.transposeInput(\n      context,\n      inputX,\n      inputTransposeData,\n      group,\n      batch,\n      inShape[0] * inShape[1],\n      chInPerGroup\n    );\n    const weightTransposeData = context.emptyTensor([\n      chOut * kernelShape[0] * kernelShape[1] * chInPerGroup,\n    ]);\n    await this.transposeWeight(\n      context,\n      inputW,\n      weightTransposeData,\n      group,\n      chInPerGroup,\n      chOutPerGroup,\n      kernelShape[0] * kernelShape[1]\n    );\n    const matmulData = context.emptyTensor([\n      chOut * batch * inShape[0] * inShape[1] * kernelShape[0] * kernelShape[1],\n    ]);\n    await this.matmul(\n      context,\n      inputTransposeData,\n      weightTransposeData,\n      matmulData,\n      group,\n      batch * inShape[0] * inShape[1],\n      chOutPerGroup * kernelShape[0] * kernelShape[1],\n      chInPerGroup\n    );\n    inputTransposeData.dispose();\n    weightTransposeData.dispose();\n    const output = context.emptyTensor([\n      batch,\n      chOut,\n      outShape[0],\n      outShape[1],\n    ]);\n\n    if (inputB) {\n      const col2ImData = context.emptyTensor([\n        batch * chOut * outShape[0] * outShape[1],\n      ]);\n      await this.col2im(\n        context,\n        matmulData,\n        col2ImData,\n        batch,\n        dilations,\n        group,\n        kernelShape,\n        pads,\n        strides,\n        inShape,\n        outShape,\n        chOutPerGroup\n      );\n      matmulData.dispose();\n      await this.bias(\n        context,\n        col2ImData,\n        inputB,\n        output,\n        batch,\n        chOut,\n        outShape[0] * outShape[1]\n      );\n      col2ImData.dispose();\n    } else {\n      await this.col2im(\n        context,\n        matmulData,\n        output,\n        batch,\n        dilations,\n        group,\n        kernelShape,\n        pads,\n        strides,\n        inShape,\n        outShape,\n        chOutPerGroup\n      );\n      matmulData.dispose();\n    }\n    return [output];\n  }\n\n  private async col2im(\n    context: WebDNNWebGLContext,\n    dI: WebGLTensor,\n    dY: WebGLTensor,\n    batch: number,\n    dilations: number[],\n    group: number,\n    kernelShape: number[],\n    pads: number[],\n    strides: number[],\n    inShape: number[],\n    outShape: number[],\n    chOutPerGroup: number\n  ) {\n    // dI: group, batch, inShape[0], inShape[1], chOutPerGroup, kernelShape[0], kernelShape[1]\n    // dY: batch, group, chOutPerGroup, outShape[0], outShape[1]\n    const kernelName = `convtranspose_col2im_${kernelShape[0]}_${kernelShape[1]}_${strides[0]}_${strides[1]}_${pads[0]}_${pads[1]}_${dilations[0]}_${dilations[1]}`;\n    if 
(!context.hasKernel(kernelName)) {\n      const kernelSource = `${shaderGenHeader(context.webgl2)}\n  \n  ${shaderGenTensorOutputUniform(1)}\n  #define K0 ${kernelShape[0]}\n  #define K1 ${kernelShape[1]}\n  #define S0 ${strides[0]}\n  #define S1 ${strides[1]}\n  #define P0 ${pads[0]}\n  #define P1 ${pads[1]}\n  #define D0 ${dilations[0]}\n  #define D1 ${dilations[1]}\n  uniform int GROUP;\n  uniform int BATCH;\n  uniform int O0;\n  uniform int O1;\n  uniform int COPG;\n  uniform int IS0;\n  uniform int IS1;\n  \n  ${shaderGenTensorNDGet(\"tex_input\", 1, context.webgl2)}\n  \n  void main() {\n    ${shaderGenTensorOutputCoordsWithReturn(1)}\n    int rem = tex_output_flat;\n    int quo = rem / O1;\n    int o1 = rem - quo * O1;\n    rem = quo;\n    quo = rem / O0;\n    int o0 = rem - quo * O0;\n    rem = quo;\n    quo = rem / COPG;\n    int co = rem - quo * COPG;\n    rem = quo;\n    quo = rem / GROUP;\n    int g = rem - quo * GROUP;\n    int b = quo;\n  \n    float s = 0.0;\n    for (int k0 = 0; k0 < K0; k0++) {\n      for (int k1 = 0; k1 < K1; k1++) {\n        int i0s = o0 + P0 - k0 * D0;\n        int i1s = o1 + P1 - k1 * D1;\n        int i0 = i0s / S0;\n        if (i0s - i0 * S0 != 0 || i0 < 0 || i0 >= IS0) {\n          continue;\n        }\n        int i1 = i1s / S1;\n        if (i1s - i1 * S1 != 0 || i1 < 0 || i1 >= IS1) {\n          continue;\n        }\n        s += get_tex_input((((((g * BATCH + b) * IS0 + i0) * IS1 + i1) * COPG + co) * K0 + k0) * K1 + k1);\n      }\n    }\n    ${shaderGenOutput(\"s\", context.webgl2)}\n    return;\n  }\n  `;\n      context.addKernel(kernelName, kernelSource);\n    }\n\n    const uniforms: WebGLUniformItem[] = [\n      ...shaderGenTensorNDGetUniformItem(\"tex_input\", [1], dI, context.webgl2),\n      ...shaderGenTensorOutputUniformItem([dY.length], dY, context.webgl2),\n      { name: \"GROUP\", type: \"int\", value: group },\n      { name: \"BATCH\", type: \"int\", value: batch },\n      { name: \"O0\", type: \"int\", value: outShape[0] },\n      { name: \"O1\", type: \"int\", value: outShape[1] },\n      { name: \"COPG\", type: \"int\", value: chOutPerGroup },\n      { name: \"IS0\", type: \"int\", value: inShape[0] },\n      { name: \"IS1\", type: \"int\", value: inShape[1] },\n    ];\n    await context.runKernel(\n      kernelName,\n      [{ tensor: dI, name: \"tex_input\" }],\n      dY,\n      uniforms\n    );\n  }\n\n  private async matmul(\n    context: WebDNNWebGLContext,\n    dTX: WebGLTensor,\n    dTW: WebGLTensor,\n    dI: WebGLTensor,\n    group: number,\n    bin: number,\n    cks: number,\n    chInPerGroup: number\n  ) {\n    /*\n    dTX(group, batch*inShape[0]*inShape[1]=bin, chInPerGroup) * dTW(group, chOutPerGroup*kernelShape[0]*kernelShape[1]=cks, chInPerGroup) -> dI(group, bin, cks)\n     * The loop count must be a constant\n    */\n    const kernelName = `convtranspose_matmul_${chInPerGroup}`;\n    if (!context.hasKernel(kernelName)) {\n      const kernelSource = `${shaderGenHeader(context.webgl2)}\n  \n  ${shaderGenTensorOutputUniform(1)}\n  #define cipg ${chInPerGroup}\n  uniform int GROUP;\n  uniform int BIN;\n  uniform int CKS;\n  \n  ${shaderGenTensorNDGet(\"tex_input_w\", 1, context.webgl2)}\n  ${shaderGenTensorNDGet(\"tex_input_i\", 1, context.webgl2)}\n  \n  void main() {\n    ${shaderGenTensorOutputCoordsWithReturn(1)}\n    int rem = tex_output_flat;\n    int quo = rem / CKS;\n    int x = rem - quo * CKS;\n    rem = quo;\n    quo = rem / BIN;\n    int y = rem - quo * BIN;\n    int g = quo;\n  \n    float s = 0.0;\n    for (int ip = 0; ip < cipg; ip++) {\n      s += get_tex_input_i((g * BIN + y) * cipg + ip) * get_tex_input_w((g * CKS + x) * cipg + ip);\n    }\n    ${shaderGenOutput(\"s\", context.webgl2)}\n    return;\n  }\n  `;\n      context.addKernel(kernelName, kernelSource);\n    }\n\n    const uniforms: WebGLUniformItem[] = [\n      ...shaderGenTensorNDGetUniformItem(\n        \"tex_input_w\",\n        [1],\n        dTW,\n        context.webgl2\n      
),\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_i\",\n [1],\n dTX,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem([dI.length], dI, context.webgl2),\n { name: \"GROUP\", type: \"int\", value: group },\n { name: \"BIN\", type: \"int\", value: bin },\n { name: \"CKS\", type: \"int\", value: cks },\n ];\n await context.runKernel(\n kernelName,\n [\n { tensor: dTW, name: \"tex_input_w\" },\n { tensor: dTX, name: \"tex_input_i\" },\n ],\n dI,\n uniforms\n );\n }\n\n private async transposeInput(\n context: WebDNNWebGLContext,\n dX: WebGLTensor,\n dTX: WebGLTensor,\n group: number,\n batch: number,\n inarea: number,\n chInPerGroup: number\n ) {\n // dX(batch, group, chInPerGroup, inShape[0], inShape[1]) -> dTX(group, batch, inShape[0], inShape[1], chInPerGroup)\n const kernelName = `convtranspose_transpose_input`;\n const kernelSource = `${shaderGenHeader(context.webgl2)}\n\n${shaderGenTensorOutputUniform(4)}\n\n${shaderGenTensorNDGet(\"tex_input\", 4, context.webgl2)}\n\nvoid main() {\n ${shaderGenTensorOutputCoordsWithReturn(4)}\n float s = get_tex_input(tex_output_0, tex_output_1, tex_output_2, tex_output_3);\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n}\n`;\n context.addKernel(kernelName, kernelSource);\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input\",\n [chInPerGroup * inarea, group * chInPerGroup * inarea, 1, inarea],\n dX,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem(\n [group, batch, inarea, chInPerGroup],\n dTX,\n context.webgl2\n ),\n ];\n await context.runKernel(\n kernelName,\n [{ tensor: dX, name: \"tex_input\" }],\n dTX,\n uniforms\n );\n }\n\n private async transposeWeight(\n context: WebDNNWebGLContext,\n dW: WebGLTensor,\n dTW: WebGLTensor,\n group: number,\n chInPerGroup: number,\n chOutPerGroup: number,\n karea: number\n ) {\n // dW(group, chInPerGroup, chOutPerGroup, kernelShape[0], kernelShape[1]) -> dTW(group, chOutPerGroup, kernelShape[0], kernelShape[1], cInPerGroup)\n const kernelName = `convtranspose_transpose_weight`;\n const kernelSource = `${shaderGenHeader(context.webgl2)}\n\n${shaderGenTensorOutputUniform(4)}\n\n${shaderGenTensorNDGet(\"tex_input\", 4, context.webgl2)}\n\nvoid main() {\n ${shaderGenTensorOutputCoordsWithReturn(4)}\n float s = get_tex_input(tex_output_0, tex_output_1, tex_output_2, tex_output_3);\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n}\n`;\n context.addKernel(kernelName, kernelSource);\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input\",\n [chInPerGroup * chOutPerGroup * karea, karea, 1, chOutPerGroup * karea],\n dW,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem(\n [group, chOutPerGroup, karea, chInPerGroup],\n dTW,\n context.webgl2\n ),\n ];\n await context.runKernel(\n kernelName,\n [{ tensor: dW, name: \"tex_input\" }],\n dTW,\n uniforms\n );\n }\n\n private async bias(\n context: WebDNNWebGLContext,\n dI: WebGLTensor,\n dB: WebGLTensor,\n dO: WebGLTensor,\n batch: number,\n chOut: number,\n outarea: number\n ) {\n const kernelName = `convtranspose_bias`;\n if (!context.hasKernel(kernelName)) {\n const kernelSource = `${shaderGenHeader(context.webgl2)}\n \n ${shaderGenTensorOutputUniform(1)}\n uniform int BATCH;\n uniform int COUT;\n uniform int OUTAREA;\n \n ${shaderGenTensorNDGet(\"tex_input_i\", 1, context.webgl2)}\n ${shaderGenTensorNDGet(\"tex_input_b\", 1, context.webgl2)}\n \n void main() {\n ${shaderGenTensorOutputCoordsWithReturn(1)}\n int rem = tex_output_flat;\n int quo 
= rem / OUTAREA;\n int x = rem - quo * OUTAREA;\n rem = quo;\n quo = rem / COUT;\n int c = rem - quo * COUT;\n int b = quo;\n \n float s = 0.0;\n s = get_tex_input_i(tex_output_flat) + get_tex_input_b(c);\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n }\n `;\n context.addKernel(kernelName, kernelSource);\n }\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_i\",\n [1],\n dI,\n context.webgl2\n ),\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input_b\",\n [1],\n dB,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem([dO.length], dO, context.webgl2),\n { name: \"BATCH\", type: \"int\", value: batch },\n { name: \"COUT\", type: \"int\", value: chOut },\n { name: \"OUTAREA\", type: \"int\", value: outarea },\n ];\n await context.runKernel(\n kernelName,\n [\n { tensor: dI, name: \"tex_input_i\" },\n { tensor: dB, name: \"tex_input_b\" },\n ],\n dO,\n uniforms\n );\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"ConvTranspose\",\n backend: \"webgl\",\n opsetMin: 1,\n factory: () => new WebGLConvTranspose(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.6492370367050171, "alphanum_fraction": 0.6590030789375305, "avg_line_length": 25.143617630004883, "blob_id": "6bd8d27a8fe0efaf5bc8f69fae36719004045a1a", "content_id": "fa63a55987d49b68982c151e7542d2fa877a4174", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 4915, "license_type": "permissive", "max_line_length": 143, "num_lines": 188, "path": "/src/descriptor_runner/image/image_data.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "/**\n * @module webdnn/image\n */\n/** Don't Remove This comment block */\n\nimport { getContext2D } from \"./canvas\";\n\n/**\n * The rectangle of source position of image\n */\nexport interface SourceRect {\n srcX?: number;\n srcY?: number;\n srcW?: number;\n srcH?: number;\n}\n\n/**\n * The rectangle of destination position of image\n */\nexport interface DestinationRect {\n dstX?: number;\n dstY?: number;\n dstW?: number;\n dstH?: number;\n}\n\n/**\n * @protected\n */\nexport function getImageDataFromCanvas(\n canvas: HTMLCanvasElement,\n options: SourceRect & DestinationRect = {}\n): ImageData {\n const {\n srcX = 0,\n srcY = 0,\n srcW = canvas.width,\n srcH = canvas.height,\n dstX = 0,\n dstY = 0,\n } = options,\n { dstW = srcW, dstH = srcH } = options;\n\n let imageData = getContext2D(canvas).getImageData(srcX, srcY, srcW, srcH);\n\n if (dstX !== 0 || dstY !== 0 || srcW !== dstW || srcH !== dstH) {\n imageData = cropAndResizeImageData(imageData, { dstX, dstY, dstW, dstH });\n }\n\n return imageData;\n}\n\n/**\n * @protected\n */\nexport function getImageDataFromDrawable(\n drawable: HTMLVideoElement | HTMLImageElement,\n options: SourceRect & DestinationRect = {}\n): ImageData {\n let srcH: number, srcW: number;\n\n if (drawable instanceof HTMLVideoElement) {\n srcW = drawable.videoWidth;\n srcH = drawable.videoHeight;\n } else if (drawable instanceof HTMLImageElement) {\n srcW = drawable.naturalWidth;\n srcH = drawable.naturalHeight;\n } else\n throw TypeError(\n 'Failed to execute \"getImageDataFromDrawable(drawable, options)\": \"drawable\" must be an instanceof HTMLVideoElement or HTMLImageElement'\n );\n\n const {\n srcX = 0,\n srcY = 0,\n dstX = 0,\n dstY = 0,\n dstW = srcW,\n dstH = srcH,\n } = options,\n canvas = document.createElement(\"canvas\");\n canvas.width = dstX + dstW;\n canvas.height = dstY + 
dstH;\n\n  const context = getContext2D(canvas);\n  context.drawImage(drawable, srcX, srcY, srcW, srcH, dstX, dstY, dstW, dstH);\n  return context.getImageData(0, 0, dstX + dstW, dstY + dstH);\n}\n\n/**\n * Source rectangle of source image is cropped and then copied into destination rectangle of new image data\n *\n * @param {ImageData} src\n * @param {SourceRect & DestinationRect} options\n * @returns {ImageData}\n * @protected\n */\nfunction cropAndResizeImageData(\n  src: ImageData,\n  options: SourceRect & DestinationRect = {}\n) {\n  const {\n    srcX = 0,\n    srcY = 0,\n    srcW = src.width,\n    srcH = src.height,\n    dstX = 0,\n    dstY = 0,\n  } = options,\n    { dstW = srcW, dstH = srcH } = options,\n    canvas1 = document.createElement(\"canvas\");\n  canvas1.width = srcW;\n  canvas1.height = srcH;\n  const context1 = getContext2D(canvas1);\n  context1.putImageData(src, -srcX, -srcY);\n\n  const canvas2 = document.createElement(\"canvas\");\n  canvas2.width = dstX + dstW;\n  canvas2.height = dstY + dstH;\n  const context2 = getContext2D(canvas2);\n  context2.drawImage(canvas1, 0, 0, srcW, srcH, dstX, dstY, dstW, dstH);\n\n  return context2.getImageData(0, 0, dstX + dstW, dstY + dstH);\n}\n\n/**\n * Return canvas `ImageData` object with specified scale.\n *\n * @param {HTMLCanvasElement | HTMLVideoElement | HTMLImageElement} image\n * @param [options] Options\n * @param {number} [options.srcX=0] left position of input clipping rect\n * @param {number} [options.srcY=0] top position of input clipping rect\n * @param {number} [options.srcW=canvas.width] width of input clipping rect\n * @param {number} [options.srcH=canvas.height] height of input clipping rect\n * @param {number} [options.dstW=options.srcW] width of output\n * @param {number} [options.dstH=options.srcH] height of output\n * @returns {ImageData}\n * @protected\n */\nexport function getImageData(\n  image: HTMLCanvasElement | HTMLVideoElement | HTMLImageElement,\n  options: SourceRect & DestinationRect = {}\n): ImageData {\n  if (image instanceof HTMLCanvasElement) {\n    return getImageDataFromCanvas(image, options);\n  } else if (\n    image instanceof HTMLVideoElement ||\n    image instanceof HTMLImageElement\n  ) {\n    return getImageDataFromDrawable(image, options);\n  }\n  throw TypeError(\n    'Failed to execute \"getImageData(image, options)\": \"image\" must be an instance of HTMLCanvasElement, HTMLVideoElement, or HTMLImageElement'\n  );\n}\n\n/**\n * @protected\n */\nexport function setImageDataToCanvas(\n  imageData: ImageData,\n  canvas: HTMLCanvasElement,\n  options: SourceRect & DestinationRect = {}\n): void {\n  const {\n    srcX = 0,\n    srcY = 0,\n    srcW = imageData.width,\n    srcH = imageData.height,\n    dstX = 0,\n    dstY = 0,\n  } = options,\n    { dstW = srcW, dstH = srcH } = options;\n\n  if (srcX !== 0 || srcY !== 0 || srcW !== dstW || srcH !== dstH) {\n    imageData = cropAndResizeImageData(imageData, {\n      srcX,\n      srcY,\n      srcW,\n      srcH,\n      dstW,\n      dstH,\n    });\n  }\n\n  getContext2D(canvas).putImageData(imageData, dstX, dstY);\n}\n" }, { "alpha_fraction": 0.6566265225410461, "alphanum_fraction": 0.661897599697113, "avg_line_length": 29.18181800842285, "blob_id": "34c6746393744eba4f5442cd0eb547cea4c1acd4", "content_id": "a7fb1efbf40155b72755de2d4a560ed1f0ae815d", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1328, "license_type": "permissive", "max_line_length": 77, "num_lines": 44, "path": "/src/descriptor_runner/operators/base/unsqueeze.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": 
"UTF-8", "text": "import { onnx } from \"onnx-proto\";\nimport { OperatorImpl } from \"../operatorImpl\";\nimport { getAttrInts } from \"../operatorUtil\";\nimport { Tensor } from \"../../interface/core/tensor\";\nimport { CPUTensor } from \"../..\";\n\nabstract class Unsqueeze extends OperatorImpl {\n  protected calcShapeBase(\n    inputShape: ReadonlyArray<number>,\n    axes: ReadonlyArray<number>\n  ): number[] {\n    const expandedNdim = inputShape.length + axes.length;\n    const expandedShape: number[] = [];\n    let srcIdx = 0;\n    const nonNegativeAxes = axes.map((a) => (a >= 0 ? a : a + expandedNdim));\n    for (let d = 0; d < expandedNdim; d++) {\n      if (nonNegativeAxes.includes(d)) {\n        expandedShape.push(1);\n      } else {\n        expandedShape.push(inputShape[srcIdx++]);\n      }\n    }\n    return expandedShape;\n  }\n}\n\nexport abstract class Unsqueeze1 extends Unsqueeze {\n  axes!: number[];\n\n  initialize(attribute: onnx.IAttributeProto[]): void {\n    super.initialize(attribute);\n    this.axes = getAttrInts(attribute, \"axes\", []);\n  }\n\n  protected calcShape(input: Tensor): number[] {\n    return this.calcShapeBase(input.dims, this.axes);\n  }\n}\n\nexport abstract class Unsqueeze13 extends Unsqueeze {\n  protected calcShape(input: Tensor, axes: CPUTensor): number[] {\n    return this.calcShapeBase(input.dims, Array.from(axes.data));\n  }\n}\n" }, { "alpha_fraction": 0.5673688054084778, "alphanum_fraction": 0.5743243098258972, "avg_line_length": 28.18309783935547, "blob_id": "72e3e2edc7aaba565ae441d997bb7b79c86fe017", "content_id": "f5da634ce4885102aa5be66e120b81c984c7a9f6", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 2094, "license_type": "permissive", "max_line_length": 80, "num_lines": 71, "path": "/src/descriptor_runner/operators/cpu/operators/standard/softmax.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { onnx } from \"onnx-proto\";\nimport { OperatorImpl } from \"../../../operatorImpl\";\nimport { getAttrInt } from \"../../../operatorUtil\";\nimport { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\nclass Softmax extends OperatorImpl {\n  axis!: number;\n\n  constructor() {\n    super(\"cpu\");\n  }\n\n  initialize(attribute: onnx.IAttributeProto[]): void {\n    super.initialize(attribute);\n    // TODO: support axis, whose default is different between opsets\n    this.axis = getAttrInt(attribute, \"axis\", -1);\n  }\n\n  async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n    context.assertsCPUTensorArray(inputs);\n    const input = inputs[0];\n    let { axis } = this;\n    if (axis < 0) {\n      axis += input.ndim;\n    }\n    if (axis !== input.ndim - 1) {\n      throw new Error(\n        \"Softmax: currently only reducing final axis is supported\"\n      );\n    }\n    // Implementation specialized for reduction over the last axis\n    const reductionLength = input.dims[axis],\n      outerLength = input.length / reductionLength,\n      output = context.emptyTensor(input.dims, input.dataType),\n      dI = input.data,\n      dO = output.data;\n    for (let outer = 0; outer < outerLength; outer++) {\n      let max = -Infinity;\n      for (let r = 0; r < reductionLength; r++) {\n        const v = dI[outer * reductionLength + r];\n        if (v > max) {\n          max = v;\n        }\n      }\n      let expsum = 0;\n      for (let r = 0; r < reductionLength; r++) {\n        const v = dI[outer * reductionLength + r],\n          exp = Math.exp(v - max);\n        dO[outer * reductionLength + r] = exp;\n        expsum += exp;\n      }\n      
for (let r = 0; r < reductionLength; r++) {\n        dO[outer * reductionLength + r] /= expsum;\n      }\n    }\n    return [output];\n  }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n  return [\n    {\n      opType: \"Softmax\",\n      backend: \"cpu\",\n      opsetMin: 1,\n      factory: () => new Softmax(),\n    },\n  ];\n}\n" }, { "alpha_fraction": 0.6604189872741699, "alphanum_fraction": 0.6637265682220459, "avg_line_length": 25.19444465637207, "blob_id": "130c9e24faf25e7e453cdc51d4a9d18451d7ef14", "content_id": "dfddb9e2aea4e46b51c4c2dc9b0f6a7d3f29494d", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 945, "license_type": "permissive", "max_line_length": 80, "num_lines": 36, "path": "/src/descriptor_runner/core/tensorImpl.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { Backend, DataArrayTypes, DataType } from \"../interface/core/constants\";\nimport { Tensor } from \"../interface/core/tensor\";\n\nexport abstract class TensorImpl implements Tensor {\n  readonly dims: ReadonlyArray<number>;\n\n  readonly ndim: number;\n\n  readonly length: number;\n\n  readonly strides: ReadonlyArray<number>;\n\n  constructor(\n    dims: ReadonlyArray<number>,\n    readonly dataType: DataType,\n    readonly backend: Backend\n  ) {\n    this.dims = dims.slice(); // Copy to prevent accidental modification by the caller\n    this.ndim = dims.length;\n    let length = 1;\n    const strides: number[] = [];\n    for (let d = dims.length - 1; d >= 0; d--) {\n      const dim = dims[d];\n      strides.unshift(length);\n      length *= dim;\n    }\n    this.length = length;\n    this.strides = strides;\n  }\n\n  abstract getData(): Promise<DataArrayTypes>;\n\n  abstract setData(data: DataArrayTypes): Promise<void>;\n\n  abstract dispose(): void;\n}\n" }, { "alpha_fraction": 0.4526176154613495, "alphanum_fraction": 0.46752816438674927, "avg_line_length": 23.338708877563477, "blob_id": "f3c806f5c3f1e0c33adf66136dad34a6d32e1085", "content_id": "ccfeb5261e2b54a2a07f8290465f89dc6fe49479", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 3042, "license_type": "permissive", "max_line_length": 80, "num_lines": 124, "path": "/src/descriptor_runner/operators/cpu/operators/standard/maxpool.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { MaxPool } from \"../../../base/maxpool\";\n\n// Version 11\nclass CpuMaxPool extends MaxPool {\n  constructor() {\n    super(\"cpu\");\n  }\n\n  async run(\n    context: WebDNNCPUContext,\n    inputs: Tensor[],\n    nOutputs: number\n  ): Promise<Tensor[]> {\n    context.assertsCPUTensorArray(inputs);\n    const inputX = inputs[0];\n    if (nOutputs !== 1) {\n      // TODO: support Indices output\n      throw new Error(\"MaxPool: output indices is not yet supported\");\n    }\n    // TODO: support cases other than 2D\n    if (inputX.ndim !== 4) {\n      throw new Error(\"MaxPool other than 2D is not yet supported\");\n    }\n    const {\n      batch,\n      dilations,\n      kernelShape,\n      pads,\n      strides,\n      inShape,\n      outShape,\n      ch,\n    } = this.calcShape(inputX.dims),\n      outputData = new Float32Array(batch * outShape[0] * outShape[1] * ch);\n    this.maxpool(\n      inputX.data as Float32Array,\n      outputData,\n      batch,\n      dilations,\n      kernelShape,\n      pads,\n      strides,\n      inShape,\n      outShape,\n      ch\n    );\n    const output = context.emptyTensor(\n      [batch, ch, outShape[0], outShape[1]],\n      
\"float32\",\n      outputData\n    );\n    return [output];\n  }\n\n  private maxpool(\n    dX: Float32Array,\n    dI: Float32Array,\n    batch: number,\n    dilations: number[],\n    kernelShape: number[],\n    pads: number[],\n    strides: number[],\n    inShape: number[],\n    outShape: number[],\n    ch: number\n  ): void {\n    /*\n     *Batch,\n     *dilations,\n     *kernelShape,\n     *pads,\n     *strides,\n     *inShape,\n     *outShape,\n     *ch,\n     */\n    let idx = 0;\n    for (let b = 0; b < batch; b++) {\n      for (let c = 0; c < ch; c++) {\n        for (let oy = 0; oy < outShape[0]; oy++) {\n          for (let ox = 0; ox < outShape[1]; ox++) {\n            let mv = -Infinity;\n            for (let ky = 0; ky < kernelShape[0]; ky++) {\n              for (let kx = 0; kx < kernelShape[1]; kx++) {\n                const iny = oy * strides[0] - pads[0] + ky * dilations[0],\n                  inx = ox * strides[1] - pads[1] + kx * dilations[1];\n                if (\n                  iny >= 0 &&\n                  iny < inShape[0] &&\n                  inx >= 0 &&\n                  inx < inShape[1]\n                ) {\n                  const xidx =\n                    ((b * ch + c) * inShape[0] + iny) * inShape[1] + inx,\n                    v = dX[xidx];\n                  if (v > mv) {\n                    mv = v;\n                    // Max position: output xidx here\n                  }\n                }\n              }\n            }\n\n            dI[idx++] = mv;\n          }\n        }\n      }\n    }\n  }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n  return [\n    {\n      opType: \"MaxPool\",\n      backend: \"cpu\",\n      opsetMin: 1,\n      factory: () => new CpuMaxPool(),\n    },\n  ];\n}\n" }, { "alpha_fraction": 0.47444725036621094, "alphanum_fraction": 0.49256977438926697, "avg_line_length": 25.30137062072754, "blob_id": "afcb7048404541c8c3822890b7d4faf620d92c52", "content_id": "d7efbcebea30185500e9d6d9b70f65bbe56c80a2", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 5518, "license_type": "permissive", "max_line_length": 80, "num_lines": 211, "path": "/src/descriptor_runner/operators/cpu/operators/standard/unary.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import {\n  DataArrayConstructor,\n  DataType,\n} from \"../../../../interface/core/constants\";\nimport { OperatorImpl } from \"../../../operatorImpl\";\nimport { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\nclass CPUUnary extends OperatorImpl {\n  constructor(\n    private op: (value: number) => number,\n    private allowDataTypes: DataType[]\n  ) {\n    super(\"cpu\");\n  }\n\n  async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n    context.assertsCPUTensorArray(inputs);\n    const input = inputs[0];\n    if (!this.allowDataTypes.includes(input.dataType)) {\n      throw new Error(`Unary: DataType ${input.dataType} not supported`);\n    }\n    const newData = new DataArrayConstructor[input.dataType](input.data.length),\n      { op } = this;\n    for (let i = 0; i < newData.length; i++) {\n      newData[i] = op(input.data[i]);\n    }\n    const output = context.emptyTensor(input.dims, input.dataType, newData);\n    return [output];\n  }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n  return [\n    {\n      opType: \"Abs\",\n      backend: \"cpu\",\n      opsetMin: 1,\n      factory: () =>\n        new CPUUnary((value) => Math.abs(value), [\"float32\", \"int32\"]),\n    },\n    {\n      opType: \"Acos\",\n      backend: \"cpu\",\n      opsetMin: 1,\n      factory: () => new CPUUnary((value) => Math.acos(value), [\"float32\"]),\n    },\n    {\n      opType: \"Acosh\",\n      backend: \"cpu\",\n      opsetMin: 1,\n      factory: () => new CPUUnary((value) => Math.acosh(value), [\"float32\"]),\n    },\n    {\n      opType: \"Asin\",\n      backend: \"cpu\",\n      opsetMin: 1,\n      factory: () => new CPUUnary((value) => Math.asin(value), 
[\"float32\"]),\n },\n {\n opType: \"Asinh\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new CPUUnary((value) => Math.asinh(value), [\"float32\"]),\n },\n {\n opType: \"Atan\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new CPUUnary((value) => Math.atan(value), [\"float32\"]),\n },\n {\n opType: \"Atanh\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new CPUUnary((value) => Math.atanh(value), [\"float32\"]),\n },\n {\n opType: \"Ceil\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new CPUUnary((value) => Math.ceil(value), [\"float32\"]),\n },\n {\n opType: \"Cos\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new CPUUnary((value) => Math.cos(value), [\"float32\"]),\n },\n {\n opType: \"Cosh\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new CPUUnary((value) => Math.cosh(value), [\"float32\"]),\n },\n {\n opType: \"Exp\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new CPUUnary((value) => Math.exp(value), [\"float32\"]),\n },\n {\n opType: \"Floor\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new CPUUnary((value) => Math.floor(value), [\"float32\"]),\n },\n {\n opType: \"HardSwish\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () =>\n new CPUUnary(\n (value) => {\n if (value <= -3) {\n return 0;\n } else if (value >= 3) {\n return value;\n } else {\n return (value * (value + 3)) / 6;\n }\n },\n [\"float32\"]\n ),\n },\n {\n opType: \"Log\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new CPUUnary((value) => Math.log(value), [\"float32\"]),\n },\n {\n opType: \"Neg\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new CPUUnary((value) => -value, [\"float32\", \"int32\"]),\n },\n {\n opType: \"Reciprocal\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new CPUUnary((value) => 1 / value, [\"float32\"]),\n },\n {\n opType: \"Relu\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () =>\n new CPUUnary((value) => Math.max(value, 0), [\"float32\", \"int32\"]),\n },\n {\n opType: \"Round\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new CPUUnary((value) => Math.round(value), [\"float32\"]),\n },\n {\n opType: \"Sigmoid\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () =>\n new CPUUnary((value) => (Math.tanh(value / 2) + 1) / 2, [\"float32\"]),\n },\n {\n opType: \"Sign\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () =>\n new CPUUnary((value) => Math.sign(value), [\"float32\", \"int32\"]),\n },\n {\n opType: \"Sin\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new CPUUnary((value) => Math.sin(value), [\"float32\"]),\n },\n {\n opType: \"Softplus\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () =>\n new CPUUnary((value) => Math.log(Math.exp(value) + 1), [\"float32\"]),\n },\n {\n opType: \"Softsign\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () =>\n new CPUUnary((value) => value / (1 + Math.abs(value)), [\"float32\"]),\n },\n {\n opType: \"Sqrt\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new CPUUnary((value) => Math.sqrt(value), [\"float32\"]),\n },\n {\n opType: \"Tan\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new CPUUnary((value) => Math.tan(value), [\"float32\"]),\n },\n {\n opType: \"Tanh\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new CPUUnary((value) => Math.tanh(value), [\"float32\"]),\n },\n ];\n}\n" }, { "alpha_fraction": 0.6136363744735718, "alphanum_fraction": 0.6278409361839294, "avg_line_length": 26.60784339904785, "blob_id": "c19733ff358ac8877217d641faec787342f14c5d", "content_id": "324c1decd70c0ab9bb0d2439484010cf1ee59a4a", 
"detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1408, "license_type": "permissive", "max_line_length": 80, "num_lines": 51, "path": "/src/descriptor_runner/operators/cpu/operators/standard/unsqueeze.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { Unsqueeze1, Unsqueeze13 } from \"../../../base/unsqueeze\";\nimport { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\nexport class CPUUnsqueeze1 extends Unsqueeze1 {\n constructor() {\n super(\"cpu\");\n }\n\n async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsCPUTensorArray(inputs);\n const input = inputs[0],\n newShape = this.calcShape(input),\n output = context.emptyTensor(newShape, input.dataType, input.data);\n return [output];\n }\n}\n\nexport class CPUUnsqueeze13 extends Unsqueeze13 {\n constructor() {\n super(\"cpu\");\n }\n\n async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsCPUTensorArray(inputs);\n const input = inputs[0],\n axes = inputs[1],\n newShape = this.calcShape(input, axes),\n output = context.emptyTensor(newShape, input.dataType, input.data);\n return [output];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Unsqueeze\",\n backend: \"cpu\",\n opsetMin: 13,\n factory: () => new CPUUnsqueeze13(),\n },\n {\n opType: \"Unsqueeze\",\n backend: \"cpu\",\n opsetMin: 1,\n opsetMax: 13,\n factory: () => new CPUUnsqueeze1(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.6652892827987671, "alphanum_fraction": 0.6652892827987671, "avg_line_length": 15.133333206176758, "blob_id": "a6f0edeb19882e02b2eca8da87e61814cb9f6ab1", "content_id": "e9a1fda5b5c70aea94e25a3f4a200755f30c9ecb", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 242, "license_type": "permissive", "max_line_length": 59, "num_lines": 15, "path": "/src/shader/wasm/src/core/allocation.cpp", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "#include <cstdlib>\n#include <emscripten.h>\n\nextern \"C\"\n{\n void* EMSCRIPTEN_KEEPALIVE webdnn_malloc(int byte_length)\n {\n return malloc((size_t)byte_length);\n }\n\n void EMSCRIPTEN_KEEPALIVE webdnn_free(void *buf)\n {\n free(buf);\n }\n}\n" }, { "alpha_fraction": 0.572242021560669, "alphanum_fraction": 0.5900355577468872, "avg_line_length": 25.50943374633789, "blob_id": "fd72bf6e7063202c5ebf459a1fc245450836297d", "content_id": "125626880a13cd054ad91c3f7f8addfd34ff2329", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1433, "license_type": "permissive", "max_line_length": 80, "num_lines": 53, "path": "/src/descriptor_runner/operators/cpu/operators/standard/globalaveragepool.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { OperatorImpl } from \"../../../operatorImpl\";\nimport { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { averagepool } from \"../../rawcomputation/averagepool\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\nexport class CpuGlobalAveragePool extends OperatorImpl {\n constructor() {\n 
super(\"cpu\");\n }\n\n async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsCPUTensorArray(inputs);\n const inputX = inputs[0];\n // TODO: 2D以外対応\n if (inputX.ndim !== 4) {\n throw new Error(\"MaxPool other than 2D is not yet supported\");\n }\n const batch = inputX.dims[0],\n ch = inputX.dims[1],\n inShape = [inputX.dims[2], inputX.dims[3]],\n outputData = new Float32Array(batch * ch);\n averagepool(\n inputX.data as Float32Array,\n outputData,\n true, // わずかに計算量が減る\n batch,\n inShape,\n [0, 0, 0, 0],\n [1, 1],\n inShape,\n [1, 1],\n ch\n );\n const output = context.emptyTensor(\n [batch, ch, 1, 1],\n \"float32\",\n outputData\n );\n return [output];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"GlobalAveragePool\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new CpuGlobalAveragePool(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.7117516398429871, "alphanum_fraction": 0.7117516398429871, "avg_line_length": 27.1875, "blob_id": "85b8a49d1a02a84eb8b2faae8fc68f0ad7ff7844", "content_id": "21dbd7a9907de4cdfa4740682c9f1832210a3db3", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 451, "license_type": "permissive", "max_line_length": 82, "num_lines": 16, "path": "/src/descriptor_runner/separateBuild/operatorCPU.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { getOpEntries as getOpEntriesCPU } from \"../operators/cpu/opEntriesAll\";\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\ndeclare let WebDNN: any;\n\nfunction injectOperators() {\n if (WebDNN.injectOperators) {\n WebDNN.injectOperators({ operatorEntries: [...getOpEntriesCPU()] });\n } else {\n console.error(\n \"WebDNN.injectOperators not found. 
webdnn-core.js seems to be not imported.\"\n );\n }\n}\n\ninjectOperators();\n" }, { "alpha_fraction": 0.557823121547699, "alphanum_fraction": 0.5712224245071411, "avg_line_length": 25.801105499267578, "blob_id": "b1ffc1809a3ae476ee53a250ba527e58a30f91b8", "content_id": "eb7a3471edfac660d2d1fafeb48d33db4431b233", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 4873, "license_type": "permissive", "max_line_length": 80, "num_lines": 181, "path": "/src/descriptor_runner/operators/webgl/operators/standard/reduce.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { onnx } from \"onnx-proto\";\nimport { OperatorImpl } from \"../../../operatorImpl\";\nimport {\n WebDNNWebGLContext,\n WebGLUniformItem,\n} from \"../../../../interface/backend/webgl/webglContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { getAttrInt, getAttrInts } from \"../../../operatorUtil\";\nimport {\n shaderGenHeader,\n shaderGenOutput,\n shaderGenTensorNDGet,\n shaderGenTensorNDGetUniformItem,\n shaderGenTensorOutputCoordsWithReturn,\n shaderGenTensorOutputUniform,\n shaderGenTensorOutputUniformItem,\n} from \"../../shaderHelper\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\n// Opset 1\nexport class ReduceOp extends OperatorImpl {\n axes!: number[];\n\n keepdims!: boolean;\n\n constructor(\n private opType: string,\n private shaderInit: string,\n private shaderAccum: string,\n private shaderOutput: string\n ) {\n super(\"webgl\");\n }\n\n initialize(attribute: onnx.IAttributeProto[]): void {\n super.initialize(attribute);\n this.axes = getAttrInts(attribute, \"axes\", []);\n this.keepdims = getAttrInt(attribute, \"keepdims\", 1) !== 0;\n }\n\n async run(context: WebDNNWebGLContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsWebGLTensorArray(inputs);\n const input = inputs[0];\n if (this.axes.length !== 1) {\n throw new Error(`${this.opType}: only single axis is supported`);\n }\n let axis = this.axes[0];\n if (axis < 0) {\n axis += input.ndim;\n }\n if (axis !== input.ndim - 1) {\n throw new Error(\n `${this.opType}: currently only reducing final axis is supported`\n );\n }\n // 最終軸のreductionに特化した実装\n const reductionLength = input.dims[axis],\n outerLength = input.length / reductionLength,\n outShape = input.dims.slice();\n if (this.keepdims) {\n outShape[axis] = 1;\n } else {\n outShape.pop();\n }\n const output = context.emptyTensor(outShape, input.dataType),\n kernelName = `reduceop_${this.opType}_${reductionLength}`,\n kernelSource = `${shaderGenHeader(context.webgl2)}\n\n#define reductionLength ${reductionLength}\n#define reductionMul ${1 / reductionLength}\n${shaderGenTensorOutputUniform(1)}\n\n${shaderGenTensorNDGet(\"tex_input\", 2, context.webgl2)}\n\nvoid main() {\n ${shaderGenTensorOutputCoordsWithReturn(1)}\n float s = ${this.shaderInit}\n for (int i = 0; i < reductionLength; i++) {\n float v = get_tex_input(tex_output_0, i);\n ${this.shaderAccum}\n }\n ${this.shaderOutput}\n ${shaderGenOutput(\"s\", context.webgl2)}\n return;\n}\n`;\n context.addKernel(kernelName, kernelSource);\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input\",\n [reductionLength, 1],\n input,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem(\n [outerLength],\n output,\n context.webgl2\n ),\n ];\n await context.runKernel(\n kernelName,\n [{ tensor: input, name: \"tex_input\" }],\n output,\n uniforms\n 
);\n return [output];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"ReduceL1\",\n backend: \"webgl\",\n opsetMin: 1,\n factory: () => new ReduceOp(\"ReduceL1\", \"0.0;\", \"s += abs(v);\", \"\"),\n },\n {\n opType: \"ReduceL2\",\n backend: \"webgl\",\n opsetMin: 1,\n factory: () =>\n new ReduceOp(\"ReduceL2\", \"0.0;\", \"s += v * v;\", \"s = sqrt(s);\"),\n },\n {\n opType: \"ReduceLogSum\",\n backend: \"webgl\",\n opsetMin: 1,\n factory: () =>\n new ReduceOp(\"ReduceLogSum\", \"0.0;\", \"s += v;\", \"s = log(s);\"),\n },\n {\n opType: \"ReduceLogSumExp\",\n backend: \"webgl\",\n opsetMin: 1,\n factory: () =>\n new ReduceOp(\"ReduceLogSumExp\", \"0.0;\", \"s += exp(v);\", \"s = log(s);\"),\n },\n {\n opType: \"ReduceMax\",\n backend: \"webgl\",\n opsetMin: 1,\n factory: () =>\n new ReduceOp(\"ReduceMax\", \"-65536.0;\", \"if (v > s) { s = v; }\", \"\"),\n },\n {\n opType: \"ReduceMean\",\n backend: \"webgl\",\n opsetMin: 1,\n factory: () =>\n new ReduceOp(\"ReduceMean\", \"0.0;\", \"s += v;\", \"s *= reductionMul;\"),\n },\n {\n opType: \"ReduceMin\",\n backend: \"webgl\",\n opsetMin: 1,\n factory: () =>\n new ReduceOp(\"ReduceMin\", \"65536.0;\", \"if (v < s) { s = v; }\", \"\"),\n },\n {\n opType: \"ReduceProd\",\n backend: \"webgl\",\n opsetMin: 1,\n factory: () => new ReduceOp(\"ReduceProd\", \"1.0;\", \"s *= v;\", \"\"),\n },\n {\n opType: \"ReduceSum\",\n backend: \"webgl\",\n opsetMin: 1,\n opsetMax: 13,\n factory: () => new ReduceOp(\"ReduceSum\", \"0.0;\", \"s += v;\", \"\"),\n },\n {\n opType: \"ReduceSumSquare\",\n backend: \"webgl\",\n opsetMin: 1,\n factory: () => new ReduceOp(\"ReduceSumSquare\", \"0.0;\", \"s += v * v;\", \"\"),\n },\n ];\n}\n" }, { "alpha_fraction": 0.6256186366081238, "alphanum_fraction": 0.6497816443443298, "avg_line_length": 26.70161247253418, "blob_id": "501674a7ff5b192351bd364fd6bb8f4e7ecf3a86", "content_id": "b22b5ee28f1c6e2ef07c994fcc53bbeb01f9b9e7", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 3435, "license_type": "permissive", "max_line_length": 77, "num_lines": 124, "path": "/example/resnet/index.js", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "let resultCanvas, srcCanvas;\nlet srcImageSize;\nlet inputArrays, runner;\n\nasync function loadModel(directory) {\n const optimized =location.pathname.includes(\"optimized\");\n const usp = new URLSearchParams(location.search);\n const backendOrder = (usp.get(\"backend\") || \"webgl\").split(\",\");\n if (!backendOrder.includes(\"cpu\")) {\n backendOrder.push(\"cpu\");\n }\n const options = {backendOrder};\n if (optimized) {\n directory += \"optimized/\"\n options.optimized = true;\n options.progressCallback = (loaded, total) => {\n updateMessage(`Loading model: ${loaded} / ${total} bytes`);\n };\n }\n runner = await WebDNN.load(directory, options);\n}\n\nfunction updateSrcImage(image) {\n srcImageSize = { width: image.width, height: image.height };\n srcCanvas.width = srcImageSize.width;\n srcCanvas.height = srcImageSize.height;\n resultCanvas.width = srcImageSize.width;\n resultCanvas.height = srcImageSize.height;\n const srcCtx = srcCanvas.getContext(\"2d\");\n srcCtx.drawImage(image, 0, 0);\n const resultCtx = resultCanvas.getContext(\"2d\");\n resultCtx.drawImage(image, 0, 0);\n}\n\nfunction loadDefaultImage() {\n const image = new Image();\n image.onload = () => {\n updateSrcImage(image);\n };\n image.src = `output/000000039769.jpg`;\n}\n\nfunction 
updateMessage(message) {\n document.getElementById(\"msg\").innerText = message;\n}\n\nfunction initDragDrop() {\n resultCanvas.addEventListener(\"dragover\", (event) => {\n event.preventDefault();\n });\n\n resultCanvas.addEventListener(\"drop\", (event) => {\n event.preventDefault();\n const file = event.dataTransfer.files[0];\n if (file) {\n const image = new Image();\n const reader = new FileReader();\n reader.onload = (readerEv) => {\n image.onload = () => {\n updateSrcImage(image);\n };\n image.src = readerEv.target.result;\n };\n reader.readAsDataURL(file);\n }\n });\n}\n\nwindow.addEventListener(\"DOMContentLoaded\", async () => {\n srcCanvas = document.getElementById(\"source\");\n resultCanvas = document.getElementById(\"result\");\n updateMessage(\"Loading model\");\n\n initDragDrop();\n loadDefaultImage();\n await loadModel(\"output/\");\n updateMessage(`Model loaded (backend: ${runner.backendName})`);\n});\n\nfunction displayTopClasses(pred_scores) {\n const records = [];\n for (let i = 0; i < imagenet_classes.length; i++) {\n records.push([imagenet_classes[i], pred_scores.getValue([0, i])]);\n }\n records.sort((a, b) => b[1] - a[1]); //sort in reverse order of probability\n console.log(records);\n\n const tbody = document.getElementById(\"result-lines\");\n let innerHTML = \"\";\n\n for (let i = 0; i < 5; i++) {\n innerHTML += `<tr><td>${records[i][0]}</td><td>${\n (records[i][1] * 100) | 0\n }%</td></tr>`;\n }\n tbody.innerHTML = innerHTML;\n}\n\nasync function run() {\n const timeStart = Date.now();\n const w = 224;\n const h = 224;\n\n const imageArray = await WebDNN.Image.getImageArray(srcCanvas, {\n dstW: w,\n dstH: h,\n color: WebDNN.Image.Color.RGB,\n order: WebDNN.Image.Order.CHW,\n bias: [0.485 * 255, 0.456 * 255, 0.406 * 255],\n scale: [0.229 * 255, 0.224 * 255, 0.225 * 255],\n });\n const transformedImage = new WebDNN.CPUTensor(\n [1, 3, h, w],\n \"float32\",\n imageArray\n );\n\n const [pred_scores] = await runner.run([transformedImage]);\n\n const timeElapsed = Date.now() - timeStart;\n displayTopClasses(pred_scores);\n\n updateMessage(`Elapsed: ${timeElapsed} ms`);\n}\n" }, { "alpha_fraction": 0.6168224215507507, "alphanum_fraction": 0.6214953064918518, "avg_line_length": 22.135135650634766, "blob_id": "7a5ba1234769181963833e75ac0aa52ff74ae985", "content_id": "f4595e3bea96e295767c6be74a1ebfdda80b5618", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 856, "license_type": "permissive", "max_line_length": 80, "num_lines": 37, "path": "/src/descriptor_runner/core/outputProxy.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import {\n DataArrayConstructor,\n DataArrayTypes,\n DataType,\n} from \"../interface/core/constants\";\nimport { arrayProd } from \"../operators/operatorUtil\";\n\nexport class OutputProxy implements ArrayLike<number> {\n readonly length: number;\n\n [n: number]: number;\n\n readonly dims: ReadonlyArray<number>;\n\n constructor(dims: ReadonlyArray<number>, public readonly dataType: DataType) {\n this.dims = dims;\n const length = arrayProd(dims);\n this.length = length;\n for (let i = 0; i < length; i++) {\n this[i] = 0;\n }\n }\n\n set(array: ArrayLike<number>): void {\n for (let i = 0; i < array.length; i++) {\n this[i] = array[i];\n }\n }\n\n toActual(): DataArrayTypes {\n const ta = new DataArrayConstructor[this.dataType](this.length);\n for (let i = 0; i < this.length; i++) {\n ta[i] = this[i];\n }\n return ta;\n }\n}\n" }, { 
"alpha_fraction": 0.7751196026802063, "alphanum_fraction": 0.7751196026802063, "avg_line_length": 25.125, "blob_id": "15eb9be8f88d834a90895076767189dfa02f6d7d", "content_id": "0fe6681d7070ceaa3fca68cd31c017e41951c8cc", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 209, "license_type": "permissive", "max_line_length": 44, "num_lines": 8, "path": "/src/descriptor_runner/interface/backend/wasm/wasmTensor.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { Tensor } from \"../../core/tensor\";\n\nexport interface WasmSharedBufferInterface {\n backendBufferId: number;\n}\nexport interface WasmTensor extends Tensor {\n sharedBuffer: WasmSharedBufferInterface;\n}\n" }, { "alpha_fraction": 0.5117457509040833, "alphanum_fraction": 0.5228005647659302, "avg_line_length": 27.5657901763916, "blob_id": "85cd8a03b5af9f93b3000d624a476938ee139431", "content_id": "ea5919c0c81b8eb0a487c1fa20f73fe730c32824", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 2171, "license_type": "permissive", "max_line_length": 72, "num_lines": 76, "path": "/src/descriptor_runner/operators/base/reshape5.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { onnx } from \"onnx-proto\";\nimport { OperatorImpl } from \"../operatorImpl\";\nimport { CPUTensor } from \"../../interface/backend/cpu/cpuTensor\";\nimport { Tensor } from \"../../interface/core/tensor\";\nimport { getAttrInt } from \"../operatorUtil\";\n\n// Opset under 5 takes shape as attribute. not compatible.\nexport abstract class Reshape5 extends OperatorImpl {\n allowzero!: boolean;\n\n initialize(attribute: onnx.IAttributeProto[]): void {\n super.initialize(attribute);\n this.allowzero = getAttrInt(attribute, \"allowzero\", 0) !== 0;\n }\n\n protected calcShape(input: Tensor, shapeTensor: CPUTensor): number[] {\n const shapeInput = Array.from(shapeTensor.data);\n\n let computedShape: number[];\n if (this.allowzero) {\n let explicitProd = 1,\n minusDim = -1;\n shapeInput.forEach((s, i) => {\n if (s > 0) {\n explicitProd *= s;\n } else if (s === -1) {\n if (minusDim >= 0) {\n throw new Error(\"Reshape: multiple -1 dimensions\");\n }\n minusDim = i;\n }\n });\n if (minusDim >= 0 && explicitProd <= 0) {\n throw new Error();\n }\n const minusDimValue = input.length / explicitProd;\n computedShape = shapeInput.map((s) => {\n if (s >= 0) {\n return s;\n }\n return minusDimValue;\n });\n } else {\n let explicitProd = 1,\n minusDim = -1;\n shapeInput.forEach((s, i) => {\n if (s > 0) {\n explicitProd *= s;\n } else if (s === 0) {\n explicitProd *= input.dims[i];\n } else {\n if (s !== -1) {\n throw new Error();\n }\n if (minusDim >= 0) {\n throw new Error(\"Reshape: multiple -1 dimensions\");\n }\n minusDim = i;\n }\n });\n if (minusDim >= 0 && explicitProd <= 0) {\n throw new Error();\n }\n const minusDimValue = input.length / explicitProd;\n computedShape = shapeInput.map((s, i) => {\n if (s > 0) {\n return s;\n } else if (s === 0) {\n return input.dims[i];\n }\n return minusDimValue;\n });\n }\n return computedShape;\n }\n}\n" }, { "alpha_fraction": 0.5339635610580444, "alphanum_fraction": 0.543767511844635, "avg_line_length": 26.7281551361084, "blob_id": "edbf3e092dba9fa27749f5a389417bfd3e465d35", "content_id": "0ef04d846a9478caedf7e521b2c8c0215e3fac95", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": 
"TypeScript", "length_bytes": 2856, "license_type": "permissive", "max_line_length": 80, "num_lines": 103, "path": "/src/descriptor_runner/operators/cpu/operators/standard/split.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { CPUTensor } from \"../../../..\";\nimport { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { Split13, Split2 } from \"../../../base/split\";\n\nclass CPUSplit2 extends Split2 {\n constructor() {\n super(\"cpu\");\n }\n\n async run(\n context: WebDNNCPUContext,\n inputs: Tensor[],\n nOutputs: number\n ): Promise<Tensor[]> {\n context.assertsCPUTensorArray(inputs);\n const input = inputs[0],\n {\n eachOutputParams,\n outerLength,\n innerLength,\n inOuterStride,\n inConcatStride,\n } = this.calcShape(input, nOutputs),\n outputs: CPUTensor[] = [];\n for (let i = 0; i < nOutputs; i++) {\n const { dim, offset, outShape, outerStride, splitStride } =\n eachOutputParams[i],\n ot = context.emptyTensor(outShape, input.dataType);\n for (let c = 0; c < dim; c++) {\n for (let outer = 0; outer < outerLength; outer++) {\n for (let inner = 0; inner < innerLength; inner++) {\n ot.data[c * splitStride + outer * outerStride + inner] =\n input.data[\n (c + offset) * inConcatStride + outer * inOuterStride + inner\n ];\n }\n }\n }\n outputs.push(ot);\n }\n return outputs;\n }\n}\n\nclass CPUSplit13 extends Split13 {\n constructor() {\n super(\"cpu\");\n }\n\n async run(\n context: WebDNNCPUContext,\n inputs: Tensor[],\n nOutputs: number\n ): Promise<Tensor[]> {\n context.assertsCPUTensorArray(inputs);\n const input = inputs[0],\n splitTensor = inputs[1],\n {\n eachOutputParams,\n outerLength,\n innerLength,\n inOuterStride,\n inConcatStride,\n } = this.calcShape(input, nOutputs, splitTensor),\n outputs: CPUTensor[] = [];\n for (let i = 0; i < nOutputs; i++) {\n const { dim, offset, outShape, outerStride, splitStride } =\n eachOutputParams[i],\n ot = context.emptyTensor(outShape, input.dataType);\n for (let c = 0; c < dim; c++) {\n for (let outer = 0; outer < outerLength; outer++) {\n for (let inner = 0; inner < innerLength; inner++) {\n ot.data[c * splitStride + outer * outerStride + inner] =\n input.data[\n (c + offset) * inConcatStride + outer * inOuterStride + inner\n ];\n }\n }\n }\n outputs.push(ot);\n }\n return outputs;\n }\n}\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Split\",\n backend: \"cpu\",\n opsetMin: 13,\n factory: () => new CPUSplit13(),\n },\n {\n opType: \"Split\",\n backend: \"cpu\",\n opsetMin: 1,\n opsetMax: 13,\n factory: () => new CPUSplit2(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.5790147185325623, "alphanum_fraction": 0.5871188044548035, "avg_line_length": 30.260000228881836, "blob_id": "12c0c7d60daf02dc00efefb033f0df2b0b224760", "content_id": "d0072995b1ffbfd1abcbdd935627995b42d89317", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 4701, "license_type": "permissive", "max_line_length": 336, "num_lines": 150, "path": "/src/descriptor_runner/operators/webgl/operators/standard/pad11.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport { Pad11 } from \"../../../base/pad11\";\nimport 
{\n WebDNNWebGLContext,\n WebGLUniformItem,\n} from \"../../../../interface/backend/webgl/webglContext\";\nimport {\n shaderGenHeader,\n shaderGenOutput,\n shaderGenTensorNDGet,\n shaderGenTensorNDGetUniformItem,\n shaderGenTensorOutputCoordsWithReturn,\n shaderGenTensorOutputUniform,\n shaderGenTensorOutputUniformItem,\n} from \"../../shaderHelper\";\nimport { arange } from \"../../../../util\";\n\n/*\n * Opset 11\n * opset 2は互換性なし\n */\nclass WebGLPad11 extends Pad11 {\n constructor() {\n super(\"webgl\");\n }\n\n async run(context: WebDNNWebGLContext, inputs: Tensor[]): Promise<Tensor[]> {\n const [input, shapeTensor, constantValueTensor] = inputs;\n context.assertsWebGLTensor(input);\n context.cpuContext.assertsCPUTensor(shapeTensor);\n const { outputShape: outShape, pads } = this.calcShape(input, shapeTensor);\n let constantValue = 0;\n if (constantValueTensor) {\n context.cpuContext.assertsCPUTensor(constantValueTensor);\n constantValue = constantValueTensor.data[0];\n }\n const output = context.emptyTensor(outShape, \"float32\");\n const kernelName = `pad_${outShape.length}_${this.mode}`;\n const padUniforms = arange(outShape.length)\n .map((dim) => `uniform int pad${dim};`)\n .join(\"\");\n const inShapeUniforms = arange(outShape.length)\n .map((dim) => `uniform int inShape${dim};`)\n .join(\"\");\n const constantUniform =\n this.mode === \"constant\" ? \"uniform float padConstant;\" : \"\";\n const tex_input_idxs = arange(outShape.length)\n .map((dim) => `ti${dim}`)\n .join(\",\");\n const minusPad = arange(outShape.length)\n .map((dim) => `int ti${dim} = tex_output_${dim} - pad${dim};`)\n .join(\"\");\n const outOfBoundCond = arange(outShape.length)\n .map((dim) => `ti${dim} < 0 || ti${dim} >= inShape${dim}`)\n .join(\"||\");\n let indexAdjuster: string;\n let valueGetter: string;\n switch (this.mode) {\n case \"constant\":\n indexAdjuster = \"\";\n valueGetter = `if (${outOfBoundCond}) {s = padConstant;} else {s = get_tex_input(${tex_input_idxs});}`;\n break;\n case \"edge\":\n indexAdjuster = arange(outShape.length)\n .map(\n (dim) =>\n `if (ti${dim} < 0) {ti${dim} = 0;} else if (ti${dim} >= inShape${dim}) {ti${dim} = inShape${dim} - 1;}`\n )\n .join(\"\");\n valueGetter = `s = get_tex_input(${tex_input_idxs});`;\n break;\n case \"reflect\":\n indexAdjuster = arange(outShape.length)\n .map(\n (dim) =>\n `if (ti${dim} < 0) {ti${dim} = pad_mod(-ti${dim}, inShape${dim} * 2 - 2); if (ti${dim} >= inShape${dim}) {ti${dim} = inShape${dim} * 2 - ti${dim} - 2;}} else if (ti${dim} >= inShape${dim}) {ti${dim} = pad_mod(ti${dim}, inShape${dim} * 2 - 2); if (ti${dim} >= inShape${dim}) {ti${dim} = inShape${dim} * 2 - ti${dim} - 2;}}`\n )\n .join(\"\");\n valueGetter = `s = get_tex_input(${tex_input_idxs});`;\n break;\n }\n const kernelSource = `${shaderGenHeader(context.webgl2)}\nint pad_mod(int x, int y) {\n int z = x / y;\n return x - z * y;\n}\n${padUniforms}\n${constantUniform}\n${inShapeUniforms}\n${shaderGenTensorOutputUniform(outShape.length)}\n\n${shaderGenTensorNDGet(\"tex_input\", input.ndim, context.webgl2)}\n\nvoid main() {\n${shaderGenTensorOutputCoordsWithReturn(outShape.length)}\n${minusPad}\n${indexAdjuster}\nfloat s;\n${valueGetter}\n${shaderGenOutput(\"s\", context.webgl2)}\nreturn;\n}\n`;\n context.addKernel(kernelName, kernelSource);\n\n const uniforms: WebGLUniformItem[] = [\n ...shaderGenTensorNDGetUniformItem(\n \"tex_input\",\n input.strides,\n input,\n context.webgl2\n ),\n ...shaderGenTensorOutputUniformItem(outShape, output, context.webgl2),\n ];\n for (let dim = 
0; dim < outShape.length; dim++) {\n uniforms.push({ name: `pad${dim}`, value: pads[dim], type: \"int\" });\n uniforms.push({\n name: `inShape${dim}`,\n value: input.dims[dim],\n type: \"int\",\n });\n }\n if (this.mode === \"constant\") {\n uniforms.push({\n name: \"padConstant\",\n value: constantValue,\n type: \"float\",\n });\n }\n await context.runKernel(\n kernelName,\n [{ tensor: input, name: \"tex_input\" }],\n output,\n uniforms\n );\n return [output];\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Pad\",\n backend: \"webgl\",\n opsetMin: 11,\n factory: () => new WebGLPad11(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.4747856557369232, "alphanum_fraction": 0.48989972472190857, "avg_line_length": 23.751798629760742, "blob_id": "bfa9ceeae0bad259be7a64635c31de45f2cf92b1", "content_id": "3db1751effe97a085e97b7aea3c39ac87abdac06", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 6937, "license_type": "permissive", "max_line_length": 112, "num_lines": 278, "path": "/src/descriptor_runner/operators/webgl/operators/standard/unary.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { OperatorImpl } from \"../../../operatorImpl\";\nimport {\n WebDNNWebGLContext,\n WebGLUniformItem,\n} from \"../../../../interface/backend/webgl/webglContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport {\n shaderGenHeader,\n shaderGenOutput,\n shaderGenTensorElementwiseGet,\n shaderGenTensorElementwiseGetUniformItem,\n} from \"../../shaderHelper\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\nexport class WebGLUnary extends OperatorImpl {\n constructor(\n public kernelName: string,\n private unaryCalculationSource: string,\n private unaryCalculationSourceWebGL1?: string\n ) {\n super(\"webgl\");\n }\n\n async run(context: WebDNNWebGLContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsWebGLTensorArray(inputs);\n const input = inputs[0];\n if (input.dataType !== \"float32\") {\n throw new Error();\n }\n const outputTensor = context.emptyTensor(input.dims, \"float32\");\n // Elementwiseのアクセスにおいてテクスチャサイズが同じであることを仮定\n if (\n input.textureWidth !== outputTensor.textureWidth ||\n input.textureHeight !== outputTensor.textureHeight ||\n input.dimPerPixel !== 1\n ) {\n throw new Error();\n }\n\n /*\n * Gl_FragCoord.x: 0.5, 1.5, 2.5, ..., textureWidth-0.5\n * texture2D(textureName, vec2(x, y)): x=(0.5, 1.5, 2.5, ...) / textureWidth\n */\n if (!context.hasKernel(this.kernelName)) {\n const kernelSource = `${shaderGenHeader(context.webgl2)}\n ${shaderGenTensorElementwiseGet(\"tex_input\", context.webgl2)}\n void main() {\n float s = get_tex_input();\n ${\n !context.webgl2 && this.unaryCalculationSourceWebGL1\n ? 
this.unaryCalculationSourceWebGL1\n            : this.unaryCalculationSource\n        }\n        ${shaderGenOutput(\"v\", context.webgl2)}\n        return;\n      }\n      \`;\n      context.addKernel(this.kernelName, kernelSource);\n    }\n\n    const uniforms: WebGLUniformItem[] = [\n      ...shaderGenTensorElementwiseGetUniformItem(\n        \"tex_input\",\n        input,\n        context.webgl2\n      ),\n    ];\n\n    await context.runKernel(\n      this.kernelName,\n      [{ tensor: input, name: \"tex_input\" }],\n      outputTensor,\n      uniforms\n    );\n    return [outputTensor];\n  }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n  return [\n    {\n      opType: \"Abs\",\n      backend: \"webgl\",\n      opsetMin: 1,\n      factory: () => new WebGLUnary(\"abs\", \"float v = abs(s);\"),\n    },\n    {\n      opType: \"Acos\",\n      backend: \"webgl\",\n      opsetMin: 1,\n      factory: () => new WebGLUnary(\"acos\", \"float v = acos(s);\"),\n    },\n    {\n      opType: \"Acosh\",\n      backend: \"webgl\",\n      opsetMin: 1,\n      factory: () =>\n        new WebGLUnary(\n          \"acosh\",\n          \"float v = acosh(s);\",\n          \"float v = log(s + sqrt(s * s - 1.0));\"\n        ),\n    },\n    {\n      opType: \"Asin\",\n      backend: \"webgl\",\n      opsetMin: 1,\n      factory: () => new WebGLUnary(\"asin\", \"float v = asin(s);\"),\n    },\n    {\n      opType: \"Asinh\",\n      backend: \"webgl\",\n      opsetMin: 1,\n      factory: () =>\n        new WebGLUnary(\n          \"asinh\",\n          \"float v = asinh(s);\",\n          \"float v = log(s + sqrt(s * s + 1.0));\"\n        ),\n    },\n    {\n      opType: \"Atan\",\n      backend: \"webgl\",\n      opsetMin: 1,\n      factory: () => new WebGLUnary(\"atan\", \"float v = atan(s);\"),\n    },\n    {\n      opType: \"Atanh\",\n      backend: \"webgl\",\n      opsetMin: 1,\n      factory: () =>\n        new WebGLUnary(\n          \"atanh\",\n          \"float v = atanh(s);\",\n          \"float v = log((s + 1.0) / (1.0 - s)) * 0.5;\"\n        ),\n    },\n    {\n      opType: \"Ceil\",\n      backend: \"webgl\",\n      opsetMin: 1,\n      factory: () => new WebGLUnary(\"ceil\", \"float v = ceil(s);\"),\n    },\n    {\n      opType: \"Cos\",\n      backend: \"webgl\",\n      opsetMin: 1,\n      factory: () => new WebGLUnary(\"cos\", \"float v = cos(s);\"),\n    },\n    {\n      opType: \"Cosh\",\n      backend: \"webgl\",\n      opsetMin: 1,\n      factory: () =>\n        new WebGLUnary(\n          \"cosh\",\n          \"float v = cosh(s);\",\n          \"float v = (exp(s) + exp(-s)) * 0.5;\"\n        ),\n    },\n    {\n      opType: \"Exp\",\n      backend: \"webgl\",\n      opsetMin: 1,\n      factory: () => new WebGLUnary(\"exp\", \"float v = exp(s);\"),\n    },\n    {\n      opType: \"Floor\",\n      backend: \"webgl\",\n      opsetMin: 1,\n      factory: () => new WebGLUnary(\"floor\", \"float v = floor(s);\"),\n    },\n    {\n      opType: \"HardSwish\",\n      backend: \"webgl\",\n      opsetMin: 1,\n      factory: () =>\n        new WebGLUnary(\n          \"hardswish\",\n          \"float v; if (s <= -3.0) { v = 0.0; } else if (s >= 3.0) { v = s; } else { v = s * (s + 3.0) / 6.0; }\"\n        ),\n    },\n    {\n      opType: \"Log\",\n      backend: \"webgl\",\n      opsetMin: 1,\n      factory: () => new WebGLUnary(\"log\", \"float v = log(s);\"),\n    },\n    {\n      opType: \"Neg\",\n      backend: \"webgl\",\n      opsetMin: 1,\n      factory: () => new WebGLUnary(\"neg\", \"float v = -s;\"),\n    },\n    {\n      opType: \"Reciprocal\",\n      backend: \"webgl\",\n      opsetMin: 1,\n      factory: () => new WebGLUnary(\"reciprocal\", \"float v = 1.0 / s;\"),\n    },\n    {\n      opType: \"Relu\",\n      backend: \"webgl\",\n      opsetMin: 1,\n      factory: () => new WebGLUnary(\"relu\", \"float v = max(s, 0.0);\"),\n    },\n    {\n      opType: \"Round\",\n      backend: \"webgl\",\n      opsetMin: 1,\n      factory: () =>\n        new WebGLUnary(\n          \"round\",\n          \"float v = round(s);\",\n          \"float v = floor(s + 0.5);\"\n        ),\n    },\n    {\n      opType: \"Sigmoid\",\n      backend: \"webgl\",\n      opsetMin: 1,\n      factory: () =>\n        new WebGLUnary(\n          \"sigmoid\",\n          \"float v = (tanh(s * 0.5) + 1.0) * 0.5;\",\n          \"float v = 1.0 / (1.0 + exp(-s));\"\n        ),\n    },\n    {\n      opType: \"Sign\",\n
backend: \"webgl\",\n opsetMin: 1,\n factory: () => new WebGLUnary(\"sign\", \"float v = sign(s);\"),\n },\n {\n opType: \"Sin\",\n backend: \"webgl\",\n opsetMin: 1,\n factory: () => new WebGLUnary(\"sin\", \"float v = sin(s);\"),\n },\n {\n opType: \"Softplus\",\n backend: \"webgl\",\n opsetMin: 1,\n factory: () => new WebGLUnary(\"softplus\", \"float v = log(exp(s) + 1.0);\"),\n },\n {\n opType: \"Softsign\",\n backend: \"webgl\",\n opsetMin: 1,\n factory: () =>\n new WebGLUnary(\"softsign\", \"float v = s / (1.0 + abs(s));\"),\n },\n {\n opType: \"Sqrt\",\n backend: \"webgl\",\n opsetMin: 1,\n factory: () => new WebGLUnary(\"sqrt\", \"float v = sqrt(s);\"),\n },\n {\n opType: \"Tan\",\n backend: \"webgl\",\n opsetMin: 1,\n factory: () => new WebGLUnary(\"tan\", \"float v = tan(s);\"),\n },\n {\n opType: \"Tanh\",\n backend: \"webgl\",\n opsetMin: 1,\n factory: () =>\n new WebGLUnary(\n \"tanh\",\n \"float v = tanh(s);\",\n \"float vt = exp(-2.0 * s); float v = (1.0 - vt) / (1.0 + vt);\"\n ),\n },\n ];\n}\n" }, { "alpha_fraction": 0.5790878534317017, "alphanum_fraction": 0.6011123657226562, "avg_line_length": 23.16666603088379, "blob_id": "0c364a51377e738d117f56078b8eb4f5791dec3e", "content_id": "53e99c6e8d04da9890560db8740ec40ba6cfdc3b", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 4495, "license_type": "permissive", "max_line_length": 80, "num_lines": 186, "path": "/src/descriptor_runner/operators/cpu/operators/standard/dynamicunary.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { onnx } from \"onnx-proto\";\nimport {\n DataArrayConstructor,\n DataType,\n} from \"../../../../interface/core/constants\";\nimport { OperatorImpl } from \"../../../operatorImpl\";\nimport { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport { getAttrFloat } from \"../../../operatorUtil\";\n\nabstract class DynamicUnary extends OperatorImpl {\n constructor(public opType: string, private allowDataTypes: DataType[]) {\n super(\"cpu\");\n }\n\n protected abstract getUnaryOp(): (value: number) => number;\n\n async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsCPUTensorArray(inputs);\n const input = inputs[0];\n if (!this.allowDataTypes.includes(input.dataType)) {\n throw new Error(\n `${this.opType}: DataType ${input.dataType} not supported`\n );\n }\n const newData = new DataArrayConstructor[input.dataType](input.data.length),\n op = this.getUnaryOp();\n for (let i = 0; i < newData.length; i++) {\n newData[i] = op(input.data[i]);\n }\n const output = context.emptyTensor(input.dims, input.dataType, newData);\n return [output];\n }\n}\n\nclass Elu extends DynamicUnary {\n alpha!: number;\n\n constructor() {\n super(\"Elu\", [\"float32\"]);\n }\n\n initialize(attribute: onnx.IAttributeProto[]): void {\n super.initialize(attribute);\n this.alpha = getAttrFloat(attribute, \"alpha\", 1.0);\n }\n\n getUnaryOp(): (value: number) => number {\n const alpha = this.alpha;\n return (value: number) => {\n return value >= 0 ? 
value : (Math.exp(value) - 1) * alpha;\n };\n }\n}\n\nclass HardSigmoid extends DynamicUnary {\n alpha!: number;\n beta!: number;\n\n constructor() {\n super(\"HardSigmoid\", [\"float32\"]);\n }\n\n initialize(attribute: onnx.IAttributeProto[]): void {\n super.initialize(attribute);\n this.alpha = getAttrFloat(attribute, \"alpha\", 0.2);\n this.beta = getAttrFloat(attribute, \"beta\", 0.5);\n }\n\n getUnaryOp(): (value: number) => number {\n const alpha = this.alpha;\n const beta = this.beta;\n return (value: number) => {\n return Math.max(0, Math.min(1, value * alpha + beta));\n };\n }\n}\n\nclass LeakyRelu extends DynamicUnary {\n alpha!: number;\n\n constructor() {\n super(\"LeakyRelu\", [\"float32\"]);\n }\n\n initialize(attribute: onnx.IAttributeProto[]): void {\n super.initialize(attribute);\n this.alpha = getAttrFloat(attribute, \"alpha\", 0.01);\n }\n\n getUnaryOp(): (value: number) => number {\n const alpha = this.alpha;\n return (value: number) => {\n return value >= 0 ? value : value * alpha;\n };\n }\n}\n\nclass Selu extends DynamicUnary {\n alpha!: number;\n gamma!: number;\n\n constructor() {\n super(\"Selu\", [\"float32\"]);\n }\n\n initialize(attribute: onnx.IAttributeProto[]): void {\n super.initialize(attribute);\n this.alpha = getAttrFloat(\n attribute,\n \"alpha\",\n 1.6732632423543772848170429916717\n );\n this.gamma = getAttrFloat(\n attribute,\n \"gamma\",\n 1.0507009873554804934193349852946\n );\n }\n\n getUnaryOp(): (value: number) => number {\n const alpha = this.alpha;\n const gamma = this.gamma;\n return (value: number) => {\n return value > 0\n ? gamma * value\n : gamma * (alpha * Math.exp(value) - alpha);\n };\n }\n}\n\nclass ThresholdedRelu extends DynamicUnary {\n alpha!: number;\n\n constructor() {\n super(\"ThresholdedRelu\", [\"float32\"]);\n }\n\n initialize(attribute: onnx.IAttributeProto[]): void {\n super.initialize(attribute);\n this.alpha = getAttrFloat(attribute, \"alpha\", 1.0);\n }\n\n getUnaryOp(): (value: number) => number {\n const alpha = this.alpha;\n return (value: number) => {\n return value > alpha ? 
value : 0;\n };\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Elu\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new Elu(),\n },\n {\n opType: \"HardSigmoid\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new HardSigmoid(),\n },\n {\n opType: \"LeakyRelu\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new LeakyRelu(),\n },\n {\n opType: \"Selu\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new Selu(),\n },\n {\n opType: \"ThresholdedRelu\",\n backend: \"cpu\",\n opsetMin: 1,\n factory: () => new ThresholdedRelu(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.680672287940979, "alphanum_fraction": 0.680672287940979, "avg_line_length": 22.799999237060547, "blob_id": "d1745152f34c570bf097dc09cc29a5fa2bb29fd2", "content_id": "6cc59c5b547a05a1f7b0002d2e39482dbc2cf072", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 238, "license_type": "permissive", "max_line_length": 86, "num_lines": 10, "path": "/src/descriptor_runner/math.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "/**\n * @module webdnn/math\n * @preferred\n *\n * Module `WebDNN.Math` provides basic mathematics operations for pre/post-processing.\n */\n/** Don't Remove This comment block */\n\nexport * from \"./math/argsort\";\nexport * from \"./math/random\";\n" }, { "alpha_fraction": 0.6383561491966248, "alphanum_fraction": 0.6383561491966248, "avg_line_length": 29.41666603088379, "blob_id": "6ac87f8efd56ff2eed6e093e43c1f8831bc839cb", "content_id": "9c563692346c793a3761d9c3c288b84762838c31", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 365, "license_type": "permissive", "max_line_length": 79, "num_lines": 12, "path": "/src/graph_transpiler/webdnn/operator_shader_webgpu.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "from webdnn.operator_shader import OperatorShader\n\nclass OperatorShaderWebGPU(OperatorShader):\n ts_code: str\n shader_name: str\n glsl_code: str\n\n def __init__(self, ts_code: str, shader_name: str, glsl_code: str) -> None:\n super().__init__()\n self.ts_code = ts_code\n self.glsl_code = glsl_code\n self.shader_name = shader_name\n" }, { "alpha_fraction": 0.5996996760368347, "alphanum_fraction": 0.6098781824111938, "avg_line_length": 23.361787796020508, "blob_id": "d326828de73e366fce28b09c39963b1462550866", "content_id": "362cbd8802820b0462fbe35c0daf8876e0a641a8", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 6171, "license_type": "permissive", "max_line_length": 80, "num_lines": 246, "path": "/src/descriptor_runner/operators/operatorUtil.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import Long from \"long\";\nimport { onnx } from \"onnx-proto\";\nimport { DataArrayTypes, DataType } from \"../interface/core/constants\";\nimport { clipLong, intOrLongToInt, intOrLongToIntVector } from \"../util\";\n\nfunction getAttr(\n attribute: onnx.IAttributeProto[],\n name: string\n): onnx.IAttributeProto | null {\n for (const attr of attribute) {\n if (attr.name === name) {\n return attr;\n }\n }\n\n return null;\n}\n\nexport function getAttrFloat(\n attribute: onnx.IAttributeProto[],\n name: string,\n defaultValue: number\n): number {\n const attr = getAttr(attribute, name);\n if (!attr) {\n return defaultValue;\n }\n const v = attr.f;\n if 
(v == null) {\n    throw new Error(\`Attribute ${name} is not float\`);\n  }\n  return v;\n}\n\nexport function getAttrInt(\n  attribute: onnx.IAttributeProto[],\n  name: string,\n  defaultValue: number\n): number {\n  const attr = getAttr(attribute, name);\n  if (!attr) {\n    return defaultValue;\n  }\n  const v = attr.i;\n  if (v == null) {\n    throw new Error(\`Attribute ${name} is not int\`);\n  }\n  return intOrLongToInt(v);\n}\n\nexport function getAttrInts(\n  attribute: onnx.IAttributeProto[],\n  name: string,\n  defaultValue: number[]\n): number[] {\n  const attr = getAttr(attribute, name);\n  if (!attr) {\n    return defaultValue;\n  }\n  const v = attr.ints;\n  if (v == null) {\n    throw new Error(\`Attribute ${name} is not int\`);\n  }\n  return intOrLongToIntVector(v);\n}\n\nexport function getAttrTensor(\n  attribute: onnx.IAttributeProto[],\n  name: string\n): { data: DataArrayTypes; dataType: DataType; dims: number[] } | null {\n  const attr = getAttr(attribute, name);\n  if (!attr) {\n    return null;\n  }\n  const v = attr.t;\n  if (v == null) {\n    throw new Error(\`Attribute ${name} is not tensor\`);\n  }\n  // eslint-disable-next-line @typescript-eslint/no-non-null-assertion\n  const dims = intOrLongToIntVector(v.dims!),\n    { rawData } = v;\n  if (!rawData) {\n    throw new Error(\`rawData in TensorProto is empty\`);\n  }\n  switch (v.dataType) {\n    case onnx.TensorProto.DataType.FLOAT: {\n      const data = new Uint8Array(rawData.length);\n      data.set(rawData);\n      const ab = new Float32Array(\n        data.buffer,\n        0,\n        data.length / Float32Array.BYTES_PER_ELEMENT\n      );\n      return { dims, dataType: \"float32\", data: ab };\n    }\n    case onnx.TensorProto.DataType.INT64: {\n      // each element is 8 bytes (int64)\n      const view = new DataView(\n          rawData.buffer,\n          rawData.byteOffset,\n          rawData.byteLength\n        ),\n        ab = new Int32Array(view.byteLength / 8);\n      for (let idx = 0; idx < ab.length; idx++) {\n        ab[idx] = clipLong(\n          new Long(\n            view.getUint32(idx * 8, true),\n            view.getUint32(idx * 8 + 4, true)\n          )\n        );\n      }\n      return { dims, dataType: \"int32\", data: ab };\n    }\n    default:\n      throw new Error(\`dataType ${v.dataType} of TensorProto is not supported\`);\n  }\n}\n\nexport function getAttrString(\n  attribute: onnx.IAttributeProto[],\n  name: string,\n  defaultValue: string\n): string {\n  const attr = getAttr(attribute, name);\n  if (!attr) {\n    return defaultValue;\n  }\n  // Only ASCII chars are considered\n  const v = attr.s;\n  if (v == null) {\n    throw new Error(\`Attribute ${name} is not string\`);\n  }\n  return String.fromCharCode(...Array.from(v));\n}\n\nexport function arraySum(vec: ArrayLike<number>): number {\n  let x = 0;\n  for (let i = 0; i < vec.length; i++) {\n    x += vec[i];\n  }\n  return x;\n}\n\nexport function arrayProd(vec: ArrayLike<number>): number {\n  let x = 1;\n  for (let i = 0; i < vec.length; i++) {\n    x *= vec[i];\n  }\n  return x;\n}\n\nexport function arrayEqual(\n  vec1: ArrayLike<number>,\n  vec2: ArrayLike<number>\n): boolean {\n  if (vec1.length !== vec2.length) {\n    return false;\n  }\n\n  for (let i = 0; i < vec1.length; i++) {\n    if (vec1[i] !== vec2[i]) {\n      return false;\n    }\n  }\n\n  return true;\n}\n\nexport function calcStrides(dims: number[]): number[] {\n  const strides = [];\n  let length = 1;\n  for (let i = dims.length - 1; i >= 0; i--) {\n    strides.unshift(length);\n    length *= dims[i];\n  }\n  return strides;\n}\n\nexport function broadcastUni(\n  dimsA: ReadonlyArray<number>,\n  dimsB: ReadonlyArray<number>\n): number[] {\n  /*\n   * 行列Bを行列Aのshapeに合うようにbroadcast\n   * 行列Bのstridesを返す\n   */\n\n  if (dimsA.length < dimsB.length) {\n    throw new Error(\`Unidirectional broadcast error: 
${dimsA}, ${dimsB}`);\n }\n // Step1 次元数が合うように先頭に1を付加\n const expandedDimsB = dimsB.slice();\n while (expandedDimsB.length < dimsA.length) {\n expandedDimsB.unshift(1);\n }\n const stridesB = calcStrides(expandedDimsB);\n // Step2 行列Bの次元サイズが1の箇所はstrideを0にする\n for (let i = 0; i < dimsA.length; i++) {\n if (dimsA[i] !== expandedDimsB[i]) {\n if (expandedDimsB[i] === 1) {\n // Broadcast\n stridesB[i] = 0;\n } else {\n throw new Error(`Unidirectional broadcast error: ${dimsA}, ${dimsB}`);\n }\n }\n }\n\n return stridesB;\n}\n\nexport function broadcastMulti(allDims: ReadonlyArray<number>[]): {\n dims: number[];\n allStrides: number[][];\n} {\n // 全行列をbroadcast\n const expandedNdims = Math.max(...allDims.map((dims) => dims.length)),\n // Step1 次元数が合うように先頭に1を付加\n expandedAllDims = allDims.map((dims) => {\n const expandedDims = dims.slice();\n while (expandedDims.length < expandedNdims) {\n expandedDims.unshift(1);\n }\n return expandedDims;\n }),\n expandedDims: number[] = [];\n for (let i = 0; i < expandedNdims; i++) {\n expandedDims.push(Math.max(...expandedAllDims.map((ad) => ad[i])));\n }\n // Step2 行列の次元サイズが1の箇所はstrideを0にする\n const allStrides = expandedAllDims.map((dims) => {\n const strides = calcStrides(dims);\n for (let i = 0; i < expandedNdims; i++) {\n if (dims[i] !== expandedDims[i]) {\n if (dims[i] === 1) {\n strides[i] = 0;\n } else {\n throw new Error(`Multidirectional broadcasting error: ${allDims}`);\n }\n }\n }\n return strides;\n });\n\n return { dims: expandedDims, allStrides };\n}\n" }, { "alpha_fraction": 0.5001111030578613, "alphanum_fraction": 0.526993989944458, "avg_line_length": 25.95209503173828, "blob_id": "090dc7acbe62f20b5cd31b6abbfb6112b7f0c71a", "content_id": "1707c1cdcb7d2934713e17ab18382467ae7610b7", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 4513, "license_type": "permissive", "max_line_length": 80, "num_lines": 167, "path": "/src/descriptor_runner/operators/cpu/operators/standard/tile.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { DataArrayTypes } from \"../../../../interface/core/constants\";\nimport { OperatorImpl } from \"../../../operatorImpl\";\nimport { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\n\n/*\n * Opset 6\n * opset 1は互換性なし\n */\nclass Tile6 extends OperatorImpl {\n constructor() {\n super(\"cpu\");\n }\n\n async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsCPUTensorArray(inputs);\n const input = inputs[0],\n repeats = inputs[1];\n const outputShape: number[] = [];\n for (let i = 0; i < input.ndim; i++) {\n outputShape.push(input.dims[i] * repeats.data[i]);\n }\n const output = context.emptyTensor(outputShape, input.dataType);\n if (input.ndim === 1) {\n this.copy1d(\n input.data,\n output.data,\n input.dims,\n outputShape,\n input.strides,\n output.strides\n );\n } else if (input.ndim === 2) {\n this.copy2d(\n input.data,\n output.data,\n input.dims,\n outputShape,\n input.strides,\n output.strides\n );\n } else if (input.ndim === 3) {\n this.copy3d(\n input.data,\n output.data,\n input.dims,\n outputShape,\n input.strides,\n output.strides\n );\n } else if (input.ndim === 4) {\n this.copy4d(\n input.data,\n output.data,\n input.dims,\n outputShape,\n input.strides,\n output.strides\n );\n } else {\n throw new Error(\n 
`Tile: input.ndim = ${input.ndim} > 4 is not yet supported`\n );\n }\n return [output];\n }\n\n copy1d(\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n inputShape: ReadonlyArray<number>,\n outputShape: ReadonlyArray<number>,\n inputStrides: ReadonlyArray<number>,\n outputStrides: ReadonlyArray<number>\n ) {\n for (let d0 = 0; d0 < outputShape[0]; d0++) {\n dO[d0 * outputStrides[0]] = dI[(d0 % inputShape[0]) * inputStrides[0]];\n }\n }\n\n copy2d(\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n inputShape: ReadonlyArray<number>,\n outputShape: ReadonlyArray<number>,\n inputStrides: ReadonlyArray<number>,\n outputStrides: ReadonlyArray<number>\n ) {\n for (let d0 = 0; d0 < outputShape[0]; d0++) {\n for (let d1 = 0; d1 < outputShape[1]; d1++) {\n dO[d0 * outputStrides[0] + d1 * outputStrides[1]] =\n dI[\n (d0 % inputShape[0]) * inputStrides[0] +\n (d1 % inputShape[1]) * inputStrides[1]\n ];\n }\n }\n }\n\n copy3d(\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n inputShape: ReadonlyArray<number>,\n outputShape: ReadonlyArray<number>,\n inputStrides: ReadonlyArray<number>,\n outputStrides: ReadonlyArray<number>\n ) {\n for (let d0 = 0; d0 < outputShape[0]; d0++) {\n for (let d1 = 0; d1 < outputShape[1]; d1++) {\n for (let d2 = 0; d2 < outputShape[2]; d2++) {\n dO[\n d0 * outputStrides[0] +\n d1 * outputStrides[1] +\n d2 * outputStrides[2]\n ] =\n dI[\n (d0 % inputShape[0]) * inputStrides[0] +\n (d1 % inputShape[1]) * inputStrides[1] +\n (d2 % inputShape[2]) * inputStrides[2]\n ];\n }\n }\n }\n }\n\n copy4d(\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n inputShape: ReadonlyArray<number>,\n outputShape: ReadonlyArray<number>,\n inputStrides: ReadonlyArray<number>,\n outputStrides: ReadonlyArray<number>\n ) {\n for (let d0 = 0; d0 < outputShape[0]; d0++) {\n for (let d1 = 0; d1 < outputShape[1]; d1++) {\n for (let d2 = 0; d2 < outputShape[2]; d2++) {\n for (let d3 = 0; d3 < outputShape[3]; d3++) {\n dO[\n d0 * outputStrides[0] +\n d1 * outputStrides[1] +\n d2 * outputStrides[2] +\n d3 * outputStrides[3]\n ] =\n dI[\n (d0 % inputShape[0]) * inputStrides[0] +\n (d1 % inputShape[1]) * inputStrides[1] +\n (d2 % inputShape[2]) * inputStrides[2] +\n (d3 % inputShape[3]) * inputStrides[3]\n ];\n }\n }\n }\n }\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n return [\n {\n opType: \"Tile\",\n backend: \"cpu\",\n opsetMin: 6,\n factory: () => new Tile6(),\n },\n ];\n}\n" }, { "alpha_fraction": 0.7417721748352051, "alphanum_fraction": 0.7417721748352051, "avg_line_length": 27.214284896850586, "blob_id": "e623e205fd388368d015a8a83ecb66d7dd4f1a37", "content_id": "b5383c2aa040a33a4c1fcacd26a40b0b47637bc4", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 395, "license_type": "permissive", "max_line_length": 64, "num_lines": 14, "path": "/src/descriptor_runner/interface/core/tensor.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { Backend, DataArrayTypes, DataType } from \"./constants\";\n\nexport interface Tensor {\n readonly dims: ReadonlyArray<number>;\n readonly ndim: number;\n readonly length: number;\n readonly strides: ReadonlyArray<number>;\n readonly dataType: DataType;\n readonly backend: Backend;\n\n getData(): Promise<DataArrayTypes>;\n setData(data: DataArrayTypes): Promise<void>;\n dispose(): void;\n}\n" }, { "alpha_fraction": 0.6098149418830872, "alphanum_fraction": 0.6130329966545105, "avg_line_length": 24.89583396911621, "blob_id": "6f9313ead3701ed04ce2a88ae35d81fe1b7fa48e", 
"content_id": "3b91ef75611a51897212b3cc517b2da4f97f9a5f", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1243, "license_type": "permissive", "max_line_length": 87, "num_lines": 48, "path": "/example/custom_operator/make_model.py", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import os\nimport torch\nfrom torch.autograd import Function\nimport torch.nn.functional as F\nimport numpy as np\n\nclass TwiceFunction(Function):\n @staticmethod\n def symbolic(g, x):\n y = g.op(\"foo_domain::Twice\", x)\n return y\n\n @staticmethod\n def forward(ctx, input):\n numpy_input = input.detach().numpy()\n result = numpy_input * 2\n return input.new(result)\n \n @staticmethod\n def backward(ctx, grad_output):\n numpy_go = grad_output.numpy()\n result = numpy_go * 2\n return grad_output.new(result)\n\ndef twice(input):\n return TwiceFunction.apply(input)\n\nclass MyModel(torch.nn.Module):\n def __init__(self):\n super().__init__()\n \n def forward(self, x):\n h = twice(x)\n h = F.relu(h)\n return h\n\ndef main():\n output_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"output\")\n os.makedirs(output_dir, exist_ok=True)\n\n model = MyModel()\n example_input = torch.zeros((2, 3))\n # ONNX model contains two operators: Twice (custom), Relu (standard)\n torch.onnx.export(model, (example_input, ), os.path.join(output_dir, \"model.onnx\"))\n # TODO: how to avoid TracerWarning?\n\nif __name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.7454677224159241, "alphanum_fraction": 0.7541697025299072, "avg_line_length": 31.83333396911621, "blob_id": "151aec3b80f48059343e7cbc253cfae2501e32c0", "content_id": "29d49e7b50e47cbe81f83532659eb2270d0aa688", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 1379, "license_type": "permissive", "max_line_length": 77, "num_lines": 42, "path": "/src/descriptor_runner/interface/backend/wasm/wasmContext.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { BackendContext } from \"../../core/backendContext\";\nimport { DataType } from \"../../core/constants\";\nimport { Tensor } from \"../../core/tensor\";\nimport { WebDNNCPUContext } from \"../cpu/cpuContext\";\nimport { WasmTensor } from \"./wasmTensor\";\n\n// for future use\n// eslint-disable-next-line @typescript-eslint/no-empty-interface\nexport interface WebDNNWasmContextOption {}\n\nexport interface WasmKernelArgumentTensor {\n type: \"tensor\";\n value: WasmTensor;\n}\n\nexport interface WasmKernelArgumentFloat32 {\n type: \"float32\";\n value: number;\n}\n\nexport interface WasmKernelArgumentInt32 {\n type: \"int32\";\n value: number;\n}\n\nexport type WasmKernelArgument =\n | WasmKernelArgumentTensor\n | WasmKernelArgumentFloat32\n | WasmKernelArgumentInt32;\n\nexport interface WebDNNWasmContext extends BackendContext {\n backend: \"wasm\";\n cpuContext: WebDNNCPUContext;\n initialize(wasmWorkerSrcUrl: string): Promise<void>;\n isWasmTensor(tensor: Tensor): tensor is WasmTensor;\n assertsWasmTensor(tensor: Tensor): asserts tensor is WasmTensor;\n assertsWasmTensorArray(tensors: Tensor[]): asserts tensors is WasmTensor[];\n emptyTensor(dims: ReadonlyArray<number>, dataType?: DataType): WasmTensor;\n // eslint-disable-next-line @typescript-eslint/ban-types\n moveTensor(tensor: Tensor, option: {}): Promise<WasmTensor>;\n runKernel(name: string, args: WasmKernelArgument[]): void;\n}\n" }, { 
"alpha_fraction": 0.4535614550113678, "alphanum_fraction": 0.5048882961273193, "avg_line_length": 25.03636360168457, "blob_id": "53219143ffde891a99dc00a6c5fb279c7531735e", "content_id": "ad2a2567dd8696f1bf7010d1e483b1d071db8ef3", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 2866, "license_type": "permissive", "max_line_length": 57, "num_lines": 110, "path": "/src/descriptor_runner/math/random.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "/**\n * Random number / vector generator.\n */\nexport class Random {\n x: number;\n y: number;\n z: number;\n w: number;\n\n constructor(seed = 0) {\n // Algorithm: XorShift\n this.x = seed | 0;\n this.y = 362436069;\n this.z = 521288629;\n this.w = 88675123;\n // skip some initial values to decorrelate seed\n for (let i = 0; i < 40; i++) {\n this.randomRaw();\n }\n }\n\n /**\n * Generates random integer\n * @returns Random integer [-2**31, 2**31-1]\n */\n randomRaw(): number {\n const x = this.x;\n const t = x ^ (x << 11);\n this.x = this.y;\n this.y = this.z;\n const w = this.w;\n this.z = w;\n const nw = w ^ (w >>> 19) ^ (t ^ (t >>> 8));\n this.w = nw;\n return nw; // 32bit signed integer\n }\n\n /**\n * Generates random number between [0, 1)\n * @param size spceify number to specify vector length\n */\n random(size?: null): number;\n random(size: number): Float32Array;\n random(size?: null | number): number | Float32Array {\n if (size == null) {\n // scalar number\n let raw = this.randomRaw(); // [-2**31, 2**31-1]\n raw += 2147483648; // [0, 2**32-1]\n return raw / 4294967296; // [0, 1)\n } else {\n // Float32Array\n const v = new Float32Array(size);\n for (let i = 0; i < size; i++) {\n let raw = this.randomRaw(); // [-2**31, 2**31-1]\n raw += 2147483648; // [0, 2**32-1]\n const s = raw / 4294967296; // [0, 1)\n v[i] = s;\n }\n return v;\n }\n }\n\n /**\n * Generates random number from normal distribution.\n * @param size spceify number to specify vector length\n */\n normal(size?: null): number;\n normal(size: number): Float32Array;\n normal(size?: null | number): number | Float32Array {\n // Box–Muller's method\n if (size == null) {\n // scalar number\n // eslint-disable-next-line no-constant-condition\n while (true) {\n const x = this.random();\n const alpha = Math.sqrt(-2 * Math.log(x));\n if (!Number.isFinite(alpha)) {\n // very rare case\n continue;\n }\n const y = this.random();\n const z1 = alpha * Math.cos(Math.PI * 2 * y);\n return z1;\n }\n } else {\n // Float32Array\n const v = new Float32Array(size);\n for (let i = 0; i < size; i += 2) {\n // eslint-disable-next-line no-constant-condition\n while (true) {\n const x = this.random();\n const alpha = Math.sqrt(-2 * Math.log(x));\n if (!Number.isFinite(alpha)) {\n // very rare case\n continue;\n }\n const y = this.random();\n const z1 = alpha * Math.cos(Math.PI * 2 * y);\n const z2 = alpha * Math.sin(Math.PI * 2 * y);\n v[i] = z1;\n if (i + 1 < size) {\n v[i + 1] = z2;\n }\n break;\n }\n }\n return v;\n }\n }\n}\n" }, { "alpha_fraction": 0.5252229571342468, "alphanum_fraction": 0.532314658164978, "avg_line_length": 26.68825912475586, "blob_id": "98957c23f893f7a59230fc0a4a2d03cd945df30b", "content_id": "014b737c387df86494b3bb3305f98d8b010ab1b9", "detected_licenses": [ "Zlib", "MIT" ], "is_generated": false, "is_vendor": false, "language": "TypeScript", "length_bytes": 13678, "license_type": "permissive", "max_line_length": 80, "num_lines": 494, "path": 
"/src/descriptor_runner/operators/cpu/operators/standard/reduce.ts", "repo_name": "nagyist/mil-tokyo.webdnn", "src_encoding": "UTF-8", "text": "import { onnx } from \"onnx-proto\";\nimport { OperatorImpl } from \"../../../operatorImpl\";\nimport { getAttrInt, getAttrInts } from \"../../../operatorUtil\";\nimport { WebDNNCPUContext } from \"../../../../interface/backend/cpu/cpuContext\";\nimport { Tensor } from \"../../../../interface/core/tensor\";\nimport { OperatorEntry } from \"../../../../interface/core/operator\";\nimport {\n DataArrayConstructor,\n DataArrayTypes,\n} from \"../../../../interface/core/constants\";\nimport { arrayProd } from \"../../../../util\";\nimport { CPUTensor } from \"../../../../interface/backend/cpu/cpuTensor\";\n\n// Opset 1\nabstract class ReduceOp extends OperatorImpl {\n axes!: number[];\n\n keepdims!: boolean;\n\n constructor(\n private opType: string,\n private opNotFinalAxis: (\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n outerLength: number,\n innerLength: number,\n reductionLength: number\n ) => void,\n private opFinalAxis?: (\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n outerLength: number,\n innerLength: number,\n reductionLength: number,\n totalReductionLength: number\n ) => void\n ) {\n super(\"cpu\");\n }\n\n protected async runCore(\n context: WebDNNCPUContext,\n input: CPUTensor,\n sortedAxes: number[]\n ): Promise<Tensor[]> {\n let lastOutputData = input.data;\n let lastShape = input.dims;\n let totalReductionLength = 1;\n for (let i = 0; i < sortedAxes.length; i++) {\n const axis = sortedAxes[i];\n const newShape = lastShape.slice();\n newShape[axis] = 1;\n const reductionLength = lastShape[axis];\n totalReductionLength *= reductionLength;\n const outerLength = arrayProd(lastShape.slice(0, axis));\n const innerLength = arrayProd(lastShape.slice(axis + 1));\n const newOutputData = new DataArrayConstructor[input.dataType](\n outerLength * innerLength\n );\n if (i < sortedAxes.length - 1) {\n this.opNotFinalAxis(\n lastOutputData,\n newOutputData,\n outerLength,\n innerLength,\n reductionLength\n );\n } else {\n if (this.opFinalAxis) {\n this.opFinalAxis(\n lastOutputData,\n newOutputData,\n outerLength,\n innerLength,\n reductionLength,\n totalReductionLength\n );\n } else {\n this.opNotFinalAxis(\n lastOutputData,\n newOutputData,\n outerLength,\n innerLength,\n reductionLength\n );\n }\n }\n lastOutputData = newOutputData;\n lastShape = newShape;\n }\n let finalShape: ReadonlyArray<number>;\n if (this.keepdims) {\n finalShape = lastShape;\n } else {\n finalShape = lastShape.filter((_, i) => !sortedAxes.includes(i));\n }\n const output = context.emptyTensor(\n finalShape,\n input.dataType,\n lastOutputData\n );\n return [output];\n }\n}\n\nclass ReduceOp1 extends ReduceOp {\n axes!: number[];\n keepdims!: boolean;\n\n initialize(attribute: onnx.IAttributeProto[]): void {\n super.initialize(attribute);\n this.axes = getAttrInts(attribute, \"axes\", []);\n this.keepdims = getAttrInt(attribute, \"keepdims\", 1) !== 0;\n }\n\n async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsCPUTensorArray(inputs);\n const input = inputs[0];\n let sortedAxes: number[];\n if (this.axes.length > 0) {\n sortedAxes = this.axes.map((a) => (a >= 0 ? 
a : input.ndim + a));\n sortedAxes.sort((a, b) => a - b);\n } else {\n sortedAxes = [];\n for (let i = 0; i < input.ndim; i++) {\n sortedAxes.push(i);\n }\n }\n\n return this.runCore(context, input, sortedAxes);\n }\n}\n\n// Only ReduceSum has backward-incompatible opset 13\nclass ReduceSum13 extends ReduceOp {\n keepdims!: boolean;\n noopWithEmptyAxes!: boolean;\n\n initialize(attribute: onnx.IAttributeProto[]): void {\n super.initialize(attribute);\n this.keepdims = getAttrInt(attribute, \"keepdims\", 1) !== 0;\n this.noopWithEmptyAxes =\n getAttrInt(attribute, \"noop_with_empty_axes\", 0) !== 0;\n }\n\n async run(context: WebDNNCPUContext, inputs: Tensor[]): Promise<Tensor[]> {\n context.assertsCPUTensorArray(inputs);\n const input = inputs[0],\n axes = inputs[1];\n let sortedAxes: number[];\n if (axes.length > 0) {\n sortedAxes = Array.from(axes.data).map((a) =>\n a >= 0 ? a : input.ndim + a\n );\n sortedAxes.sort((a, b) => a - b);\n } else {\n sortedAxes = [];\n if (!this.noopWithEmptyAxes) {\n for (let i = 0; i < input.ndim; i++) {\n sortedAxes.push(i);\n }\n }\n }\n\n return this.runCore(context, input, sortedAxes);\n }\n}\n\nexport function getOpEntries(): OperatorEntry[] {\n const opEntries: OperatorEntry[] = [];\n const addOps = (\n opType: string,\n opNotFinalAxis: (\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n outerLength: number,\n innerLength: number,\n reductionLength: number\n ) => void,\n opFinalAxis?: (\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n outerLength: number,\n innerLength: number,\n reductionLength: number,\n totalReductionLength: number\n ) => void,\n reduceSum13?: boolean\n ) => {\n opEntries.push({\n opType: opType,\n backend: \"cpu\",\n opsetMin: 1,\n opsetMax: reduceSum13 ? 13 : undefined,\n factory: () => new ReduceOp1(opType, opNotFinalAxis, opFinalAxis),\n });\n if (reduceSum13) {\n opEntries.push({\n opType: opType,\n backend: \"cpu\",\n opsetMin: 13,\n factory: () => new ReduceSum13(opType, opNotFinalAxis, opFinalAxis),\n });\n }\n };\n addOps(\n \"ReduceL1\",\n (\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n outerLength: number,\n innerLength: number,\n reductionLength: number\n ): void => {\n for (let outer = 0; outer < outerLength; outer++) {\n for (let inner = 0; inner < innerLength; inner++) {\n let s = 0;\n for (let r = 0; r < reductionLength; r++) {\n s += Math.abs(\n dI[(outer * reductionLength + r) * innerLength + inner]\n );\n }\n dO[outer * innerLength + inner] = s;\n }\n }\n }\n );\n addOps(\n \"ReduceL2\",\n (\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n outerLength: number,\n innerLength: number,\n reductionLength: number\n ): void => {\n for (let outer = 0; outer < outerLength; outer++) {\n for (let inner = 0; inner < innerLength; inner++) {\n let s = 0;\n for (let r = 0; r < reductionLength; r++) {\n const v = dI[(outer * reductionLength + r) * innerLength + inner];\n s += v * v;\n }\n dO[outer * innerLength + inner] = s;\n }\n }\n },\n (\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n outerLength: number,\n innerLength: number,\n reductionLength: number\n ): void => {\n for (let outer = 0; outer < outerLength; outer++) {\n for (let inner = 0; inner < innerLength; inner++) {\n let s = 0;\n for (let r = 0; r < reductionLength; r++) {\n const v = dI[(outer * reductionLength + r) * innerLength + inner];\n s += v * v;\n }\n dO[outer * innerLength + inner] = Math.sqrt(s);\n }\n }\n }\n );\n addOps(\n \"ReduceLogSum\",\n (\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n outerLength: number,\n innerLength: number,\n reductionLength: number\n ): 
void => {\n      for (let outer = 0; outer < outerLength; outer++) {\n        for (let inner = 0; inner < innerLength; inner++) {\n          let s = 0;\n          for (let r = 0; r < reductionLength; r++) {\n            s += dI[(outer * reductionLength + r) * innerLength + inner];\n          }\n          dO[outer * innerLength + inner] = s;\n        }\n      }\n    },\n    (\n      dI: DataArrayTypes,\n      dO: DataArrayTypes,\n      outerLength: number,\n      innerLength: number,\n      reductionLength: number\n    ): void => {\n      for (let outer = 0; outer < outerLength; outer++) {\n        for (let inner = 0; inner < innerLength; inner++) {\n          let s = 0;\n          for (let r = 0; r < reductionLength; r++) {\n            s += dI[(outer * reductionLength + r) * innerLength + inner];\n          }\n          dO[outer * innerLength + inner] = Math.log(s);\n        }\n      }\n    }\n  );\n  addOps(\n    \"ReduceLogSumExp\",\n    (\n      dI: DataArrayTypes,\n      dO: DataArrayTypes,\n      outerLength: number,\n      innerLength: number,\n      reductionLength: number\n    ): void => {\n      for (let outer = 0; outer < outerLength; outer++) {\n        for (let inner = 0; inner < innerLength; inner++) {\n          let s = 0;\n          for (let r = 0; r < reductionLength; r++) {\n            s += Math.exp(\n              dI[(outer * reductionLength + r) * innerLength + inner]\n            );\n          }\n          dO[outer * innerLength + inner] = s;\n        }\n      }\n    },\n    (\n      dI: DataArrayTypes,\n      dO: DataArrayTypes,\n      outerLength: number,\n      innerLength: number,\n      reductionLength: number\n    ): void => {\n      for (let outer = 0; outer < outerLength; outer++) {\n        for (let inner = 0; inner < innerLength; inner++) {\n          let s = 0;\n          for (let r = 0; r < reductionLength; r++) {\n            s += Math.exp(\n              dI[(outer * reductionLength + r) * innerLength + inner]\n            );\n          }\n          dO[outer * innerLength + inner] = Math.log(s);\n        }\n      }\n    }\n  );\n  addOps(\n    \"ReduceMax\",\n    (\n      dI: DataArrayTypes,\n      dO: DataArrayTypes,\n      outerLength: number,\n      innerLength: number,\n      reductionLength: number\n    ): void => {\n      for (let outer = 0; outer < outerLength; outer++) {\n        for (let inner = 0; inner < innerLength; inner++) {\n          let s = dI[outer * reductionLength * innerLength + inner];\n          for (let r = 1; r < reductionLength; r++) {\n            const v = dI[(outer * reductionLength + r) * innerLength + inner];\n            if (v > s) {\n              s = v;\n            }\n          }\n          dO[outer * innerLength + inner] = s;\n        }\n      }\n    }\n  );\n  addOps(\n    \"ReduceMean\",\n    (\n      dI: DataArrayTypes,\n      dO: DataArrayTypes,\n      outerLength: number,\n      innerLength: number,\n      reductionLength: number\n    ): void => {\n      for (let outer = 0; outer < outerLength; outer++) {\n        for (let inner = 0; inner < innerLength; inner++) {\n          let s = 0;\n          for (let r = 0; r < reductionLength; r++) {\n            s += dI[(outer * reductionLength + r) * innerLength + inner];\n          }\n          dO[outer * innerLength + inner] = s;\n        }\n      }\n    },\n\n    (\n      dI: DataArrayTypes,\n      dO: DataArrayTypes,\n      outerLength: number,\n      innerLength: number,\n      reductionLength: number,\n      totalReductionLength: number\n    ): void => {\n      for (let outer = 0; outer < outerLength; outer++) {\n        for (let inner = 0; inner < innerLength; inner++) {\n          let s = 0;\n          for (let r = 0; r < reductionLength; r++) {\n            s += dI[(outer * reductionLength + r) * innerLength + inner];\n          }\n          dO[outer * innerLength + inner] = s / totalReductionLength;\n        }\n      }\n    }\n  );\n  addOps(\n    \"ReduceMin\",\n    (\n      dI: DataArrayTypes,\n      dO: DataArrayTypes,\n      outerLength: number,\n      innerLength: number,\n      reductionLength: number\n    ): void => {\n      for (let outer = 0; outer < outerLength; outer++) {\n        for (let inner = 0; inner < innerLength; inner++) {\n          let s = dI[outer * reductionLength * innerLength + inner];\n          for (let r = 1; r < reductionLength; r++) {\n            const v = dI[(outer * reductionLength + r) * innerLength + inner];\n            if (v < s) {\n              s = v;\n            }\n          }\n          
dO[outer * innerLength + inner] = s;\n }\n }\n }\n );\n addOps(\n \"ReduceProd\",\n (\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n outerLength: number,\n innerLength: number,\n reductionLength: number\n ): void => {\n for (let outer = 0; outer < outerLength; outer++) {\n for (let inner = 0; inner < innerLength; inner++) {\n let s = 1;\n for (let r = 0; r < reductionLength; r++) {\n s *= dI[(outer * reductionLength + r) * innerLength + inner];\n }\n dO[outer * innerLength + inner] = s;\n }\n }\n }\n );\n addOps(\n \"ReduceSum\",\n (\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n outerLength: number,\n innerLength: number,\n reductionLength: number\n ): void => {\n for (let outer = 0; outer < outerLength; outer++) {\n for (let inner = 0; inner < innerLength; inner++) {\n let s = 0;\n for (let r = 0; r < reductionLength; r++) {\n s += dI[(outer * reductionLength + r) * innerLength + inner];\n }\n dO[outer * innerLength + inner] = s;\n }\n }\n },\n undefined,\n true\n );\n addOps(\n \"ReduceSumSquare\",\n (\n dI: DataArrayTypes,\n dO: DataArrayTypes,\n outerLength: number,\n innerLength: number,\n reductionLength: number\n ): void => {\n for (let outer = 0; outer < outerLength; outer++) {\n for (let inner = 0; inner < innerLength; inner++) {\n let s = 0;\n for (let r = 0; r < reductionLength; r++) {\n const v = dI[(outer * reductionLength + r) * innerLength + inner];\n s += v * v;\n }\n dO[outer * innerLength + inner] = s;\n }\n }\n }\n );\n return opEntries;\n}\n" } ]
171
prajjawal98/calculator
https://github.com/prajjawal98/calculator
aebaecba1cbada558309b17a20dc53960d428721
9e4626a5cfe1b317723997b2f0356a49049dad2a
b7f543db3ce5245d71accc04085b9eb944060143
refs/heads/master
2020-05-26T13:32:38.592300
2019-10-25T15:28:04
2019-10-25T15:28:04
188,248,579
0
2
null
2019-05-23T14:19:19
2019-10-25T14:19:11
2019-10-25T15:28:04
Python
[ { "alpha_fraction": 0.5817022919654846, "alphanum_fraction": 0.6649976968765259, "avg_line_length": 22.988636016845703, "blob_id": "1d658d50595cd70fb7a287c919c821586edbad79", "content_id": "5c96cda0c3fb5c2ee8485fe0ababb9a32fa505aa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2197, "license_type": "no_license", "max_line_length": 95, "num_lines": 88, "path": "/calc.py", "repo_name": "prajjawal98/calculator", "src_encoding": "UTF-8", "text": "from tkinter import*\r\nobj=Tk()\r\nobj.title(\"calculator\")\r\nobj.geometry(\"160x280+100+50\")\r\n\r\n\r\n\r\ntextin=StringVar()\r\noperator=\"\"\r\n\r\ndef clickbut(number):\r\n global operator\r\n operator=operator+str(number)\r\n textin.set(operator)\r\n\r\ndef equalbut():\r\n global operator\r\n add=str(eval(operator))\r\n textin.set(add)\r\n operator=''\r\n\r\ndef equalbut():\r\n global operator\r\n sub=str(eval(operator))\r\n textin.set(sub)\r\n operator=''\r\n\r\ndef equalbut():\r\n global operator\r\n mul=str(eval(operator))\r\n textin.set(mul)\r\n operator=''\r\n\r\ndef equalbut():\r\n global operator\r\n div=str(eval(operator))\r\n textin.set(div)\r\n operator=''\r\n\r\ndef clrbut():\r\n textin.set('')\r\n\r\n\r\nobjtext=Entry(obj,font=(\"courier New\",15,'bold'),textvar=textin,width=25,bd=5,bg='powder blue')\r\nobjtext.pack()\r\n\r\n\r\nb1=Button(obj,text=\"c\",width=10,command=clrbut,bg=\"cyan\")\r\nb2=Button(obj,text=\"/\",width=4,command=lambda:clickbut(\"/\"))\r\n\r\nb5=Button(obj,text=\"7\",width=4,command=lambda:clickbut(7))\r\nb6=Button(obj,text=\"8\",width=4,command=lambda:clickbut(8))\r\nb7=Button(obj,text=\"9\",width=4,command=lambda:clickbut(9))\r\nb8=Button(obj,text=\"-\",width=4,command=lambda:clickbut(\"-\"))\r\nb9=Button(obj,text=\"4\",width=4,command=lambda:clickbut(4))\r\nb10=Button(obj,text=\"5\",width=4,command=lambda:clickbut(5))\r\nb11=Button(obj,text=\"6\",width=4,command=lambda:clickbut(6))\r\nb12=Button(obj,text=\"+\",width=4,command=lambda:clickbut(\"+\"))\r\nb13=Button(obj,text=\"1\",width=4,command=lambda:clickbut(1))\r\nb14=Button(obj,text=\"2\",width=4,command=lambda:clickbut(2))\r\nb15=Button(obj,text=\"3\",width=4,command=lambda:clickbut(3))\r\nb16=Button(obj,text=\"=\",width=4,height=3,bg=\"cyan\",command=equalbut)\r\n\r\nb17=Button(obj,text=\"0\",width=9,command=lambda:clickbut(0))\r\nb18=Button(obj,text=\".\",width=4,command=lambda:clickbut(\".\"))\r\n\r\n\r\nb1.place(x=0,y=130,)\r\nb2.place(x=120,y=130)\r\n\r\nb5.place(x=0,y=160)\r\nb6.place(x=40,y=160)\r\nb7.place(x=80,y=160)\r\nb8.place(x=120,y=160)\r\nb9.place(x=0,y=190)\r\nb10.place(x=40,y=190)\r\nb11.place(x=80,y=190)\r\nb12.place(x=120,y=190)\r\nb13.place(x=0,y=220)\r\nb14.place(x=40,y=220)\r\nb15.place(x=80,y=220)\r\nb16.place(x=120,y=220)\r\n\r\nb17.place(x=0,y=250)\r\nb18.place(x=80,y=250)\r\n\r\n\r\nobj.mainloop()" }, { "alpha_fraction": 0.75, "alphanum_fraction": 0.75, "avg_line_length": 20.5, "blob_id": "612dce0d660fe4c8218757f64e96ff395aba135a", "content_id": "1aa901458ff117854b12bb43f933aa426bb845ab", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 44, "license_type": "no_license", "max_line_length": 29, "num_lines": 2, "path": "/README.md", "repo_name": "prajjawal98/calculator", "src_encoding": "UTF-8", "text": "# calculator\nThis is for only calculated \n" } ]
2
studyzy/gae_image
https://github.com/studyzy/gae_image
e15cf58aab9e2368c2db32d10c8cfe8bad9d6a57
1e421e212fe345957b1a34a8017fea31e425b80c
77a9027b87033668633ea73e5c2839a6a9e3c9b9
refs/heads/master
2016-09-06T11:12:06.602145
2014-05-10T03:34:31
2014-05-10T03:34:31
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6450137495994568, "alphanum_fraction": 0.6459286212921143, "avg_line_length": 27.05128288269043, "blob_id": "99757ad9026bef6eb46a885c72f9653b6ed1f538", "content_id": "55ea94f8f21ae388987e1732bdefbb1338887670", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1093, "license_type": "permissive", "max_line_length": 79, "num_lines": 39, "path": "/src/models.py", "repo_name": "studyzy/gae_image", "src_encoding": "UTF-8", "text": "#coding:utf-8\nimport re,logging,os\nfrom google.appengine.ext import db\nfrom google.appengine.api import memcache\nclass Images(db.Model):\n name = db.StringProperty()\n mime = db.StringProperty()\n size = db.IntegerProperty()\n created_at = db.DateTimeProperty(auto_now_add=True)\n description = db.StringProperty()\n width = db.IntegerProperty()\n height = db.IntegerProperty()\n filetype=db.StringProperty()\n tag=db.StringListProperty()\n\n bf = db.BlobProperty() #binary file\n \n iswebm=False\n def put(self):\n super(Images,self).put()\n \n def delete(self):\n key=str(self.key().id())+\"image\"\n memcache.delete(key)\n super(Images,self).delete()\n \n @property\n def id(self):\n return str(self.key().id())\n \n @property\n def imgurl(self):\n return \"http://%s/image/%s/\" %(os.environ['HTTP_HOST'],self.key().id())\nclass ImageViewCount(db.Model):\n imageid=db.IntegerProperty()\n viewcount=db.IntegerProperty()\nclass Tag(db.Model):\n tagName=db.StringProperty()\n useCount=db.IntegerProperty()" }, { "alpha_fraction": 0.6514285802841187, "alphanum_fraction": 0.6612698435783386, "avg_line_length": 21.5, "blob_id": "3d610f7cae37050e3faa67e211e88220534e0efe", "content_id": "4887ca472f8e0bcc6b8c579fcd5765447c970c94", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3150, "license_type": "permissive", "max_line_length": 65, "num_lines": 140, "path": "/bigfile/gaefile.py", "repo_name": "studyzy/gae_image", "src_encoding": "UTF-8", "text": "import sys\nimport os\nfrom google.appengine.ext import db\nimport logging\n'''\nsimulate python file operation.\ngf=GaeFile(\"/a/test.txt\",'rw'); \ngf.write(\"hello gaefile\");\ngf,close()\nlogging.info(gf.read())\n\n'''\nM_BUFFLEN=1000000-1\n\n\nclass GFBits(db.Model):\n\tbits=db.BlobProperty()\n\t\nclass GFInfoTable(db.Model):\n\tname=db.StringProperty()\n\tpath=db.StringProperty()\n\tproperty=db.StringProperty()\n\tbuffList=db.ListProperty(db.Key)\n\tfilesize=db.IntegerProperty()\n\tdownload=db.IntegerProperty()\n\tdate=db.DateTimeProperty(auto_now_add=True)\n\n\t\nclass GFDir(db.Model):\n\tpath=db.StringProperty()\n\tFileList=db.ListProperty(db.Key)\n\nclass GaeFile():\n\tpath=\"\"\n\tproperty=\"\"\n\tfilebuff=\"\"\n\tret=0\n\tgfInfoTable=None\n\tgfDir = GFDir()\n\tdef __init__(self,path=\"\",property=\"wr\"):\n\t\tself.gfInfoTable=GFInfoTable()\n\t\tdirs=self.gfDir.all().filter(\"path\",\"/\").fetch(20)\n\t\tlogging.info(\"len(dirs) %d \",len(dirs))\n\t\t\n\t\tif len(dirs) == 1:\n\t\t\tself.gfDir=dirs[0]\n\t\telif len(dirs) == 0:\n\t\t\tself.gfDir.path=\"/\"\n\n\tdef open(self,path,property):\n\t\tself.path=path\n\t\tself.property=property\n\t\tif len(self.gfDir.FileList) == 0:\n\t\t\tself.gfInfoTable.path=path\n\t\t\tself.gfInfoTable.property=property\n\t\t\tself.gfInfoTable.download=0\n\t\t\treturn\n\t\t\t\n\t\tfor filekey in self.gfDir.FileList:\n\t\t\tfile=db.get(filekey)\n\t\t\tif file == None:\n\t\t\t\tself.gfDir.FileList.remove(filekey)\n\t\t\telif file.path 
== path:\n\t\t\t\tself.ret=1\n\t\t\t\treturn 1\n\t\t\telse:\n\t\t\t\tself.gfInfoTable.path=path\n\t\t\t\tself.gfInfoTable.property=property\n\t\t\t\tself.gfInfoTable.download=0\n\t\t\t\n\tdef write(self,buff):\n\t\tif self.gfInfoTable == None:\n\t\t\tself.ret=1\n\t\t\treturn 1\n\t\tbuffLen=len(buff)\t\t\n\t\tself.gfInfoTable.filesize= buffLen\n\t\ti=0\n\t\tif buffLen > M_BUFFLEN :\n\t\t\tfor i in range(0,buffLen/M_BUFFLEN+1):\t\t\t\t\n\t\t\t\tself.filebuff=self.filebuff+buff[i*M_BUFFLEN:(i+1)*M_BUFFLEN]\n\t\t\t\tgfBits=GFBits()\t\t\t\t\n\t\t\t\tgfBits.bits=buff[i*M_BUFFLEN:(i+1)*M_BUFFLEN]\n\t\t\t\tkey=gfBits.put()\t\t\t\t\n\t\t\t\tself.gfInfoTable.buffList.append(key)\n\t\telse:\n\t\t\tgfBits=GFBits()\n\t\t\tgfBits.bits=buff\n\t\t\tkey=gfBits.put()\t\t\t\n\t\t\tself.gfInfoTable.buffList.append(key)\t\t\t\n\t\t\t\t\t\t\t\t\t\t\n\t\tpass\n\tdef read(self,path):\n\t\tfb=\"\"\n\t\tfilebits=GFBits()\n\t\tfor filekey in self.gfDir.FileList:\n\t\t\tfile = db.get(filekey)\n\t\t\tif file.path == path:\n\t\t\t\tfor i in range(0,len(file.buffList)):\n\t\t\t\t\tkey=file.buffList[i]\n\t\t\t\t\tfilebits=db.get(key)\t\n\t\t\t\t\tfb=fb+filebits.bits\n\t\t\t\treturn fb\n\t\t\telse:\n\t\t\t\tlogging.info(\"can't find file %s\",path)\n\t\t\t\n\tdef remove(self,path):\n\t\tfor filekey in self.gfDir.FileList:\n\t\t\tfile = db.get(filekey)\n\t\t\tif file.path == path:\n\t\t\t\tfor i in range(0,len(file.buffList)):\n\t\t\t\t\tkey=file.buffList[i]\n\t\t\t\t\tbit=db.get(key)\n\t\t\t\t\tif bit != None:\n\t\t\t\t\t\tdb.delete(bit)\n\t\t\t\tfile.delete()\n\t\t\t\tself.gfDir.FileList.remove(filekey)\n\t\t\t\tself.gfDir.put()\n\t\t\t\tlogging.info(\"delete file %s ok\",path)\n\t\t\t\treturn\n\t\telse:\n\t\t\tlogging.info(\"can't find file %s\",path)\n\t\t\t\n\tdef close(self):\n\t\tif self.gfInfoTable == None:\n\t\t\tlogging.info(\"close error filehandle=None\");\n\t\t\treturn 1\n\t\tif self.ret != 0:\n\t\t\tlogging.info(\"can't close\")\n\t\t\treturn 1\n\t\tkey=self.gfInfoTable.put()\n\t\tfile = db.get(key)\n\t\tself.gfDir.FileList.append(key)\n\t\tself.gfDir.put()\n\t\t\n\t\ndef main():\t\t\n\tpass\n\t\nif __name__ == \"__main__\":\n main()\n" }, { "alpha_fraction": 0.5989212393760681, "alphanum_fraction": 0.6116504669189453, "avg_line_length": 28.452228546142578, "blob_id": "0f52af550cc832e1d9ac4f75e428ba10a90dad78", "content_id": "d2ba75afff8318bb0947c2d4fe58273de664f42f", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4635, "license_type": "permissive", "max_line_length": 88, "num_lines": 157, "path": "/src/methods.py", "repo_name": "studyzy/gae_image", "src_encoding": "UTF-8", "text": "#coding:utf-8\nfrom models import Images\nfrom models import Tag\nfrom models import ImageViewCount\nfrom google.appengine.api import memcache\nfrom google.appengine.api import images\nfrom getimageinfo import getImageInfo\nfrom google.appengine.ext import db\nfrom google.appengine.api import urlfetch\nfrom gaefile import *\n\ndef addImage(name, mime,description,tag,bf):\n 'Add Image'\n image=Images(name=name, mime=mime,description=description,tag=tag.split(','), bf=bf)\n image.size=len(image.bf)\n image.filetype,image.width,image.height=getImageInfo(bf)\n image.put()\n AddTags(image.tag)\n return image\n\ndef addImage2(bf):\n image=Images(bf=bf)\n image.size=len(bf)\n image.filetype,image.width,image.height=getImageInfo(bf)\n if not image.filetype:return None\n image.mime=image.filetype\n image.put()\n return image\n\ndef getImage(id):\n id=int(id)\n 
return Images.get_by_id(id)\n\ndef resizeImage(id,size=\"image\"):\n image=getImage(id)\n if not image:return None\n if size==\"image\":return image\n if image.width==-1:return image\n img=images.Image(image.bf)\n img.resize(width=240, height=240)\n img.im_feeling_lucky()\n image.bf=img.execute_transforms(output_encoding=images.JPEG)\n return image\n\ndef downImage(id,size=\"image\"):\n key=id+size\n image=memcache.get(key)\n if not image:\n image=resizeImage(id, size)\n memcache.set(key,image,3600*24)\n imagecount= db.GqlQuery(\"SELECT * FROM ImageViewCount WHERE imageid=:1\",int(id))\n x=imagecount.count()\n \n if x:\n ivc=imagecount[0]\n ivc.viewcount=ivc.viewcount+1\n db.put(ivc)\n else:\n ivc=ImageViewCount(imageid=int(id),viewcount=1) \n db.put(ivc) \n return image\n\ndef delImage(key):\n image=Images.get(key)\n if image:\n DelTags(image.tag)\n image.delete()\n\ndef delImageByid(id):\n image=Images.get_by_id(int(id))\n if image:\n DelTags(image.tag)\n image.delete()\n\ndef getAllImages(index=0):\n return Images.all().order('-created_at').fetch(25,index*24)\n\ndef getAllImagesByTag(tag):\n #return db.GqlQuery(u\"SELECT * FROM Images WHERE tag=:1\",unicode(tag, 'UTF-8'))\n return Images.all().filter('tag =', unicode(tag, 'UTF-8')).order('-created_at') \n\ndef getPageing(index,page=0):\n s=\"/%s/\"\n if page==0:\n if index==25:return (None,\"/1/\")\n else:return (None,None)\n if index==25:\n return (\"/\",s%(page+1)) if page==1 else (s %(page-1),s%(page+1))\n return (\"/\",None) if page==1 else (s %(page-1),None)\n\ndef AddImageByUrl(url,fileName,tag):\n result = urlfetch.fetch(url)\n if result.status_code == 200:\n name = fileName\n mtype = result.headers.get('Content-Type', '')\n bits = result.content\n gf=GaeFile()\n gf.open(name,mtype);\n gf.write(bits)\n id=gf.close()\n \n image=Images(description=\"/media/?key=\"+str(id))\n image.mime=result.headers.get('Content-Type', '')\n image.filetype=image.mime\n # if image.mime.find('image')==-1:\n # return None\n image.size=len(bits)\n image.width=-1;\n image.height=-1;\n # image.name=fileName\n # image.filetype,image.width,image.height=getImageInfo(image.bf)\n image.tag=tag.split(',')\n image.put()\n AddTags(image.tag)\n return image\n else:\n return None\n\ndef AddImageByUrlBak(url,fileName,tag):\n result = urlfetch.fetch(url)\n if result.status_code == 200:\n image=Images(description=url,bf=result.content)\n image.mime=result.headers.get('Content-Type', '')\n if image.mime.find('image')==-1:\n return None\n image.size=len(image.bf)\n image.name=fileName\n image.filetype,image.width,image.height=getImageInfo(image.bf)\n image.tag=tag.split(',')\n image.put()\n AddTags(image.tag)\n return image\n else:\n return None\n\n \n \ndef getAllTags():\n return Tag.all().order('-useCount')\ndef AddTags(tags):\n for t in tags:\n if t:\n tag= db.GqlQuery(\"SELECT * FROM Tag WHERE tagName=:1\",t) \n if tag.count()>0:\n thistag=tag[0]\n thistag.useCount=thistag.useCount+1\n db.put(thistag)\n else:\n newtag=Tag(tagName=t,useCount=1)\n db.put(newtag)\ndef DelTags(tags):\n for t in tags:\n tag= db.GqlQuery(\"SELECT * FROM Tag WHERE tagName=:1\",t)\n if tag.count()>0:\n thistag=tag[0]\n thistag.useCount=thistag.useCount-1\n db.put(thistag) " }, { "alpha_fraction": 0.6189797520637512, "alphanum_fraction": 0.6309489607810974, "avg_line_length": 24.992591857910156, "blob_id": "1256b41bbbb07c9182053c8fce5ce4475633e20a", "content_id": "afd875ea154f9bcaca89e80659318c15bfb28a7e", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, 
"language": "Python", "length_bytes": 3509, "license_type": "permissive", "max_line_length": 85, "num_lines": 135, "path": "/bigfile/main.py", "repo_name": "studyzy/gae_image", "src_encoding": "UTF-8", "text": "import wsgiref.handlers\nimport logging\nimport os\nfrom google.appengine.ext import webapp\nfrom google.appengine.ext.webapp import template\nfrom google.appengine.ext import db\nfrom gaefile import *\nimport time\n\n\nclass Media():\n\tname =\"\"\n\tmtype=\"\"\n\tfilesize=0\n\tdate=time.strftime(\"%Y-%m-%d %X\", time.localtime() ) \n\tdownload=0 \n\tkeyid=db.Key\n\t\n\tdef size(self):\n\t\treturn self.filesize\n\n\nclass Pager(object):\n\tdef __init__(self, model=None,query=None, items_per_page=10):\n\t\tif model:\n\t\t\tself.query = model.all()\n\t\telif query:\n\t\t\tself.query=query\n\t\tself.items_per_page = items_per_page\n\n\tdef fetch(self, p):\n\t\tmax_offset = self.query.count()\n\t\tn = max_offset / self.items_per_page\n\t\tif max_offset % self.items_per_page != 0:\n\t\t\tn += 1\n\t\tif p < 0 or p > n:\n\t\t\tp = 1\n\t\toffset = (p - 1) * self.items_per_page\n\t\tresults = self.query.fetch(self.items_per_page, offset)\n\t\tlinks = {'count':max_offset,'page_index':p,'prev': p - 1, 'next': p + 1, 'last': n}\n\t\tif links['next'] > n:\n\t\t\tlinks['next'] = 0\n\t\treturn (results, links)\n\nclass getMedia(webapp.RequestHandler):\n\tdef get(self,slug):\n\t\tmedia=0\n\t\tbuf=\"\"\n\t\tfile=GFInfoTable()\n\t\tgf=GaeFile()\n\t\tkey=self.request.get('key')\n\t\tfor filekey in gf.gfDir.FileList:\n\t\t\tif str(filekey.id()) == key:\n\t\t\t\tfile=db.get(filekey)\n\t\t\t\tfile.download+=1\n\t\t\t\tbuf=gf.read(file.path)\n\t\t\t\tfile.put()\n\t\t\t\tmtype=file.property\n\t\t\t\tmedia=1\n\t\t\n\t\tif media:\n\t\t\tself.response.headers['Expires'] = 'Thu, 15 Apr 3010 20:00:00 GMT'\n\t\t\tself.response.headers['Cache-Control'] = 'max-age=3600,public'\n\t\t\tself.response.headers['Content-Type'] = str(mtype)\n\t\t\tself.response.out.write(buf)\n\t\t\t\n\t\t\t \nclass Upload(webapp.RequestHandler):\n def post(self):\n name = self.request.get('filename')\n mtype = self.request.get('fileext')\n bits = self.request.get('upfile') \n gf=GaeFile()\n gf.open(name,mtype); \n gf.write(bits)\n gf.close()\n self.redirect('/')\n\nclass FileManager(webapp.RequestHandler):\n\n\tdef __init__(self):\n\t\tself.current='files'\n\n\tdef get(self):\n\t\ttry:\n\t\t\tpage_index=int(self.request.get('page'))\n\t\texcept:\n\t\t\tpage_index=1\n\t\tfiles=[]\n\t\tgf=GaeFile()\n\t\tfor filekey in gf.gfDir.FileList:\n\t\t\tfile = db.get(filekey)\t\t\t\n\t\t\ttmp = Media()\n\t\t\ttmp.name = file.path\n\t\t\ttmp.filesize=file.filesize\n\t\t\ttmp.mtype= file.property\n\t\t\ttmp.keyid = filekey.id()\n\t\t\ttmp.date=file.date\n\t\t\ttmp.download=file.download\n\t\t\tfiles.append(tmp)\n\t\tlinks = {'count':10,'page_index':5,'prev': 5 - 1, 'next': 5 + 1, 'last': 2}\t\t\n\t\ttemplate_values = {'files' : files,'pager':links,}\n\t\tpath = os.path.join(os.path.dirname(__file__), 'views/base.html') \n\t\tself.response.out.write(template.render(path, template_values))\n\t\t\n\tdef post(self): # delete files\n\t\tdelids = self.request.POST.getall('del')\n\t\tif delids:\n\t\t\tfor id in delids:\n\t\t\t\tgf=GaeFile()\n\t\t\t\tfor filekey in gf.gfDir.FileList:\n\t\t\t\t\tif str(filekey.id()) == id:\n\t\t\t\t\t\tfile=db.get(filekey)\n\t\t\t\t\t\tgf.remove(file.path)\n\t\tself.redirect('/') \n\nclass Map(webapp.RequestHandler):\n\tdef get(self):\n\t\tpath = os.path.join(os.path.dirname(__file__), 'views/map.html') \n\t\ttemplate_values 
= {}\n\t\tself.response.out.write(template.render(path, template_values))\n\t\t\ndef main():\n    webapp.template.register_template_library('filter')\n    application = webapp.WSGIApplication(\n        [('/',FileManager),\n         ('/upload',Upload), \n\t\t('/media/([^/]*)/{0,1}.*',getMedia),\n\t\t('/map',Map),\n        ],\n        debug=True)\n    wsgiref.handlers.CGIHandler().run(application)\n\nif __name__ == \"__main__\":\n    main()\n" }, { "alpha_fraction": 0.5501968264579773, "alphanum_fraction": 0.5610235929489136, "avg_line_length": 27.22222137451172, "blob_id": "78a268f214ca342d9e48c5e8230793c1977e4d2f", "content_id": "9179204cba9a00b44ea377cc073ae25f71a48a29", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "HTML", "length_bytes": 1116, "license_type": "permissive", "max_line_length": 129, "num_lines": 36, "path": "/src/views/index.html", "repo_name": "studyzy/gae_image", "src_encoding": "UTF-8", "text": "{% extends \"base.html\" %}\n{% block title %}ShenLan GAE Image Sharing{% endblock %}\n{% block main %}\n<h2>\n<a href=\"/\">Image List</a> \n<a href=\"/admin/upload/\" title=\"Upload Image\">Upload Image</a>\n <a href=\"/admin/upload2/\" title=\"Upload Image\">Batch Upload Images</a>\n <a href=\"/admin/upload3/\" title=\"Upload\">Save Web Image</a>\n <a href=\"/file\" title=\"Upload\">Manage Big Files</a>\n</h2>\n<div id=\"showtag\">\n<fieldset>\n<legend>Tag List</legend>\n{% for t in tags%}\n<a href=\"/tag/{{t.tagName}}\" title=\"Count: {{t.useCount}}\" >{{t.tagName}}</a>\n{% endfor%}\n</fieldset>\n</div>\n<ul id=\"album\">\n\t{% for image in images %}\n\t<li>\n    {% if image.iswebm %}\n<video controls=\"\" loop=\"\" autoplay=\"0\" class=\"expandedWebm\" src=\"/image/{{image.id}}/?.webm\" style=\"max-width: 1299px;\"></video>\n\n{% else %}\n    <a href=\"/show/{{image.id}}/\" target=\"_blank\">\n\t<img src=\"/image/{{image.id}}/\" alt=\"{{image.name}}\" />\n    </a>\n    {% endif %}\n    </li>\n\t{% endfor %}\n</ul>\n<h2 id=\"page\">{% if prev %}<a href=\"{{prev}}\">Previous</a>{% endif%} {%if next%}<a href=\"{{next}}\">Next</a>{% endif %}</h2>\n{% endblock %}\n</body>\n</html>\n" }, { "alpha_fraction": 0.6818181872367859, "alphanum_fraction": 0.6818181872367859, "avg_line_length": 15.5, "blob_id": "ce36fbebf9bbe4670b0ce780091eb3e49618dbbf", "content_id": "caf244b639d5f2c026af23d1e275957421cdb6ec", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 66, "license_type": "permissive", "max_line_length": 44, "num_lines": 4, "path": "/README.md", "repo_name": "studyzy/gae_image", "src_encoding": "UTF-8", "text": "gae_image\n=========\n\nSave my favorite GIFs and images from the internet\n" }, { "alpha_fraction": 0.7727272510528564, "alphanum_fraction": 0.7878788113594055, "avg_line_length": 26.5, "blob_id": "214e72ad1001fc9b9b5ed754a4832e5b79f3b880", "content_id": "c5a30fd198c60aa9bdef786486744bb58a16491f", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 330, "license_type": "permissive", "max_line_length": 56, "num_lines": 12, "path": "/bigfile/filter.py", "repo_name": "studyzy/gae_image", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport logging\nfrom django import template\nimport django.template.defaultfilters as defaultfilters\nimport urllib\nregister = template.Library()\nfrom datetime import *\n\[email protected]\ndef datetz(date,format): #datetime with timedelta\n\tt=timedelta(seconds=3600)\n\treturn defaultfilters.date(date+t,format)\n" }, { "alpha_fraction": 0.5804108381271362, "alphanum_fraction": 
0.5846693515777588, "avg_line_length": 33.42241287231445, "blob_id": "d267fd2e423b6765c4cf4163f5b7148682d4eb19", "content_id": "15b30c65c378c90b51aeeb0c01ec7103e76aa25e", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3992, "license_type": "permissive", "max_line_length": 94, "num_lines": 116, "path": "/src/admin.py", "repo_name": "studyzy/gae_image", "src_encoding": "UTF-8", "text": "#coding:utf-8\nimport wsgiref.handlers\nimport os\nfrom functools import wraps\nfrom google.appengine.ext import webapp\nfrom google.appengine.ext.webapp import template\nfrom google.appengine.api import users\nimport methods,logging\nfrom django.utils import simplejson\n\nadminFlag=True\n\nclass AdminControl(webapp.RequestHandler):\n def render(self,template_file,template_value):\n path=os.path.join(os.path.dirname(__file__),template_file)\n self.response.out.write(template.render(path, template_value))\n def returnjson(self,dit):\n self.response.headers['Content-Type'] = \"application/json\"\n self.response.out.write(simplejson.dumps(dit))\n \ndef requires_admin(method):\n @wraps(method)\n def wrapper(self, *args, **kwargs):\n if not users.is_current_user_admin() and adminFlag:\n self.redirect(users.create_login_url(self.request.uri))\n else:\n return method(self, *args, **kwargs)\n return wrapper\n\nclass Admin_Upload(AdminControl):\n @requires_admin\n def get(self):\n tags=methods.getAllTags()\n template_value={\"tags\":tags}\n self.render('views/upload.html', template_value)\n @requires_admin\n def post(self):\n bf=self.request.get(\"file\")\n name= self.request.get('filename')\n if not bf:\n return self.redirect('/admin/upload/')\n# name=self.request.body_file.vars['file'].filename\n mime = self.request.body_file.vars['file'].headers['content-type']\n if mime.find('image')==-1:\n return self.redirect('/admin/upload/')\n description=self.request.get(\"description\")\n tag=self.request.get(\"txbTag\")\n image=methods.addImage(name, mime, description,tag, bf)\n \n self.redirect('/show/%s/' %image.id)\nclass Admin_Upload3(AdminControl):\n @requires_admin\n def get(self):\n tags=methods.getAllTags()\n template_value={\"tags\":tags}\n self.render('views/upload3.html', template_value)\n @requires_admin\n def post(self):\n imageUrl=self.request.get(\"imageUrl\")\n fileName=self.request.get(\"fileName\")\n \n tag=self.request.get(\"txbTag\")\n if not imageUrl:\n return self.redirect('/admin/upload3/')\n image=methods.AddImageByUrl(imageUrl,fileName,tag)\n if not image:\n self.redirect('/admin/upload3/')\n else:\n self.redirect('/show/%s/' %image.id)\nclass Admin_Upload2(AdminControl):\n @requires_admin\n def get(self):\n self.render('views/upload2.html', {})\n @requires_admin\n def post(self):\n dit={\"result\":\"error\"}\n bf=self.request.get(\"Filedata\")\n if not bf:\n return self.returnjson(dit)\n image=methods.addImage2(bf)\n if not image:\n return self.returnjson(dit)\n dit[\"result\"]=\"ok\"\n dit[\"id\"]=image.id\n return self.returnjson(dit)\n \nclass Delete_Image(AdminControl):\n @requires_admin\n def get(self,key):\n methods.delImage(key)\n self.redirect('/')\n \nclass Delete_Image_ID(AdminControl):\n @requires_admin\n def get(self,id):\n methods.delImageByid(id)\n self.redirect('/')\n\nclass Admin_Login(AdminControl):\n @requires_admin\n def get(self):\n self.redirect('/')\n \ndef main():\n application = webapp.WSGIApplication(\n [(r'/admin/upload/', Admin_Upload),\n (r'/admin/upload2/', Admin_Upload2),\n (r'/admin/upload3/', 
Admin_Upload3),\n (r'/admin/del/(?P<key>[a-z,A-Z,0-9]+)', Delete_Image),\n (r'/admin/delid/(?P<id>[0-9]+)/', Delete_Image_ID),\n (r'/admin/', Admin_Login),\n ], debug=True)\n wsgiref.handlers.CGIHandler().run(application)\n\nif __name__ == \"__main__\":\n main()" }, { "alpha_fraction": 0.5206491947174072, "alphanum_fraction": 0.5323866009712219, "avg_line_length": 33.16831588745117, "blob_id": "f0f0aa7a0f14c4319a43e1ad0dab4c7cb3406805", "content_id": "25b9dfcf8ecade7b0fc4729da443788580be917f", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6901, "license_type": "permissive", "max_line_length": 98, "num_lines": 202, "path": "/src/main.py", "repo_name": "studyzy/gae_image", "src_encoding": "UTF-8", "text": "#coding:utf-8\nimport wsgiref.handlers\nimport os\nfrom google.appengine.ext import webapp\nfrom google.appengine.ext.webapp import template\nfrom google.appengine.api import users\nfrom google.appengine.ext import db\nfrom gaefile import *\nfrom admin import requires_admin\nimport time\nimport methods\nimport logging\nimport urllib\n\n\ndef format_date(dt):\n return dt.strftime('%a, %d %b %Y %H:%M:%S GMT')\n\nclass PublicPage(webapp.RequestHandler):\n def render(self, template_file, template_value):\n path = os.path.join(os.path.dirname(__file__), template_file)\n self.response.out.write(template.render(path, template_value))\n \n def error(self,code):\n if code==400:\n self.response.set_status(code)\n else:\n self.response.set_status(code)\n \n def is_admin(self):\n return users.is_current_user_admin()\n \n def head(self, *args):\n return self.get(*args) \n \nclass MainPage(PublicPage):\n @requires_admin\n def get(self,page):\n index=0 if page==\"\" else int(page)\n images=methods.getAllImages(index)\n prev,next=methods.getPageing(len(images), index)\n tags=methods.getAllTags()\n template_value={\"images\":images[:24],\"prev\":prev,\"next\":next,\"tags\":tags}\n self.render('views/index.html', template_value)\n\nclass ShowImage(PublicPage):\n def get(self,id):\n image=methods.getImage(id)\n if not image:return self.error(404)\n template_value={\"image\":image,\"admin\":self.is_admin(),\"webm\":image.filetype==\"video/webm\"}\n self.render('views/show.html', template_value)\nclass ShowTagImage(PublicPage):\n def get(self,tag):\n tagString=urllib.unquote_plus(tag)\n images=methods.getAllImagesByTag(tagString)\n template_value={\"images\":images[:24],\"tag\": tagString }\n self.render('views/tagimage.html', template_value) \n \nclass GetImage(PublicPage):\n def get(self,size,id):\n dic=self.request.headers\n key=dic.get(\"If-None-Match\")\n self.response.headers['ETag']=size+id\n if key and key==size+id:\n return self.error(304)\n image=methods.downImage(id, size)\n if not image:\n return self.error(404)\n if image.width==-1:\n self.redirect(image.description)\n else:\n self.response.headers['Content-Type'] = str(image.mime) \n self.response.headers['Cache-Control']=\"max-age=315360000\"\n self.response.headers['Last-Modified']=format_date(image.created_at)\n self.response.out.write(image.bf)\n\nclass Error(PublicPage):\n def get(self):\n return self.error(404)\n\nclass Media():\n name =\"\"\n mtype=\"\"\n filesize=0\n date=time.strftime(\"%Y-%m-%d %X\", time.localtime() ) \n download=0 \n keyid=db.Key\n \n def size(self):\n return self.filesize\n\n\nclass Pager(object):\n def __init__(self, model=None,query=None, items_per_page=10):\n if model:\n self.query = model.all()\n elif query:\n self.query=query\n 
self.items_per_page = items_per_page\n\n def fetch(self, p):\n max_offset = self.query.count()\n n = max_offset / self.items_per_page\n if max_offset % self.items_per_page != 0:\n n += 1\n if p < 0 or p > n:\n p = 1\n offset = (p - 1) * self.items_per_page\n results = self.query.fetch(self.items_per_page, offset)\n links = {'count':max_offset,'page_index':p,'prev': p - 1, 'next': p + 1, 'last': n}\n if links['next'] > n:\n links['next'] = 0\n return (results, links)\n\nclass getMedia(webapp.RequestHandler):\n def get(self,slug):\n media=0\n buf=\"\"\n file=GFInfoTable()\n gf=GaeFile()\n key=self.request.get('key')\n for filekey in gf.gfDir.FileList:\n if str(filekey.id()) == key:\n file=db.get(filekey)\n file.download+=1\n buf=gf.read(file.path)\n file.put()\n mtype=file.property\n media=1\n \n if media:\n self.response.headers['Expires'] = 'Thu, 15 Apr 3010 20:00:00 GMT'\n self.response.headers['Cache-Control'] = 'max-age=3600,public'\n self.response.headers['Content-Type'] = str(mtype)\n self.response.out.write(buf)\n \n \nclass Upload(webapp.RequestHandler):\n def post(self):\n name = self.request.get('filename')\n mtype = self.request.get('fileext')\n bits = self.request.get('upfile') \n gf=GaeFile()\n gf.open(name,mtype); \n gf.write(bits)\n gf.close()\n self.redirect('/')\n\nclass FileManager(webapp.RequestHandler):\n\n def __init__(self):\n self.current='files'\n\n def get(self):\n try:\n page_index=int(self.request.get('page'))\n except:\n page_index=1\n files=[]\n gf=GaeFile()\n for filekey in gf.gfDir.FileList:\n file = db.get(filekey) \n tmp = Media()\n tmp.name = file.path\n tmp.filesize=file.filesize\n tmp.mtype= file.property\n tmp.keyid = filekey.id()\n tmp.date=file.date\n tmp.download=file.download\n files.append(tmp)\n links = {'count':10,'page_index':5,'prev': 5 - 1, 'next': 5 + 1, 'last': 2} \n template_values = {'files' : files,'pager':links,}\n path = os.path.join(os.path.dirname(__file__), 'views/filebase.html') \n self.response.out.write(template.render(path, template_values))\n \n def post(self): # delete files\n delids = self.request.POST.getall('del')\n if delids:\n for id in delids:\n gf=GaeFile()\n for filekey in gf.gfDir.FileList:\n if str(filekey.id()) == id:\n file=db.get(filekey)\n gf.remove(file.path)\n self.redirect('/') \n\ndef main():\n application = webapp.WSGIApplication(\n [('/(?P<page>[0-9]*)/?', MainPage),\n (r'/(?P<size>image)/(?P<id>[0-9]+)/?',GetImage),\n (r'/(?P<size>s)/(?P<id>[0-9]+)/?',GetImage),\n (r'/tag/(?P<tag>.+)/?',ShowTagImage),\n (r'/show/(?P<id>[0-9]+)/?',ShowImage),\n ('/file',FileManager),\n ('/upload',Upload), \n ('/media/([^/]*)/{0,1}.*',getMedia),\n ('.*',Error)\n ], debug=True)\n wsgiref.handlers.CGIHandler().run(application)\n\nif __name__ == \"__main__\":\n main()" }, { "alpha_fraction": 0.6284403800964355, "alphanum_fraction": 0.633537232875824, "avg_line_length": 29.184616088867188, "blob_id": "be9f6f4d0f7a0b625f5b63b180b5b2fafe14c574", "content_id": "4d47515da8eb30f891f9da510c15eeecefcf16d1", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 2014, "license_type": "permissive", "max_line_length": 127, "num_lines": 65, "path": "/src/static/main.js", "repo_name": "studyzy/gae_image", "src_encoding": "UTF-8", "text": "var isAjaxSupported = (window.ActiveXObject != \"undefined\" || window.XMLHttpRequest != \"undefined\");\nfunction $(id) {\n return document.getElementById(id);\n}\n\nfunction copyCode(e) {\n var text = '';\n if (typeof e != 'string') {\n 
var iptEle = e.parentNode.parentNode.getElementsByTagName('input')[0];\n        iptEle.select();\n        text = iptEle.value;\n\n    } else {\n        text = e;\n    }\n    copyCode1(text);\n}\n\nfunction copyCode1(testCode) {\n    if (copy2Clipboard(testCode) != false) {\n        alert(\"Copied successfully\");\n    }\n}\n\ncopy2Clipboard = function(txt) {\n    if (window.clipboardData) {\n        window.clipboardData.clearData();\n        window.clipboardData.setData(\"Text\", txt);\n    }\n    else if (navigator.userAgent.indexOf(\"Opera\") != -1) {\n        window.location = txt;\n    }\n    else if (window.netscape) {\n        try {\n            netscape.security.PrivilegeManager.enablePrivilege(\"UniversalXPConnect\");\n        }\n        catch (e) {\n            alert(\"Your Firefox blocks this operation. Open greprefs/all.js under the Firefox root directory, set 'signed.applets.codebase_principal_support' to true, and retry!\");\n            return false;\n        }\n        var clip = Components.classes['@mozilla.org/widget/clipboard;1'].createInstance(Components.interfaces.nsIClipboard);\n        if (!clip) return;\n        var trans = Components.classes['@mozilla.org/widget/transferable;1'].createInstance(Components.interfaces.nsITransferable);\n        if (!trans) return;\n        trans.addDataFlavor('text/unicode');\n        var str = Components.classes[\"@mozilla.org/supports-string;1\"].createInstance(Components.interfaces.nsISupportsString);\n        var copytext = txt; str.data = copytext;\n        trans.setTransferData(\"text/unicode\", str, copytext.length * 2);\n        var clipid = Components.interfaces.nsIClipboard;\n        if (!clip) return false;\n        clip.setData(trans, null, clipid.kGlobalClipboard);\n    }\n}\n" } ]
10
clercrobin/nmtlab
https://github.com/clercrobin/nmtlab
d6273e1010d33c75ee2752e8dd7b502c92934aad
81cc97208cf63314893cbdfef9e630db02f1ac8d
7e72d3a083cf56760ac1f1df719d7387ff92f453
refs/heads/master
2020-03-29T07:21:09.224173
2018-09-19T08:11:55
2018-09-19T08:11:55
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5464876294136047, "alphanum_fraction": 0.547520637512207, "avg_line_length": 22.609756469726562, "blob_id": "47b535d8af598ec3ddbaf102363c346657b7314f", "content_id": "b53167c959f4b1ddb23992fca9d2058800bc105f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 968, "license_type": "permissive", "max_line_length": 46, "num_lines": 41, "path": "/nmtlab/utils/lazydict.py", "repo_name": "clercrobin/nmtlab", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nfrom collections import Mapping\n\n\nclass LazyDict(Mapping):\n \"\"\"Lazily evaluated map\n \"\"\"\n \n def __init__(self, *args, **kwargs):\n self._raw_dict = dict(*args, **kwargs)\n \n def __getattr__(self, attr):\n return self._raw_dict.get(attr)(attr)\n \n def __getitem__(self, item):\n return self._raw_dict.get(item)(item)\n \n def __setitem__(self, key, func):\n self._raw_dict.update({key: func})\n \n def __delattr__(self, item):\n self._raw_dict.__delitem__(item)\n \n def __delitem__(self, key):\n self._raw_dict.__delitem__(key)\n \n def __iter__(self):\n return iter(self._raw_dict)\n \n def __len__(self):\n return len(self._raw_dict)\n \n def update(self, m):\n for k, v in m.items():\n self[k] = v\n" }, { "alpha_fraction": 0.5378457307815552, "alphanum_fraction": 0.5519165396690369, "avg_line_length": 43.31182861328125, "blob_id": "91c5c420c088f87630db3e5a939618fd37b9e7ed", "content_id": "d1df3e7d2a31a3e8a38ae854ad0a8c7c35df60cd", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4122, "license_type": "permissive", "max_line_length": 130, "num_lines": 93, "path": "/nmtlab/modules/multihead_attention.py", "repo_name": "clercrobin/nmtlab", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\n\nclass MultiHeadAttention(nn.Module):\n \n def __init__(self, num_head=4, hidden_size=None, additive=False):\n super(MultiHeadAttention, self).__init__()\n self._num_head = num_head\n self._hidden_size = hidden_size\n self._additive = additive\n if additive and hidden_size is None:\n raise Exception(\"hidden_size can not be None for additive attention.\")\n if additive:\n self.W_q = nn.Parameter(torch.randn((hidden_size, hidden_size)))\n self.W_k = nn.Parameter(torch.randn((hidden_size, hidden_size)))\n self.V_a = nn.Parameter(torch.randn(hidden_size))\n \n def compute_logits(self, query, keys):\n if self._additive:\n h_q = torch.matmul(query, self.W_q)\n h_k = torch.matmul(keys, self.W_k)\n if query.dim() == 2:\n h = h_q[:, None, :] + h_k\n h = torch.tanh(h)\n h = h * self.V_a[None, None, :]\n new_size = list(h.shape[:2]) + [self._num_head, -1]\n logits = h.view(new_size).sum(-1)\n logits = logits.permute(0, 2, 1) # ~ B x head N x enc N\n else:\n h = h_q[:, :, None, :] + h_k[:, None, :, :]\n h = torch.tanh(h)\n h = h * self.V_a[None, None, None, :]\n new_size = list(h.shape[:3]) + [self._num_head, -1]\n logits = h.view(new_size).sum(-1) # ~ B x dec N x enc N x head N\n logits = logits.permute(0, 3, 1, 2) # ~ B x head N x dec N x enc N\n else:\n raise NotImplementedError\n return logits\n \n def forward_2d(self, query, keys, values, 
mask=None):\n \"\"\"Compute attention for 2-dimensional queries (batch x hidden).\n \"\"\"\n logits = self.compute_logits(query, keys)\n if mask is not None:\n penalty = (1 - mask.float()) * 99.\n logits -= penalty[:, None, :]\n weights = F.softmax(logits, dim=-1)\n if weights.shape[0] != values.shape[0]:\n values = values.expand(\n [weights.shape[0]] + list(values.shape)[1:])\n n_batch, n_head, _ = list(weights.shape)\n _, n_enc, n_hidden = list(values.shape)\n new_values = values.view(n_batch, n_enc, n_head, -1).permute(0, 2, 1, 3).contiguous().view(n_batch * n_head, n_enc, -1)\n context_vector = torch.bmm(weights.view(n_batch * n_head, 1, n_enc), new_values).squeeze(1)\n context_vector = context_vector.view(n_batch, n_head, -1).view(n_batch, -1)\n return context_vector, weights\n \n def forward_3d(self, query, keys, values, mask=None):\n \"\"\"Compute attention for 3-dimensional input (batch x step x hidden).\n \"\"\"\n logits = self.compute_logits(query, keys)\n if mask is not None:\n penalty = (1 - mask.float()) * 99.\n logits -= penalty[:, None, None, :]\n weights = F.softmax(logits, dim=-1)\n n_batch, n_head, n_dec, _ = list(weights.shape)\n _, n_enc, n_hidden = list(values.shape)\n new_values = values.view(n_batch, n_enc, n_head, -1).permute(0, 2, 1, 3).contiguous().view(n_batch * n_head, n_enc, -1)\n context_vector = torch.bmm(weights.view(n_batch * n_head, n_dec, n_enc), new_values)\n context_vector = context_vector.view(n_batch, n_head, n_dec, -1).permute(0, 2, 1, 3).contiguous().view(n_batch, n_dec, -1)\n return context_vector, weights\n \n def forward(self, query, keys, values, mask=None):\n \"\"\"Compute the context vector with key value attention.\n \n Returns:\n context vector and attention weights.\n \"\"\"\n if query.dim() == 2:\n return self.forward_2d(query, keys, values, mask)\n elif query.dim() == 3:\n return self.forward_3d(query, keys, values, mask)\n else:\n raise NotImplementedError\n\n" }, { "alpha_fraction": 0.5589544177055359, "alphanum_fraction": 0.5723025798797607, "avg_line_length": 34.235294342041016, "blob_id": "b1d396e17eb041347e399815ae81ce38f20ea6c3", "content_id": "89703f8bb39106e6bb4fc90121651d47365ded51", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1798, "license_type": "permissive", "max_line_length": 77, "num_lines": 51, "path": "/nmtlab/modules/kv_attention.py", "repo_name": "clercrobin/nmtlab", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\n\nclass KeyValAttention(nn.Module):\n \n def forward_2d(self, query, keys, values, mask=None):\n \"\"\"Compute attention for 2-dimensional queries (batch x hidden).\n \"\"\"\n logits = (query[:, None, :] * keys).sum(dim=2)\n if mask is not None:\n penalty = (1 - mask.float()) * 99.\n logits -= penalty\n weights = F.softmax(logits, dim=1)\n if weights.shape[0] != values.shape[0]:\n values = values.expand(\n [weights.shape[0]] + list(values.shape)[1:])\n context_vector = torch.bmm(weights[:, None, :], values).squeeze(1)\n return context_vector, weights\n \n def forward_3d(self, query, keys, values, mask=None):\n \"\"\"Compute attention for 3-dimensional input (batch x step x hidden).\n \"\"\"\n logits = (query[:, :, None, :] * keys[:, None, :, :]).sum(dim=3)\n if mask is not None:\n penalty = (1 - mask.float()) * 99.\n logits 
-= penalty[:, None, :]\n weights = F.softmax(logits, dim=2)\n context_vector = torch.bmm(weights, values)\n return context_vector, weights\n \n def forward(self, query, keys, values, mask=None):\n \"\"\"Compute the context vector with key value attention.\n \n Returns:\n context vector and attention weights.\n \"\"\"\n if query.dim() == 2:\n return self.forward_2d(query, keys, values, mask)\n elif query.dim() == 3:\n return self.forward_3d(query, keys, values, mask)\n else:\n raise NotImplementedError\n\n" }, { "alpha_fraction": 0.7451737523078918, "alphanum_fraction": 0.7490347623825073, "avg_line_length": 24.899999618530273, "blob_id": "ded2842dd9c1a4e22e732df310de5411062a4e2e", "content_id": "e668a6f89d4384f6c2b204c149b1221d26b316e0", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 259, "license_type": "permissive", "max_line_length": 64, "num_lines": 10, "path": "/nmtlab/utils/__init__.py", "repo_name": "clercrobin/nmtlab", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nfrom __future__ import absolute_import, print_function, division\n\nfrom .mapdict import MapDict\nfrom .lazydict import LazyDict\nfrom .vocab import Vocab\nfrom .bleu import bleu, smoothed_bleu\nfrom .opts import OPTS\n" }, { "alpha_fraction": 0.5606422424316406, "alphanum_fraction": 0.5671512484550476, "avg_line_length": 38.562232971191406, "blob_id": "52fd405ee40da0279432f2b1422bd26522a6a112", "content_id": "594c4b8b5c37931f9337e3dbb13cb195bb82f5b9", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9218, "license_type": "permissive", "max_line_length": 130, "num_lines": 233, "path": "/nmtlab/models/encoder_decoder.py", "repo_name": "clercrobin/nmtlab", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nfrom six.moves import zip\n\nfrom abc import abstractmethod, ABCMeta\n\nimport numpy as np\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nfrom torch.autograd import Variable\n\nfrom nmtlab.utils import MapDict, LazyDict\nfrom nmtlab.utils import OPTS\n\n\nclass EncoderDecoderModel(nn.Module):\n \n __metaclass__ = ABCMeta\n\n def __init__(self, hidden_size=512, embed_size=512,\n src_vocab_size=None, tgt_vocab_size=None,\n dataset=None,\n state_names=None, state_sizes=None,\n label_uncertainty=0):\n super(EncoderDecoderModel, self).__init__()\n if dataset is None and (src_vocab_size is None or tgt_vocab_size is None):\n raise SystemError(\"src_vocab_size and tgt_vocab_size must be specified.\")\n self._hidden_size = hidden_size\n self._embed_size = embed_size\n self._stepwise_training = True\n self._label_uncertainty = label_uncertainty\n if dataset is not None:\n self._src_vocab_size, self._tgt_vocab_size = dataset.vocab_sizes()\n else:\n self._src_vocab_size = src_vocab_size\n self._tgt_vocab_size = tgt_vocab_size\n self._state_names = state_names if state_names else [\"hidden\", \"cell\"]\n self._state_sizes = state_sizes if state_sizes else [self._hidden_size] * len(\n self._state_names)\n self._monitors = {}\n self._layers = []\n self.prepare()\n self.initialize_parameters()\n \n def initialize_parameters(self):\n \"\"\"Initialize the parameters in the model.\"\"\"\n # Initialize weights\n def get_fans(shape):\n fan_in = shape[0] if len(shape) == 2 else np.prod(shape[1:])\n fan_out = 
shape[1] if len(shape) == 2 else shape[0]\n            return fan_in, fan_out\n        for param in self.parameters():\n            shape = param.shape\n            if len(shape) > 1:\n                nn.init.xavier_uniform_(param)\n                # scale = np.sqrt(6. / sum(get_fans(shape)))\n                # param.data.uniform_(- scale, scale)\n        for module in self.modules():\n            if isinstance(module, nn.Linear):\n                nn.init.constant_(module.bias, 0.0)\n            # Initialize LSTM (forget-gate bias is set to 1)\n            if isinstance(module, nn.LSTM):\n                for name, param in module.named_parameters():\n                    if \"bias\" in name:\n                        nn.init.constant_(param, 0.0)\n                        n = param.size(0)\n                        param.data[n//4: n//2].fill_(1.)\n    \n    def set_states(self, state_names, state_sizes=None):\n        \"\"\"Set state names and sizes for the decoder.\n        \"\"\"\n        self._state_names = state_names\n        if state_sizes is not None:\n            self._state_sizes = state_sizes\n        else:\n            self._state_sizes = [self._hidden_size] * len(state_names)\n    \n    def set_stepwise_training(self, flag=True):\n        \"\"\"Set whether the model is autoregressive when training.\n        \"\"\"\n        self._stepwise_training = flag\n\n    @abstractmethod\n    def prepare(self):\n        \"\"\"Create layers.\n        \"\"\"\n\n    @abstractmethod\n    def encode(self, src_seq, src_mask=None):\n        \"\"\"Encode input sequence and return a value map.\n        \"\"\"\n\n    @abstractmethod\n    def lookup_feedback(self, feedback):\n        \"\"\"Get the word embeddings of feedback tokens.\n        \"\"\"\n\n    @abstractmethod\n    def decode_step(self, context, states, full_sequence=False):\n        \"\"\"Computations of each decoding step.\n        \"\"\"\n\n    def decode(self, context, states, sampling=False):\n        \"\"\"Decode the output states.\n        \"\"\"\n        if not self._stepwise_training and not sampling:\n            states.feedback_embed = self.lookup_feedback(context.feedbacks)\n            self.decode_step(context, states, full_sequence=True)\n            return states\n        else:\n            T = context.feedbacks.shape[1]\n            state_stack = []\n            steps = T + 9 if sampling else T - 1\n            for t in range(steps):\n                states = states.copy()\n                states.t = t\n                if sampling:\n                    if t == 0:\n                        feedback = context.feedbacks[:, 0].unsqueeze(0)\n                    else:\n                        logits = self.expand(states)\n                        feedback = logits.argmax(-1)\n                    states.sampled_token = feedback\n                    states.feedback_embed = self.lookup_feedback(feedback.squeeze(0))\n                else:\n                    states.feedback_embed = context.feedback_embeds[:, t]\n                self.decode_step(context, states)\n                state_stack.append(states)\n            return self.combine_states(state_stack)\n\n    def combine_states(self, state_stack):\n        lazydict = LazyDict()\n        for state_name in state_stack[0]:\n            tensor = state_stack[0][state_name]\n            if hasattr(tensor, \"shape\") and len(tensor.shape) >= 2:\n                lazydict[state_name] = lambda name: torch.cat([m[name] for m in state_stack], 0).transpose(1, 0)\n        return lazydict\n    \n    def pre_decode(self, encoder_outputs, tgt_seq, extra_states=None, src_mask=None, tgt_mask=None):\n        \"\"\"Prepare the context and initial states for decoding.\n        \"\"\"\n        feedback_embeds = self.lookup_feedback(tgt_seq)\n\n        B = tgt_seq.shape[0]\n        context = encoder_outputs\n        states = MapDict({\"t\": 0})\n        context[\"feedbacks\"] = tgt_seq\n        context[\"feedback_embeds\"] = feedback_embeds\n        # Process initial states\n        for state_name, size in zip(self._state_names, self._state_sizes):\n            if \"init_{}\".format(state_name) in context:\n                states[state_name] = context[\"init_{}\".format(state_name)]\n                if len(states[state_name].shape) == 2:\n                    states[state_name] = states[state_name].unsqueeze(0)\n                del context[\"init_{}\".format(state_name)]\n            else:\n                states[state_name] = Variable(torch.zeros((1, B, size)))\n                if torch.cuda.is_available():\n                    states[state_name] = 
states[state_name].cuda()\n        if extra_states is not None:\n            # merge caller-provided initial states into the decoder states\n            states.update(extra_states)\n        # Process mask\n        if src_mask is not None:\n            context[\"src_mask\"] = src_mask\n        return context, states\n    \n    @abstractmethod\n    def expand(self, states):\n        \"\"\"\n        Expand decoder outputs to a vocab-size tensor.\n        \"\"\"\n    \n    def compute_loss(self, logits, tgt_seq, tgt_mask):\n        if self._label_uncertainty > 0 and self.training:\n            uniform_seq = tgt_seq.float().uniform_(0, self._tgt_vocab_size)\n            smooth_mask = tgt_seq.float().bernoulli_(self._label_uncertainty)\n            tgt_seq = (1 - smooth_mask) * tgt_seq.float() + smooth_mask * uniform_seq\n            tgt_seq = tgt_seq.long()\n        B, T, _ = logits.shape\n        logits = F.log_softmax(logits, dim=2)\n        flat_logits = logits.contiguous().view(B * T, self._tgt_vocab_size)\n        flat_targets = tgt_seq[:, 1:].contiguous().view(B * T)\n        flat_mask = tgt_mask[:, 1:].contiguous().view(B * T)\n        loss = nn.NLLLoss(ignore_index=0, reduce=False).forward(flat_logits, flat_targets)\n        if OPTS.wordloss:\n            loss = loss.sum() / tgt_mask[:, 1:].sum().float()\n        else:\n            loss = (loss.view(B, T).sum(1) / (tgt_mask.sum(1) - 1).float()).mean()\n        word_acc = (flat_logits.argmax(1).eq(flat_targets) * flat_mask).view(B, T).sum(1).float() / tgt_mask[:, 1:].sum(1).float()\n        word_acc = word_acc.mean()\n        self.monitor(\"word_acc\", word_acc)\n        return loss\n    \n    def monitor(self, key, value):\n        \"\"\"Monitor a value with the key.\n        \"\"\"\n        self._monitors[key] = value\n    \n    def forward(self, src_seq, tgt_seq, sampling=False):\n        \"\"\"\n        Forward to compute the loss.\n        \"\"\"\n        src_mask = torch.ne(src_seq, 0)\n        tgt_mask = torch.ne(tgt_seq, 0)\n        encoder_outputs = MapDict(self.encode(src_seq, src_mask))\n        context, states = self.pre_decode(encoder_outputs, tgt_seq, src_mask=src_mask, tgt_mask=tgt_mask)\n        decoder_outputs = self.decode(context, states)\n        logits = self.expand(decoder_outputs)\n        if sampling:\n            context, states = self.pre_decode(encoder_outputs, tgt_seq, src_mask=src_mask, tgt_mask=tgt_mask)\n            sample_outputs = self.decode(context, states, sampling=True)\n            self.monitor(\"sampled_tokens\", sample_outputs.sampled_token)\n        loss = self.compute_loss(logits, tgt_seq, tgt_mask)\n        self.monitor(\"loss\", loss)\n        return self._monitors\n\n    def load(self, path):\n        state_dict = torch.load(path)\n        if \"model_state\" in state_dict:\n            state_dict = state_dict[\"model_state\"]\n        self.load_state_dict(state_dict)\n    \n    def state_names(self):\n        return self._state_names\n    \n    def state_sizes(self):\n        return self._state_sizes\n" }, { "alpha_fraction": 0.5992779731750488, "alphanum_fraction": 0.603145956993103, "avg_line_length": 38.9793815612793, "blob_id": "088d537ed0e78cc78db014ad819cf71c2b6aeae1", "content_id": "496eada378b1856f3bd376c5d2f48cc9db6cae47", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3878, "license_type": "permissive", "max_line_length": 174, "num_lines": 97, "path": "/nmtlab/dataset/mt_dataset.py", "repo_name": "clercrobin/nmtlab", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport torchtext\nimport numpy as np\nfrom nmtlab.utils.vocab import Vocab\nfrom nmtlab.dataset.bilingual_dataset import BilingualDataset\nfrom nmtlab.dataset.base import Dataset\n\n\nclass MTDataset(Dataset):\n    \"\"\"Bilingual dataset.\n    \"\"\"\n    \n    def __init__(self, corpus_path=None, src_corpus=None, tgt_corpus=None, 
src_vocab=None, tgt_vocab=None, batch_size=64, max_length=60, n_valid_samples=1000, truncate=None):\n        \n        assert corpus_path is not None or (src_corpus is not None and tgt_corpus is not None)\n        assert src_vocab is not None and tgt_vocab is not None\n        \n        self._max_length = max_length\n        self._n_valid_samples = n_valid_samples\n        \n        src = torchtext.data.Field(pad_token=\"<null>\", preprocessing=lambda seq: [\"<s>\"] + seq + [\"</s>\"])\n        self._src_vocab = src.vocab = Vocab(src_vocab)\n        tgt = torchtext.data.Field(pad_token=\"<null>\", preprocessing=lambda seq: [\"<s>\"] + seq + [\"</s>\"])\n        self._tgt_vocab = tgt.vocab = Vocab(tgt_vocab)\n        # Make data\n        if corpus_path is not None:\n            self._data = torchtext.data.TabularDataset(\n                path=corpus_path, format='tsv',\n                fields=[('src', src), ('tgt', tgt)],\n                filter_pred=self._len_filter\n            )\n        else:\n            self._data = BilingualDataset(src_corpus, tgt_corpus, src, tgt, filter_pred=self._len_filter)\n        # Create training and valid dataset\n        examples = self._data.examples\n        if truncate is not None:\n            assert type(truncate) == int\n            examples = examples[:truncate]\n        n_train_samples = len(examples) - n_valid_samples\n        # round down to a whole number of batches; use the argument here,\n        # since self._batch_size is only set by the base constructor below\n        n_train_samples = int(n_train_samples / batch_size) * batch_size\n        np.random.RandomState(3).shuffle(examples)\n        valid_data = torchtext.data.Dataset(\n            examples[:n_valid_samples],\n            fields=[('src', src), ('tgt', tgt)],\n            filter_pred=self._len_filter\n        )\n        train_data = torchtext.data.Dataset(\n            examples[n_valid_samples:n_valid_samples + n_train_samples],\n            fields=[('src', src), ('tgt', tgt)],\n            filter_pred=self._len_filter\n        )\n        super(MTDataset, self).__init__(train_data=train_data, valid_data=valid_data, batch_size=batch_size)\n\n    def set_gpu_scope(self, scope_index, n_scopes):\n        \"\"\"Training a specific part of data for multigpu environment.\n        \"\"\"\n        examples = self._train_data.examples\n        scope_size = int(float(len(examples)) / n_scopes)\n        self._train_data.examples = examples[scope_index * scope_size: (scope_index + 1) * scope_size]\n        # keep the per-GPU batch size an integer\n        self._batch_size = int(self._batch_size / n_scopes)\n    \n    @staticmethod\n    def _len_filter(sample):\n        return len(sample.src) <= 60 and len(sample.tgt) <= 60\n    \n    def n_train_samples(self):\n        return len(self._train_data.examples)\n    \n    def train_set(self):\n        batch_iterator = torchtext.data.BucketIterator(\n            dataset=self._train_data, batch_size=self._batch_size,\n            sort=False, sort_within_batch=True,\n            shuffle=True,\n            sort_key=lambda x: len(x.src),\n            device=None, repeat=False)\n        return iter(batch_iterator)\n    \n    def valid_set(self):\n        batch_iterator = torchtext.data.BucketIterator(\n            dataset=self._valid_data, batch_size=self._batch_size,\n            sort=True, sort_within_batch=True,\n            shuffle=False, train=False,\n            sort_key=lambda x: len(x.src),\n            device=None, repeat=False)\n        return batch_iterator\n    \n    def src_vocab(self):\n        return self._src_vocab\n    \n    def tgt_vocab(self):\n        return self._tgt_vocab\n" }, { "alpha_fraction": 0.34375, "alphanum_fraction": 0.5625, "avg_line_length": 15, "blob_id": "e6f5d61211afc0b5c86c34e9038859dfc3d2531c", "content_id": "55bb738b94af0ab88b34571a38e8977819cb9323", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 64, "license_type": "permissive", "max_line_length": 18, "num_lines": 4, "path": "/requirements.txt", "repo_name": "clercrobin/nmtlab", "src_encoding": "UTF-8", "text": "six >= 1.11.0\ntorchtext >= 0.2.3\ntorch >= 0.4.0\nnumpy >= 1.14.0\n" } ]
7
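The `KeyValAttention` code in the `clercrobin/nmtlab` record above dispatches on the query rank and, for masked positions, subtracts a large penalty from the logits before the softmax. Below is a minimal sketch of that masked key-value attention pattern, assuming batch-first `(B, T, D)` tensors; the function name and the `masked_fill` trick are illustrative stand-ins for the repository's penalty subtraction, not its actual code:

```python
# Minimal sketch of masked key-value attention, assuming batch-first
# query (B, Tq, D), keys (B, Tk, D), values (B, Tk, D), mask (B, Tk).
import torch
import torch.nn.functional as F

def key_value_attention(query, keys, values, mask=None):
    # alignment score of every query position against every key position
    logits = torch.bmm(query, keys.transpose(1, 2))
    if mask is not None:
        # filling masked positions with a large negative value drives their
        # softmax weights toward zero, equivalent to subtracting a penalty
        logits = logits.masked_fill(mask.unsqueeze(1) == 0, -1e9)
    weights = F.softmax(logits, dim=2)
    context = torch.bmm(weights, values)  # weighted mix of the values
    return context, weights
```

For a single decoding step the query is simply `(B, 1, D)`, which is what the 2D branch of the original `forward` handles after reshaping.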
tripuspiit/tri
https://github.com/tripuspiit/tri
e7d258fed88202ce7540b5610094568a57dc0e18
b7485402c943683251ce8373fa5ad8de43b1e308
165ab1fca241903858f519d8d2150c5c62e42e6d
refs/heads/main
2023-06-05T15:44:27.400994
2021-06-22T14:11:44
2021-06-22T14:11:44
379,289,931
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.548170268535614, "alphanum_fraction": 0.6168782711029053, "avg_line_length": 24.780000686645508, "blob_id": "1149b854eef4f2e54ebb1605c1892386238d33fa", "content_id": "c6b5ec96d85b3c4cef0827377e3ed65231683fca", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1339, "license_type": "no_license", "max_line_length": 98, "num_lines": 50, "path": "/main.py", "repo_name": "tripuspiit/tri", "src_encoding": "UTF-8", "text": "# This is a sample Python script.\r\n\r\n# Press Shift+F10 to execute it or replace it with your code.\r\n# Press Double Shift to search everywhere for classes, files, tool windows, actions, and settings.\r\n\r\n\r\ndef print_hi(name):\r\n # Use a breakpoint in the code line below to debug your script.\r\n print(f'Hi, {name}') # Press Ctrl+F8 to toggle the breakpoint.\r\n\r\n\r\n# Press the green button in the gutter to run the script.\r\nif __name__ == '__main__':\r\n print_hi('PyCharm')\r\n\r\n# See PyCharm help at https://www.jetbrains.com/help/pycharm/\r\n# CONTOH HASIL NILAI LATIHAN SOAL MAHASISWA\r\n\r\n#Mhs 1 - 80 88 89 90\r\n#Mhs 2 - 90 89 88\r\n#Mhs 3 - 88 80 90 89\r\n#Mhs 4 - 89 80 90 88\r\n\r\nNilai = [[80, 88, 89, 90], [90, 89, 88], [88, 80, 90, 89], [89, 80, 90, 88]]\r\n #Mengisi data array 2 dimensi, perhatikan cara berikut ini :\r\nprint(Nilai[0])\r\nprint(Nilai[2])\r\nprint(Nilai[0][2])\r\nprint(Nilai[2][0])\r\n\r\nprint(\"Menampilkan Array dalam bentuk tabel atau matrix\")\r\nfor a in Nilai:\r\n for b in a:\r\n print(b, end = \" \")\r\n print()\r\n\r\nprint(\"Menambahkan Data Baru di Array\")\r\nNilai.insert(3,(90, 99, 98, 97))\r\nfor a in Nilai:\r\n for b in a:\r\n print(b, end = \" \")\r\n print()\r\n\r\nprint(\"Mengupdet Data didalam Array\")\r\nNilai[3] = [80, 90]\r\nNilai[0][3] = 98\r\nfor a in Nilai:\r\n for b in a:\r\n print(b, end = \" \")\r\n print()\r\n" }, { "alpha_fraction": 0.47330960631370544, "alphanum_fraction": 0.47686833143234253, "avg_line_length": 27.36842155456543, "blob_id": "53a85a521816c6f6ab76e3c9e8d006bd70c95c2b", "content_id": "ce635a0d16b3d105f9af789996f8813290e1fe53", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 562, "license_type": "no_license", "max_line_length": 71, "num_lines": 19, "path": "/arraydimensi1.py", "repo_name": "tripuspiit/tri", "src_encoding": "UTF-8", "text": "print(\"Array Dimensi Satu\")\r\nbuah=[ ]\r\nstop = False\r\ni = 0\r\n\r\n# Mengisi data buah melalui input user\r\nwhile(not stop):\r\n buah_baru = input(\"Input Nama Buah-buahan yang ke- {} :\".format(i))\r\n buah.append(buah_baru)\r\n #Updet nilai i\r\n i+=1\r\n tanya = input(\"Mau isi data lagi??? 
(y/t):\")\r\n if(tanya==\"t\"):\r\n stop = True\r\n #Cetak Semua Data Buah\r\n print(\"===========================================\")\r\n print(\"Hasil input nama buat dalam format array : \", buah)\r\n\r\nprint(\"=======================================\")\r\n\r\n\r\n" }, { "alpha_fraction": 0.5279107093811035, "alphanum_fraction": 0.5800106525421143, "avg_line_length": 23.79452133178711, "blob_id": "3d088adbafe5821154a84b364c84f55eb49755aa", "content_id": "3ea55c45ba4d3fcd23ee52e8e6582876c5e42cfc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1881, "license_type": "no_license", "max_line_length": 80, "num_lines": 73, "path": "/arraydimensi2.py", "repo_name": "tripuspiit/tri", "src_encoding": "UTF-8", "text": "# CONTOH HASIL NILAI LATIHAN SOAL MAHASISWA\r\n\r\n#Mhs 1 - 80 88 89 90\r\n#Mhs 2 - 90 89 88\r\n#Mhs 3 - 88 80 90 89\r\n#Mhs 4 - 89 80 90 88\r\n\r\nNilai = [[80, 88, 89, 90], [90, 89, 88], [88, 80, 90, 89], [89, 80, 90, 88]]\r\n #Mengisi data array 2 dimensi, perhatikan cara berikut ini :\r\nprint(Nilai[0])\r\nprint(Nilai[2])\r\nprint(Nilai[0][2])\r\nprint(Nilai[2][0])\r\n\r\nprint(\"Menampilkan Array dalam bentuk tabel atau matrix\")\r\nfor a in Nilai:\r\n for b in a:\r\n print(b, end = \" \")\r\n print()\r\n\r\nprint(\"Menambahkan Data Baru di Array\")\r\nNilai.insert(3,(90, 99, 98, 97))\r\nfor a in Nilai:\r\n for b in a:\r\n print(b, end = \" \")\r\n print()\r\n\r\nprint(\"Mengupdet Data didalam Array\")\r\nNilai[3] = [80, 90]\r\nNilai[0][3] = 98\r\nfor a in Nilai:\r\n for b in a:\r\n print(b, end = \" \")\r\n print()\r\n\r\nprint(\"Menghapus Data didalam Array\")\r\ndel Nilai[3]\r\nfor a in Nilai:\r\n for b in a:\r\n print(b, end = \" \")\r\n print()\r\n\r\n#Bagaimana menggukan format input user?\r\n#Studi kasus input nilai diatas\r\n\r\n#Deklarasi Variabel\r\nNilai_mahasiswa=[ ]\r\nstop1 = False\r\ni=0\r\n\r\nprint(\"Mengisi data Nilai Mahasiswa melalui input user\")\r\nwhile(not stop1):\r\n jumlah=int(input(\"Berapa Nilai akan diinput, pada array ke- {}:\".format(i)))\r\n a = 0\r\n tampung = [ ]\r\n for a in range(jumlah):\r\n Nilai_baru = int(input(\"Input Hasil Nilai ke-{}\".format(a)))\r\n tampung.append(Nilai_baru)\r\n Nilai_mahasiswa.insert(i,tampung)\r\n i+=1\r\n tanya = input(\"Mau isi data mahsiswa selanjutnya???(y/t):\")\r\n if(tanya == \"t\"):\r\n stop1 = True\r\n #cetak semua data nilai\r\nprint(\"===========================\")\r\nprint(\"Cetak Hasil Nilai Dalam data asli array 2 Dimensi\", Nilai_mahasiswa)\r\nprint()\r\nprint(\"===========================\")\r\nprint(\"Hasil Array 2 Dimensi dalam bentuk Tabel\")\r\nfor x in Nilai_mahasiswa:\r\n for b in x:\r\n print(b, end = \" \")\r\n print()" } ]
3
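The three `tripuspiit/tri` scripts above all exercise the same jagged list-of-lists pattern interactively. Below is a condensed sketch of the core operations they demonstrate, with hard-coded scores so it runs without `input()`; the variable name is an illustrative assumption:

```python
# Jagged 2D list: rows may have different lengths, as in the scripts above.
scores = [[80, 88, 89, 90], [90, 89, 88]]

scores.append([88, 80, 90, 89])   # add a new row at the end
scores[1] = [95, 94]              # replace an existing row
scores[0][3] = 98                 # update a single cell

for row in scores:                # row-by-row "table" printing
    print(*row)
```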
oriko1010/ZipZap
https://github.com/oriko1010/ZipZap
fbdc23d1ca23cb5afea5eaf8ebffb466f4931fd5
0695507ca7bcd4951dfaeccc48d074a04c37e55a
0058d0682a4e8f85a5b21231adba719b0825e3ee
refs/heads/master
2023-01-03T05:09:54.992596
2020-10-22T06:55:10
2020-10-22T06:55:10
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5365853905677795, "alphanum_fraction": 0.7317073345184326, "avg_line_length": 13, "blob_id": "41f5ebe1ad039c4e77ad11181c1b731a6ac37d09", "content_id": "f309f6f30466b75dce7977141e6430f4d7560ed1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 41, "license_type": "no_license", "max_line_length": 14, "num_lines": 3, "path": "/requirements.txt", "repo_name": "oriko1010/ZipZap", "src_encoding": "UTF-8", "text": "mitmproxy~=5.2\nnumpy~=1.19\nrequests~=2.24" }, { "alpha_fraction": 0.7560844421386719, "alphanum_fraction": 0.7618663311004639, "avg_line_length": 52.89130401611328, "blob_id": "b5cb27bc6a7246ff4182112b5e20bc908a6af4f4", "content_id": "d8849ac3ac8c3a02c530dc552897a88c7571cecb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 7437, "license_type": "no_license", "max_line_length": 123, "num_lines": 138, "path": "/README.md", "repo_name": "oriko1010/ZipZap", "src_encoding": "UTF-8", "text": "# ZipZap: MagiReco NA Private Server\n\n### Installation and running\n\n1. Make sure you have python3, either in a separate env (recommended because system-wide Python dependency graphs are \ngross) or on your system\n2. Run `pip install -r requirements.txt` in a command line\n3. Run `mitmproxy -s server.py` or `mitmweb -s server.py` in a command line, and do not close this command line. (If you\nrun mitmweb though, you can close the browser window.)\n\n### Connecting to the private server\n\n1. Make sure MagiReco is already installed on your device. I've only tested it on when it's past the tutorial, so no \nguarantees it'll work for a fresh install. But it works even if you haven't updated.\n2. Find the IP address of the computer you're running the server on. I've only tested this with a local IP address (ones\nthat start with 192.168) but external IP addresses should work.\n3. Configure a proxy on your device with the server address as your computer's IP address, and the port as 8080. You can\ngoogle this if you don't know how to.\n4. By the way, if you're new to this, this needs to be done with the server running on your computer.\n5. Open mitm.it in a browser, and download and enable a certificate for your device\n6. Try loading google.com through the proxy -- if it works, then you can open the app and everything will be through the\nprivate server.\n\nI had a horrible time getting mitmproxy to work on some of my devices; specifically, I never got it to work with my Mac\nrunning mitmproxy and my iPhone 6 trying to get through it. Try googling any errors you have with mitmproxy; it may or\nmay not help. In the future I might move off of mitmproxy and find a different solution.\n\n---\n### Porting over your existing data\n\nOn Windows, run getUserData.bat. You can close the browser window that pops up, but not the command line. \n\nOn another system, follow steps 1 and 2 in the Installation and Running section, then run\n`mitmproxy -s getUserData.py` or `mitmweb -s getUserData.py` in a command line without closing it.\n\nFollow the instructions for connecting to the private server to get your device connected to the script. Then, get to the\ntitle screen (the one that has the MagiReco logo on it, and from which you can see the ToS and transfer your account).\nYou should now see the command line print a lot of lines saying \"writing to ------\". Once the command line says it's done,\nclose the command line. 
You can run the private server now, and it will serve you your own data.\n\nUser data is stored in the files in data/user. When you run getUserData.py, the data is backed up in data/userBackup.\nYou can change the data in the data/user folder and the data displayed in the game will change as well.\n\nThe default user is a level 999 account with only Iroha and no memoria but 999 of every material, including summon tickets.\n\n---\n\n### Currently supported functions:\n- displaying any page in the app (api/page.py)\n  + as well as displaying anything in the archive\n  + and listing memoria\n- improving magical girls (api/userCard.py)\n  - level up\n  - magia level up\n  - awaken\n  - limit break\n- managing memoria (api/userPiece.py and api/userPieceSet.py)\n  - level up\n  - limit break\n  - making memoria sets\n  - putting memoria into the vault and taking them out\n- gacha (api/gacha.py)\n  - pull premium, x1 and x10, using stones and tickets\n    - this currently includes all limited megucas and memes, and welfare ones as well. no reason not to lol\n  - pull normal\n  - view history\n- changing user settings (api/gameUser.py, api/user.py, api/userChara.py)\n  - set name\n  - set comment\n  - set leader\n  - set background (only two backgrounds are available, but...)\n\n### Currently missing functions:\n- can't customize magical girls' looks (e.g. in disks)\n- can't recover AP\n- can't lock or sell memoria\n- you can't clear any missions or accept their rewards\n- mirrors, quests, and team-making are entirely nonfunctional\n- can't buy anything from shop\n\n### What's next?\nI coded very fast, and very not well, because I wanted to get as many features out before the 30th. Code quality is still\nimportant to me, though, and I really don't want to get anything done outside of the basics before improving it enough\nthat maintenance will not be horrendous. But before the 30th, my priority is to get all the basic functions implemented\nso that we won't have to rely on hitting the real server to figure out what it does for some important thing like \nbattles.\n\nThe features are in order of the most overlap with the knowledge I have currently, to the least, because when I \nimplement a new feature I don't know much about, at least half of the time is spent researching how it fits in with all \nthe user's data.\n\n- implement shop\n- implement team-making\n- implement quests\n- implement mirrors\n- implement missions\n- implement random things I left off, like AP recovering\n- add unit tests\n- refactor\n  - put all the data reading and writing into a util module to avoid race conditions with 50 different functions \n  writing/reading to a file at the same time\n  - improve response headers, perhaps add compression\n  - maybe make a class each API has to extend that removes repeated code?\n  - maybe make classes that represent each type of object used in the game (e.g. item, card, userCard)?\n- add support for events\n- add support for multiple users (using a database like S3)\n- add support for finding other users, following and using supports\n- move server to the cloud\n- hack app to call the server's address rather than having to rely on mitmproxy\n\n### Structure of this package\nAs you can tell from the instructions, server.py is the main event handler. It intercepts requests from the app thanks\nto mitmproxy, and decides what to do.\n\nMost resources are going to be retrieved from an archive. 
For now that's en.rika.ren, but I don't know anything about\nrika.ren other than that it stores assets, and I might switch to my own archive later if we want to have custom assets \n(like if we want to bring JP events in).\n\nThe exception is website assets (html, js, css, and the like). These are stored locally because they're small in size,\nnot likely to change, storing them speeds up loading, and we might want to edit them later to support new features like\nsetting a bunch of awaken mats at once, or SE. These are stored in the assets folder.\n\nUser data right now is stored locally as well, in the data/user directory. You can actually modify the files in here to\nchange which megucas or memes or items you have. Just be careful or the app will error if you mess up a line or \nsomething.\n\nAll the support for different API endpoints is stored in the api directory; each endpoint has its own file. They all\ntake a request from the app, and generate the response. They edit the user data files directly, and also access the\ngeneral data files in the data directory that store things like a list of all the megucas in the game.\n\nThe page endpoint handles all of the info that the app needs to display different views, like the \"Magical Girls\"\nscreen or the different shops. None of the other endpoints are called until you actually try to change the user's\ngame data.\n\n----\nIf you have suggestions or want to help (you can help even if you don't know how to code!), please contact me at\nu/rhyme_or_rationale or rinsfouriertransform#2303. And feel free to send a pull request \nanytime~\n" }, { "alpha_fraction": 0.6355769038200378, "alphanum_fraction": 0.6451923251152039, "avg_line_length": 32.58064651489258, "blob_id": "8a5f716b5b49b321edb68c1225dc0b47ab4974d3", "content_id": "bfc7fcc94f9be907bdaadb43b2da08e9ab3de438", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1040, "license_type": "no_license", "max_line_length": 127, "num_lines": 31, "path": "/api/user.py", "repo_name": "oriko1010/ZipZap", "src_encoding": "UTF-8", "text": "import json\nfrom mitmproxy import http\n\ndef isAnswered(flow):\n    response = {\n        \"resultCode\": \"success\",\n        'isAnswered': True\n    }\n    flow.response = http.HTTPResponse.make(200, json.dumps(response, ensure_ascii=False), {\"Content-Type\": \"application/json\"})\n\ndef setPassword(flow):\n    # read, modify, then rewrite: opening with 'w+' would truncate the file before json.load could read it\n    with open('data/user/gameUser.json', 'r', encoding='utf-8') as f:\n        gameUser = json.load(f)\n    gameUser['passwordNotice'] = False\n    with open('data/user/gameUser.json', 'w', encoding='utf-8') as f:\n        json.dump(gameUser, f, ensure_ascii=False)\n\n    response = {\n        \"resultCode\": \"success\",\n        'gameUser': gameUser\n    }\n    flow.response = http.HTTPResponse.make(200, json.dumps(response, ensure_ascii=False), {\"Content-Type\": \"application/json\"})\n\ndef handleUser(flow):\n    endpoint = flow.request.path.replace('/magica/api/user', '')\n    if endpoint.endswith('/isAnswered'):\n        isAnswered(flow)\n    elif endpoint.endswith('/setPassword'):\n        setPassword(flow)\n    else:\n        print(endpoint)\n        flow.response = http.HTTPResponse.make(501, \"Not implemented\", {})" } ]
3
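The `oriko1010/ZipZap` record above answers game API calls from inside a mitmproxy script. Below is a stripped-down sketch of that request-hook pattern, assuming mitmproxy 5.x as pinned in the repo's requirements; the endpoint path is illustrative. Setting `flow.response` inside the `request` hook means the real server is never contacted:

```python
import json
from mitmproxy import http

def request(flow: http.HTTPFlow) -> None:
    # answer this one API call locally; every other request passes through
    if flow.request.path.endswith("/isAnswered"):
        body = json.dumps({"resultCode": "success", "isAnswered": True})
        flow.response = http.HTTPResponse.make(
            200, body, {"Content-Type": "application/json"})
```

A script like this is loaded the same way the README describes for the project itself, e.g. `mitmproxy -s <script>.py`.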
Miha-Katrasnik/RV_izziv_sk4
https://github.com/Miha-Katrasnik/RV_izziv_sk4
49de522af28ef667011cac5e8906e53fad6537d6
d9f031b7751f5a7080f7abe8056aae9bd0b48901
db01a0867e6dd26697e59b7b92208a805978ed78
refs/heads/main
2023-05-13T15:54:17.136708
2021-05-23T10:10:41
2021-05-23T10:10:41
368,561,893
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5343696475028992, "alphanum_fraction": 0.5612404346466064, "avg_line_length": 40.9664421081543, "blob_id": "213a2e6e0d68648dc7b80231369549434f9c4f58", "content_id": "dbf9e7326a37427cc9b827bac327dfcdb8b49ae8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12876, "license_type": "no_license", "max_line_length": 202, "num_lines": 298, "path": "/RV_seminarska.py", "repo_name": "Miha-Katrasnik/RV_izziv_sk4", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\r\nimport numpy as np\r\nimport cv2 as cv\r\nimport my_lib as my\r\nimport time\r\n\r\n\r\nvideo_file = ['videos/MVI_6342.MOV', 'videos/MVI_6339.MOV'] #vrstni red: leva roka, desna roka\r\nthreshold = 2 #prag za upragovljanje \"razlike\" slik\r\nksize = 5 #Gaussian blur kernel size\r\ncircle_r_small = 25 #radius of circle around pin\r\ncircle_r_large = 38 #outside radius of keep-out ring \r\ncircle_th = 15 #threshold for pin inside hole\r\nring_th = 20 #threshold for hand inside ring (near hole)\r\n\r\n#array za casa leve in desne roke\r\ncas = np.zeros(2)\r\n\r\n#vse naredimo za levo in desno roko\r\nfor i in range(2):\r\n \r\n #iz videa zajamemo prvo sliko\r\n cap = cv.VideoCapture(video_file[i])\r\n ret, frame0 = cap.read()\r\n # if frame is read correctly ret is True\r\n if not ret:\r\n raise Exception(\"Can't receive frame (wrong filename?).\")\r\n \r\n if i==0:\r\n print('\\nLEVA ROKA')\r\n if i==1:\r\n print('\\nDESNA ROKA')\r\n \r\n #določimo št. slik na sekundo\r\n fps = cap.get(cv.CAP_PROP_FPS)\r\n print('fps:', fps)\r\n \r\n #ustvarimo objekt za shranjevanje videa\r\n out = cv.VideoWriter('out_video_' + str(i) + '.avi',cv.VideoWriter_fourcc('M','J','P','G'), fps, (1280,720))\r\n \r\n #ustvarimo objekt za sivinsko razliko\r\n diffObject = my.HSDiff(frame0, threshold=2, ksize=5)\r\n \r\n \r\n #DOLOČITEV TOČK\r\n points_option = 'a' #trenutno privzeto avtomatska določitev točk\r\n #points_option = input('Določi nove točke? (y/n): ')\r\n \r\n if points_option == 'y':\r\n points = my.set_points(cv.cvtColor(frame0, cv.COLOR_BGR2RGB), n=9)\r\n elif points_option == 'a':\r\n #avtomatsko poiščemo luknje s Houghovo transformacijo\r\n circles = my.hough_find_holes(frame0)\r\n \r\n if len(circles) >= 9:\r\n points = circles[0:9,0:2]\r\n else:\r\n print('Avtomatsko določanje točk neuspešno. Prosimo določite jih ročno!')\r\n points = my.set_points(cv.cvtColor(frame0, cv.COLOR_BGR2RGB), n=9)\r\n else:\r\n #uporabimo vnaprej določene točke\r\n points = my.peg_points[i]\r\n \r\n \r\n #določimo maske za kroge okoli zatičev\r\n circle_masks_small = []\r\n circle_masks_large = []\r\n \r\n for point in points:\r\n mask = my.circle_mask((1280,720), center=point, r=circle_r_small)\r\n circle_masks_small.append(mask)\r\n \r\n mask = my.ring_mask((1280,720), center=point, r_min=circle_r_small, r_max=circle_r_large)\r\n circle_masks_large.append(mask)\r\n \r\n \r\n iteration = 1 #za štetje iteracije while zanke\r\n iter_start = 0 #spremenljivka za zap. št. slike začetka opravljanja naloge (roka pride na sceno)\r\n iter_stop = 0 #spremenljivka za zap. št. 
slike zaključka opravljanja naloge (roka zapusti sceno)\r\n stanje = 0 #stanje opravljanja naloge (0 - start, 1 - roka na sliki, 2 - zatiči vstavljeni, 3 - zatiči pospravljeni, 4 - roka umaknjena)\r\n \r\n time_start = time.perf_counter() #čas začetka za izračun hitrosti delovanja programa\r\n \r\n #TEST\r\n peg_states_prev = np.zeros(9, dtype='bool')\r\n\r\n max_inserted_pegs = 0 #uporabljeno pri vstavljanju zatičev\r\n min_inserted_pegs = 9 #uporabljeno pri razstavljanju zatičev\r\n pegs_inserted_time = np.zeros(9)\r\n pegs_extract_time = np.zeros(9)\r\n \r\n print('\\nČas vstavljanja zatičev:')\r\n \r\n while cap.isOpened():\r\n ret, frame = cap.read()\r\n \r\n # if frame is read correctly ret is True\r\n if not ret:\r\n #print(\"Can't receive frame (stream end?). Exiting ...\")\r\n break\r\n \r\n #določimo razliko med trenutno in prvo sliko\r\n diff = diffObject.subtract(frame)\r\n \r\n #sliko razlike upragovimo\r\n diff_th = diff > diffObject.threshold\r\n diff_th = diff_th.astype('uint8') * 255\r\n \r\n #določimo, ali je roka na sliki\r\n diff_th_mean = np.mean(diff_th)\r\n hand_in_frame = diff_th_mean > 20\r\n \r\n \r\n #določimo sliko, ki jo želimo prikazati\r\n #frame_to_show = cv.cvtColor(diff_th, cv.COLOR_GRAY2BGR)\r\n frame_to_show = frame\r\n \r\n #narišemo okvir, ki predstavlja verjetnost, da je roka na sliki\r\n box_color = my.determine_color(diff_th_mean, (30,20,10,3))\r\n #box_color = my.determine_color(diff_th_mean, (10,8,6,4))\r\n cv.rectangle(frame_to_show, (5,5), (1275,715), color=box_color, thickness=3)\r\n \r\n #določanje ali je zatič v luknji\r\n inside_circles_arr = []\r\n inside_rings_arr = []\r\n points = np.array(points).astype('int32')\r\n for j in range(len(points)):\r\n point = points[j]\r\n #povprečna vrednost upragovljene slike znotraj KROGA j-tega zatiča\r\n inside_circle = np.mean(diff_th[ circle_masks_small[j] ])\r\n inside_circles_arr.append(inside_circle)\r\n \r\n #določimo barvo in narišemo krožnico\r\n #circle_color = my.determine_color(inside_circle, (25,15,10,5))\r\n circle_color = my.determine_color(inside_circle, (circle_th+3,circle_th,circle_th-1,circle_th-2))\r\n cv.circle(frame_to_show, center=(point[0], point[1]), radius=circle_r_small, color=circle_color, thickness=2)\r\n \r\n #povprečna vrednost upragovljene slike znotraj KOLOBARJA j-tega zatiča\r\n inside_ring = np.mean(diff_th[ circle_masks_large[j] ])\r\n inside_rings_arr.append(inside_ring)\r\n \r\n #določimo barvo in narišemo krožnico\r\n #ring_color = my.determine_color(inside_ring, (100,60,20,10))\r\n ring_color = my.determine_color(inside_ring, (ring_th+2,ring_th+1,ring_th,ring_th-1))\r\n cv.circle(frame_to_show, center=(point[0], point[1]), radius=circle_r_large, color=ring_color, thickness=2)\r\n \r\n #določitev stanj zatičev (vstavljen / ni vstavljen) in št. iteracije spremembe\r\n peg_states, peg_change_iter = my.peg_states_changes(inside_circles_arr, circle_th, inside_rings_arr, ring_th, iteration)\r\n \r\n \r\n #določitev časa med vstavljanjem/razstavljanjem posameznih zatičev - (dobro bi bilo napisati bolj razumljivo)\r\n if np.any(peg_states != peg_states_prev):\r\n num_of_inserted_pegs = np.sum(peg_states)\r\n if stanje == 1 and (num_of_inserted_pegs > max_inserted_pegs):\r\n #vstavljanje zatičev\r\n pegs_inserted_time[num_of_inserted_pegs-1] = (peg_change_iter[peg_states != peg_states_prev][0] - iter_start)/fps\r\n if num_of_inserted_pegs <= 1:\r\n print(\"{0}. 
| {1: .2f} s\".format(num_of_inserted_pegs, pegs_inserted_time[num_of_inserted_pegs-1]))\r\n else: \r\n print(\"{0}. | {1: .2f} s\".format(num_of_inserted_pegs, pegs_inserted_time[num_of_inserted_pegs-1] - pegs_inserted_time[num_of_inserted_pegs-2]))\r\n #print('cas vstavljanja zatica:', (peg_change_iter[peg_states != peg_states_prev][0] - iter_start)/fps)\r\n max_inserted_pegs = num_of_inserted_pegs\r\n if stanje == 2 and (num_of_inserted_pegs < min_inserted_pegs):\r\n peg_index = 8-num_of_inserted_pegs\r\n pegs_extract_time[peg_index] = (peg_change_iter[peg_states != peg_states_prev][0] - iter_start)/fps\r\n if peg_index == 0:\r\n print(\"{0}. | {1: .2f} s\".format(9-num_of_inserted_pegs, pegs_extract_time[peg_index] - pegs_inserted_time[8]))\r\n else:\r\n print(\"{0}. | {1: .2f} s\".format(9-num_of_inserted_pegs, pegs_extract_time[peg_index] - pegs_extract_time[peg_index-1]))\r\n min_inserted_pegs = num_of_inserted_pegs\r\n \r\n peg_states_prev = peg_states.copy()\r\n \r\n \r\n \r\n #določimo trenutno stanje opravljanja naloge\r\n if stanje == 0:\r\n if diff_th_mean > 20:\r\n #print('Roka je na sceni')\r\n stanje = stanje+1\r\n iter_start = iteration\r\n \r\n elif stanje == 1:\r\n if np.all(peg_states == np.ones(9)):\r\n #print('Zatici so vstavljeni')\r\n print('\\nČas razstavljanja zatičev:')\r\n stanje = stanje+1\r\n elif stanje == 2:\r\n if np.all(peg_states == np.zeros(9)):\r\n print('Zatici so pospravljeni')\r\n stanje = stanje+1\r\n elif stanje == 3:\r\n if diff_th_mean < 20:\r\n print('Roka umaknjena')\r\n stanje =stanje+1\r\n iter_stop = iteration\r\n \r\n cv.putText(frame_to_show, \"Leva roka\" if i==0 else \"Desna roka\", (50, 700), fontFace=cv.FONT_HERSHEY_SIMPLEX, fontScale=1, color=(255,255,255))\r\n \r\n #prikaz trenutnega časa opravljanja naloge na videoposnetku\r\n if iter_start == 0:\r\n time_to_show = 0\r\n elif iter_stop == 0:\r\n time_to_show = (iteration - iter_start)/fps\r\n else:\r\n time_to_show = (iter_stop - iter_start)/fps\r\n cv.putText(frame_to_show, \"time: {0: .1f}\".format(time_to_show), (50, 600), fontFace=cv.FONT_HERSHEY_SIMPLEX, fontScale=0.5, color=(255,255,255))\r\n \r\n #besedila za različne podnaloge (bela - ni opravljeno, zelena - opravljeno)\r\n cv.putText(frame_to_show, \"Roka prisla v kader\", (10, 20), fontFace=cv.FONT_HERSHEY_SIMPLEX, fontScale=0.5, color=(255,255,255) if stanje<1 else (0,255,0))\r\n cv.putText(frame_to_show, \"Vsi zatici vstavljeni (\" + str(np.sum(peg_states)) + \"/9)\" , (10, 40), fontFace=cv.FONT_HERSHEY_SIMPLEX, fontScale=0.5, color=(255,255,255) if stanje<2 else (0,255,0))\r\n cv.putText(frame_to_show, \"Vsi zatici odstranjeni\", (10, 60), fontFace=cv.FONT_HERSHEY_SIMPLEX, fontScale=0.5, color=(255,255,255) if stanje<3 else (0,255,0))\r\n cv.putText(frame_to_show, \"Roka zapustila kader\", (10, 80), fontFace=cv.FONT_HERSHEY_SIMPLEX, fontScale=0.5, color=(255,255,255) if stanje<4 else (0,255,0))\r\n \r\n #prikažemo sliko\r\n cv.imshow('frame', frame_to_show)\r\n \r\n #shranimo sliko videa\r\n out.write(frame_to_show)\r\n\r\n \r\n # ZAČETEK KODE ZA FUNKCIJE TIPK\r\n key = cv.waitKey(1)\r\n \r\n if key == ord('w'):\r\n key = cv.waitKey(0)\r\n while key != ord('w'):\r\n if key == ord('q'):\r\n break\r\n elif key == ord('f'):\r\n cv.imshow('frame', frame)\r\n elif key == ord('0'):\r\n cv.imshow('frame', frame0.astype('uint8'))\r\n elif key == ord('d'):\r\n diff_show = 10*diff.astype('float64')\r\n diff_show[diff_show<0] = 0\r\n diff_show[diff_show>255] = 255\r\n diff_show = diff_show.astype('uint8')\r\n cv.imshow('frame', 
diff_show)\r\n elif key == ord('t'):\r\n cv.imshow('frame', diff_th)\r\n \r\n key = cv.waitKey(0)\r\n # uporabnik zapre okno - želimo enak rezultat kot pri pritisku na 'q'\r\n if cv.getWindowProperty('frame', cv.WND_PROP_VISIBLE) == 0:\r\n key = ord('q')\r\n \r\n if key == ord('q'):\r\n #print('q')\r\n break\r\n # KONEC KODE ZA FUNKCIJE TIPK\r\n \r\n \r\n #ko uporabnik zapre okno, prenehaj z izvajanjem while zanke\r\n if cv.getWindowProperty('frame', cv.WND_PROP_VISIBLE) == 0:\r\n break\r\n \r\n \r\n iteration += 1\r\n if(iteration % 30 == 0):\r\n #iteration = 0\r\n #print('elapsed time:', time.perf_counter() - time_start)\r\n time_start = time.perf_counter()\r\n \r\n \r\n cap.release()\r\n out.release()\r\n cv.destroyAllWindows()\r\n \r\n #izračunaj čas opravljanja naloge\r\n if stanje == 4:\r\n cas[i] = (iter_stop-iter_start)/fps\r\n elif stanje == 3:\r\n print('ROKA NI ZAPUSTILA SCENE!')\r\n elif stanje == 2:\r\n print('VSI ZATIČI NISO BILI RAZSTAVLJENI!')\r\n elif stanje == 1:\r\n print('VSI ZATIČI NISO BILI VSTAVLJENI!')\r\n elif stanje == 0:\r\n print('ROKA NI PRIŠLA NA SCENO!')\r\n\r\n\r\n#izračunaj časa opravljanja naloge (če je bila naloga opravljena pravilno)\r\nif(cas[0] > 0):\r\n print('\\nCas opravljanja naloge z levo roko je:', '{0: .2f}'.format(cas[0]), 's')\r\nif(cas[1] > 0):\r\n print('\\nCas opravljanja naloge z desno roko je:', '{0: .2f}'.format(cas[1]), 's')\r\n\r\nif np.all(cas > 0):\r\n if cas[0] > cas[1]:\r\n print('Dominantna roka je desna.')\r\n else:\r\n print('Dominantna roka je leva.')\r\n \r\n print('Razlika časov je:', np.abs(cas[0] - cas[1]))\r\n\r\nprint('\\nKonec programa')" }, { "alpha_fraction": 0.4094395339488983, "alphanum_fraction": 0.5894542932510376, "avg_line_length": 38.72672653198242, "blob_id": "48a0f074029e96e6f4a41702abb807beabf07982", "content_id": "4a9eac345037bc306bd7afc4a1ed509abfc06cc2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 13599, "license_type": "no_license", "max_line_length": 124, "num_lines": 333, "path": "/my_lib.py", "repo_name": "Miha-Katrasnik/RV_izziv_sk4", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\nimport numpy as np\r\nimport cv2 as cv\r\nimport matplotlib.pyplot as plt\r\n\r\n\r\n#OBJEKTNI RAZREDI ZA RAZLIČNE NAČINE DOLOČANJA RAZLIK MED SLIKAMA\r\n\r\nclass GrayDiff:\r\n def __init__(self, frame0, threshold, ksize):\r\n '''\r\n Konstruktor objekta za sivinsko razliko\r\n frame0 -> začetna slika\r\n threshold -> prag za upragovljanje\r\n ksize -> velikost jedra za Gaussov filter\r\n return: ničesar ne vrne\r\n '''\r\n self.img0 = cv.GaussianBlur(frame0, (ksize, ksize), sigmaX = 0) #sigma determined from ksize\r\n self.img0 = cv.cvtColor(frame0, cv.COLOR_BGR2GRAY)\r\n self.img0 = np.array(self.img0).astype('int32')\r\n \r\n self.threshold = threshold\r\n self.ksize = ksize\r\n\r\n def subtract(self, frame):\r\n '''\r\n Metoda za odštevanje sivinskih slik\r\n frame -> trenutna slika (lahko je barvna)\r\n return -> sivinska razlika med trenutno in začetno sliko\r\n '''\r\n gray = cv.GaussianBlur(frame, (self.ksize, self.ksize), sigmaX = 0) #sigma determined from ksize\r\n gray = cv.cvtColor(gray, cv.COLOR_BGR2GRAY)\r\n gray = np.array(gray).astype('int32')\r\n diff = np.abs(gray - self.img0)\r\n \r\n diff[diff<0] = 0\r\n diff[diff>255] = 255\r\n diff = diff.astype('uint8')\r\n \r\n return diff\r\n\r\n\r\nclass HSDiff:\r\n def __init__(self, frame0, threshold, ksize):\r\n '''\r\n Konstruktor objekta za evklidsko razdaljo v Hue Saturation polarnih 
koordinatah\r\n frame0 -> začetna slika\r\n threshold -> prag za upragovljanje\r\n ksize -> velikost jedra za Gaussov filter\r\n return: ničesar ne vrne\r\n '''\r\n self.img0 = cv.GaussianBlur(frame0, (ksize, ksize), sigmaX = 0) #sigma determined from ksize\r\n img0_HSV = cv.cvtColor(self.img0, cv.COLOR_BGR2HSV)\r\n img0_HSV = np.array(img0_HSV).astype('int32')\r\n \r\n self.h0 = np.array(img0_HSV[:,:,0]).astype('float64')\r\n self.s0 = np.array(img0_HSV[:,:,1]).astype('float64')\r\n \r\n #self.x0 = s0 * np.cos(2*h0*np.pi/180)\r\n #self.y0 = s0 * np.sin(2*h0*np.pi/180)\r\n \r\n self.threshold = threshold\r\n self.ksize = ksize\r\n \r\n def subtract(self, frame):\r\n '''\r\n Metoda za odštevanje sivinskih slik\r\n frame -> trenutna barvna slika\r\n return -> evklidska razdalja med piksli trenutne in začetne slike v HS polarnih koordinatah\r\n '''\r\n frame_gauss = cv.GaussianBlur(frame, (self.ksize, self.ksize), sigmaX = 0) #sigma determined from ksize\r\n \r\n frame_HSV = cv.cvtColor(frame_gauss, cv.COLOR_BGR2HSV)\r\n frame_HSV = np.array(frame_HSV).astype('int32')\r\n \r\n h = np.array(frame_HSV[:,:,0]).astype('float64')\r\n s = np.array(frame_HSV[:,:,1]).astype('float64')\r\n \r\n h_diff = np.abs(self.h0 - h).astype('uint8')\r\n diff = self.s0**2 + s**2 - 2 * self.s0 * s * cos_2deg(h_diff)\r\n diff = 255.0 * diff / (510.0**2)\r\n \r\n diff[diff<0] = 0\r\n diff[diff>255] = 255\r\n diff = diff.astype('uint8')\r\n \r\n return diff\r\n\r\n#FUNKCIJE\r\n\r\ndef set_points(iImage, n):\r\n '''\r\n Funkcija za določanje točk s klikanjem\r\n iImage -> slika na kateri določamo točke\r\n n -> št. točk, ki jih želimo določiti\r\n return: array točk\r\n '''\r\n fig = plt.figure()\r\n ax = fig.add_subplot(111)\r\n ax.imshow(iImage, cmap='gray')\r\n \r\n points = []\r\n def onclick(event):\r\n if event.key == 'shift':\r\n x, y = event.xdata, event.ydata\r\n points.append((x, y))\r\n ax.plot(x, y, 'or')\r\n fig.canvas.draw()\r\n \r\n ka = fig.canvas.mpl_connect('button_press_event', onclick)\r\n \r\n while len(points) < 9:\r\n cv.waitKey(200)\r\n \r\n return points\r\n\r\ndef determine_color(value, thresholds):\r\n if value > thresholds[0]:\r\n color = (0,255,0) #BGR\r\n elif value > thresholds[1]:\r\n color = (255,255,0) #BGR\r\n elif value > thresholds[2]:\r\n color = (255,0,0) #BGR\r\n elif value > thresholds[3]:\r\n color = (255,0,255) #BGR\r\n else:\r\n color = (0,0,255) #BGR\r\n \r\n return color\r\n\r\ndef circle_mask(imSize, center, r):\r\n #izračuna masko za krog\r\n #https://stackoverflow.com/questions/49330080/numpy-2d-array-selecting-indices-in-a-circle\r\n x = np.arange(0, imSize[0])\r\n y = np.arange(0, imSize[1])\r\n #arr = np.zeros((y.size, x.size))\r\n \r\n cx = float(center[0])\r\n cy = float(center[1])\r\n \r\n #izračun maske\r\n mask = (x[np.newaxis,:]-cx)**2 + (y[:,np.newaxis]-cy)**2 < r**2\r\n return mask\r\n \r\ndef ring_mask(imSize, center, r_min, r_max):\r\n #izračuna masko za kolobar\r\n #https://stackoverflow.com/questions/49330080/numpy-2d-array-selecting-indices-in-a-circle\r\n x = np.arange(0, imSize[0])\r\n y = np.arange(0, imSize[1])\r\n #arr = np.zeros((y.size, x.size))\r\n \r\n cx = float(center[0])\r\n cy = float(center[1])\r\n \r\n #izračun maske\r\n mask_outer = (x[np.newaxis,:]-cx)**2 + (y[:,np.newaxis]-cy)**2 < r_max**2\r\n mask_inner = (x[np.newaxis,:]-cx)**2 + (y[:,np.newaxis]-cy)**2 > r_min**2\r\n return np.logical_and(mask_outer, mask_inner)\r\n\r\npeg_prev_states_in = np.zeros((9,4), dtype='bool') #last 4 states for each peg - used for change 
empty->full\r\npeg_prev_states_out = np.zeros((9,4), dtype='bool') #last 4 states for each peg - used for change full->empty\r\npeg_states = np.zeros(9, dtype='bool') #are pegs in holes?\r\npeg_change_iter = np.zeros(9) #num. of iteration when last change occured\r\ndef peg_states_changes(inside_circle, circle_th, inside_ring, ring_th, iteration):\r\n '''\r\n Funkcija določi status zatičev (vstavljen / ni vstavljen).\r\n inside_circle -> array povp. vrednosti na krogih okoli lukenj\r\n circle_th -> prag za zaznavo vstavljenega zatiča\r\n inside_ring -> array povp. vrednosti na kolobarjih okoli lukenj\r\n ring_th -> prag za zaznavo roke, ki prekriva območje ob luknji\r\n ring_th -> trenutna iteracija oz. zaporadna številka slike videoposnetka\r\n return: peg_states -> stanje zatičev\r\n peg_change_iter -> iteracija zadnje spremembe\r\n '''\r\n global peg_prev_states_in\r\n global peg_prev_states_out\r\n global peg_states\r\n global peg_change_iter\r\n \r\n inside_circle = np.array(inside_circle)\r\n inside_ring = np.array(inside_ring)\r\n \r\n if(inside_circle.shape != inside_ring.shape):\r\n raise Exception('lengths of arrays inside_circle and inside_ring must be equal')\r\n \r\n \r\n peg_inside = inside_circle > circle_th\r\n ring_empty = inside_ring < ring_th\r\n peg_inside_ring_empty = np.logical_and(peg_inside, ring_empty)\r\n #print('peg_inside:', peg_inside)\r\n peg_prev_states_in = np.concatenate((np.vstack(peg_inside_ring_empty), peg_prev_states_in[:,0:3]), axis=1)\r\n \r\n peg_prev_states_out = np.concatenate((np.vstack(peg_inside), peg_prev_states_out[:,0:3]), axis=1)\r\n #print('peg_prev_states_out\\n', peg_prev_states_out.astype('int32'))\r\n \r\n for i in range(9):\r\n if peg_states[i]: #if current status: peg in hole\r\n if np.sum(peg_prev_states_out[i,0:3]) == 0:\r\n peg_states[i] = False\r\n peg_change_iter[i] = iteration - 3\r\n elif np.sum(peg_prev_states_out[i]) == 1:\r\n peg_states[i] = False\r\n peg_change_iter[i] = iteration - 4\r\n else: # if current status: hole empty\r\n if np.sum(peg_prev_states_in[i,0:3]) == 3:\r\n peg_states[i] = True\r\n peg_change_iter[i] = iteration - 3\r\n elif np.sum(peg_prev_states_in[i]) == 3:\r\n peg_states[i] = True\r\n peg_change_iter[i] = iteration - 4\r\n \r\n return peg_states, peg_change_iter\r\n\r\ndef points_near(point1, point2, maxDist):\r\n #preveri ali sta točki blizu\r\n #v trenutnem programu ni uporabljena\r\n point1 = np.array(point1)\r\n point2 = np.array(point2)\r\n \r\n dist = np.linalg.norm(point1 - point2)\r\n return dist <= maxDist\r\n\r\n\r\ndef hough_find_holes(frame0):\r\n '''\r\n Funkcija poišče lokacije lukenj za zatiče.\r\n frame0 -> slika na kateri iščemo luknje\r\n returns: correct_circles -> najdene luknje\r\n '''\r\n frame0_gauss = cv.GaussianBlur(cv.cvtColor(frame0, cv.COLOR_BGR2GRAY), (3,3), 0)\r\n \r\n circles = cv.HoughCircles(frame0_gauss, cv.HOUGH_GRADIENT, 1, 30, param1=100, param2=50, maxRadius=300, minRadius=100)\r\n frame_draw = cv.cvtColor(frame0, cv.COLOR_BGR2RGB)\r\n circles = circles[0]\r\n for circle in circles:\r\n cv.circle(frame_draw, (circle[0], circle[1]), int(circle[2]), (0,0,255), thickness = 2)\r\n cv.circle(frame_draw, (circles[0,0], circles[0,1]), int(circles[0,2]), (0,255,0), thickness = 2)\r\n big_circle = circles[0]\r\n \r\n circles = cv.HoughCircles(frame0_gauss, cv.HOUGH_GRADIENT, 1, 30, param1=120, param2=12, maxRadius=20, minRadius=5)\r\n circles = circles[0]\r\n \r\n dist_to_big_circle = np.linalg.norm(circles[:,0:2] - big_circle[0:2], axis=1)\r\n are_circles_correct = 
np.logical_and((dist_to_big_circle > 1.5*big_circle[2]), (dist_to_big_circle < 3.5*big_circle[2]))\r\n correct_circles = circles[are_circles_correct]\r\n \r\n return correct_circles\r\n \r\n\r\n#vnaprej izračunane vrednosti kosinusa\r\ncos_2deg_arr = np.array([ 1.00000000e+00, 9.99390827e-01, 9.97564050e-01, 9.94521895e-01,\r\n 9.90268069e-01, 9.84807753e-01, 9.78147601e-01, 9.70295726e-01,\r\n 9.61261696e-01, 9.51056516e-01, 9.39692621e-01, 9.27183855e-01,\r\n 9.13545458e-01, 8.98794046e-01, 8.82947593e-01, 8.66025404e-01,\r\n 8.48048096e-01, 8.29037573e-01, 8.09016994e-01, 7.88010754e-01,\r\n 7.66044443e-01, 7.43144825e-01, 7.19339800e-01, 6.94658370e-01,\r\n 6.69130606e-01, 6.42787610e-01, 6.15661475e-01, 5.87785252e-01,\r\n 5.59192903e-01, 5.29919264e-01, 5.00000000e-01, 4.69471563e-01,\r\n 4.38371147e-01, 4.06736643e-01, 3.74606593e-01, 3.42020143e-01,\r\n 3.09016994e-01, 2.75637356e-01, 2.41921896e-01, 2.07911691e-01,\r\n 1.73648178e-01, 1.39173101e-01, 1.04528463e-01, 6.97564737e-02,\r\n 3.48994967e-02, 6.12323400e-17, -3.48994967e-02, -6.97564737e-02,\r\n -1.04528463e-01, -1.39173101e-01, -1.73648178e-01, -2.07911691e-01,\r\n -2.41921896e-01, -2.75637356e-01, -3.09016994e-01, -3.42020143e-01,\r\n -3.74606593e-01, -4.06736643e-01, -4.38371147e-01, -4.69471563e-01,\r\n -5.00000000e-01, -5.29919264e-01, -5.59192903e-01, -5.87785252e-01,\r\n -6.15661475e-01, -6.42787610e-01, -6.69130606e-01, -6.94658370e-01,\r\n -7.19339800e-01, -7.43144825e-01, -7.66044443e-01, -7.88010754e-01,\r\n -8.09016994e-01, -8.29037573e-01, -8.48048096e-01, -8.66025404e-01,\r\n -8.82947593e-01, -8.98794046e-01, -9.13545458e-01, -9.27183855e-01,\r\n -9.39692621e-01, -9.51056516e-01, -9.61261696e-01, -9.70295726e-01,\r\n -9.78147601e-01, -9.84807753e-01, -9.90268069e-01, -9.94521895e-01,\r\n -9.97564050e-01, -9.99390827e-01, -1.00000000e+00, -9.99390827e-01,\r\n -9.97564050e-01, -9.94521895e-01, -9.90268069e-01, -9.84807753e-01,\r\n -9.78147601e-01, -9.70295726e-01, -9.61261696e-01, -9.51056516e-01,\r\n -9.39692621e-01, -9.27183855e-01, -9.13545458e-01, -8.98794046e-01,\r\n -8.82947593e-01, -8.66025404e-01, -8.48048096e-01, -8.29037573e-01,\r\n -8.09016994e-01, -7.88010754e-01, -7.66044443e-01, -7.43144825e-01,\r\n -7.19339800e-01, -6.94658370e-01, -6.69130606e-01, -6.42787610e-01,\r\n -6.15661475e-01, -5.87785252e-01, -5.59192903e-01, -5.29919264e-01,\r\n -5.00000000e-01, -4.69471563e-01, -4.38371147e-01, -4.06736643e-01,\r\n -3.74606593e-01, -3.42020143e-01, -3.09016994e-01, -2.75637356e-01,\r\n -2.41921896e-01, -2.07911691e-01, -1.73648178e-01, -1.39173101e-01,\r\n -1.04528463e-01, -6.97564737e-02, -3.48994967e-02, -1.83697020e-16,\r\n 3.48994967e-02, 6.97564737e-02, 1.04528463e-01, 1.39173101e-01,\r\n 1.73648178e-01, 2.07911691e-01, 2.41921896e-01, 2.75637356e-01,\r\n 3.09016994e-01, 3.42020143e-01, 3.74606593e-01, 4.06736643e-01,\r\n 4.38371147e-01, 4.69471563e-01, 5.00000000e-01, 5.29919264e-01,\r\n 5.59192903e-01, 5.87785252e-01, 6.15661475e-01, 6.42787610e-01,\r\n 6.69130606e-01, 6.94658370e-01, 7.19339800e-01, 7.43144825e-01,\r\n 7.66044443e-01, 7.88010754e-01, 8.09016994e-01, 8.29037573e-01,\r\n 8.48048096e-01, 8.66025404e-01, 8.82947593e-01, 8.98794046e-01,\r\n 9.13545458e-01, 9.27183855e-01, 9.39692621e-01, 9.51056516e-01,\r\n 9.61261696e-01, 9.70295726e-01, 9.78147601e-01, 9.84807753e-01,\r\n 9.90268069e-01, 9.94521895e-01, 9.97564050e-01, 9.99390827e-01])\r\n\r\ndef cos_2deg(iArr):\r\n return cos_2deg_arr[iArr]\r\n\r\n \r\n \r\n#TOČKE ZATIČEV\r\npeg_points_L = np.array([[352, 280],\r\n [460, 
279],\r\n [568, 280],\r\n [351, 388],\r\n [460, 388],\r\n [568, 390],\r\n [353, 497],\r\n [461, 497],\r\n [567, 499]])\r\n \r\npeg_points_R = np.array([[757, 267],\r\n [867, 273],\r\n [981, 281],\r\n [748, 376],\r\n [860, 382],\r\n [971, 392],\r\n [741, 485],\r\n [850, 495],\r\n [959, 502]])\r\n \r\npeg_points = (peg_points_L, peg_points_R)\r\n \r\npeg_points_6322 = np.array([[727, 265],\r\n [835, 267],\r\n [948, 266],\r\n [726, 376],\r\n [835, 376],\r\n [946, 378],\r\n [725, 484],\r\n [833, 486],\r\n [944, 487]])" }, { "alpha_fraction": 0.7535680532455444, "alphanum_fraction": 0.7716460227966309, "avg_line_length": 34.03333282470703, "blob_id": "8fbfa25ee3692b1837b75e1dd89cc27871e7b67d", "content_id": "bcc237376e00544ba09c994613914abec3d4e6fd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1062, "license_type": "no_license", "max_line_length": 153, "num_lines": 30, "path": "/README.md", "repo_name": "Miha-Katrasnik/RV_izziv_sk4", "src_encoding": "UTF-8", "text": "# RV_izziv_sk4\nSeminarska naloga pri predmetu Robotski vid (UL FE)\n\nPrimeri videoposnetkov in prikaz delovanja se nahaja na povezavi:\n<a href=\"https://drive.google.com/drive/folders/10Jd7BtdS0cPqYwX1KFjaPRAEqrXxwKll?usp=sharing\" target=\"_blank\">Google Drive</a>\n\nTestne videoposnetke naložite v mapo videos. V 9. vrstici datoteke <code>RV_seminarska.py</code> lahko spremenite imena datotek, ki jih želite uporabiti.\n\n```python\nvideo_file = ['videos/MVI_6342.MOV', 'videos/MVI_6339.MOV'] #vrstni red: leva roka, desna roka\n```\n\nMed delovanjem programa lahko s tipko <b>w</b> začasno ustavite program. Nato lahko prikažete različne slike:\n* d - razlika med trenutno in prvo sliko\n* t - upragovljena slika\n* f - barvna slika\n* 0 - prva slika posnetka\n\nS tipko q lahko video predčasno zaključite, lahko pa enostavno zaprete okno.\n\n## Opis programa\n\nProgram je namenjen avtomatskemu merjenju časa opravljanja preizkusa z devetimi zatiči.\n\n## Knjižnice\n\nZa uporabo programa so potrebne knjižnice:\n* OpenCV (testirano za verzijo 4.5.1)\n* NumPy\n* Matplotlib\n" } ]
3
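The `Miha-Katrasnik/RV_izziv_sk4` record above detects hands and pegs by blurring each frame, converting it to grayscale, and subtracting a reference frame (its `GrayDiff` class). Below is a minimal sketch of that frame-differencing step, assuming 8-bit BGR inputs; the function name and default values are illustrative, not the repository's class:

```python
import cv2 as cv

def motion_mask(reference_bgr, frame_bgr, threshold=2, ksize=5):
    # blur first so sensor noise does not survive the subtraction
    ref = cv.cvtColor(cv.GaussianBlur(reference_bgr, (ksize, ksize), 0),
                      cv.COLOR_BGR2GRAY)
    cur = cv.cvtColor(cv.GaussianBlur(frame_bgr, (ksize, ksize), 0),
                      cv.COLOR_BGR2GRAY)
    diff = cv.absdiff(cur, ref)  # per-pixel absolute difference
    # binarize: 255 where the scene changed by more than the threshold
    _, mask = cv.threshold(diff, threshold, 255, cv.THRESH_BINARY)
    return mask
```

The mean of such a mask over a circle or ring around each hole is what the main script thresholds to decide whether a peg or a hand is present.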
palaniappanofficial/Competitive-Programming
https://github.com/palaniappanofficial/Competitive-Programming
e6c5de9198cd542924c1b51e9734d0cc0efa64d4
66e7756925feb957403a2ef47627ce4527fc4a9b
49b10c1fd27e774e6c9de5a4301c6d53b2e0f750
refs/heads/master
2023-05-07T12:51:41.523061
2021-06-01T12:08:33
2021-06-01T12:08:33
332,156,344
4
0
null
null
null
null
null
[ { "alpha_fraction": 0.5331069827079773, "alphanum_fraction": 0.5543293952941895, "avg_line_length": 24.085105895996094, "blob_id": "f63c6537beb889f4998ef1331ab88dcca1a8ae6f", "content_id": "3c7e1672313144888d87988de3fe3f7e10260441", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1178, "license_type": "no_license", "max_line_length": 55, "num_lines": 47, "path": "/Data Structures/Singly Linked List/delete and remove loop from a linked list.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "class Linklist:\n def __init__(self, data):\n self.data = data\n self.next = None\nclass Linkedlist:\n def __init__(self):\n self.head=None\n def addElement(self,data):\n node=Linklist(data)\n node.next=self.head\n self.head=node\n def printdata(self):\n temp=self.head\n while(temp is not None):\n print(temp.data)\n temp=temp.next\n def detectloop(self):\n slow=self.head\n fast=self.head\n while(slow and fast and fast.next):\n slow=slow.next\n fast=fast.next.next\n if(slow==fast):\n self.removeloop(slow)\n return 1\n return 0\n def removeloop(self,loop):\n ptr1=self.head\n while(1):\n ptr2 = loop\n while(ptr2.next!=loop and ptr2.next!=ptr1):\n ptr2=ptr2.next\n if(ptr2.next==ptr1):\n break\n ptr1=ptr1.next\n ptr2.next=None\n\n\nobj=Linkedlist()\nobj.addElement(50)\nobj.addElement(40)\nobj.addElement(30)\nobj.addElement(20)\nobj.addElement(10)\nobj.head.next.next.next.next.next=obj.head.next.next\nobj.detectloop()\nobj.printdata()" }, { "alpha_fraction": 0.5680271983146667, "alphanum_fraction": 0.6224489808082581, "avg_line_length": 21.69230842590332, "blob_id": "88b5bb26154b0f97b9b53ea6dad6e0a1945797d5", "content_id": "0a1e8228a1b98e8062d28e7dec86669689c8871b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 294, "license_type": "no_license", "max_line_length": 33, "num_lines": 13, "path": "/Problems/array rotation.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "def rotate(array,start,end):\n for i in range(start+1):\n rotateonebyone(array,end)\n print(array)\ndef rotateonebyone(array,end):\n temp=array[0]\n for i in range(end):\n array[i]=array[i+1]\n array[i+1]=temp\n return array\n\narray=[0,1,2,3,4,5,6,7,8,9]\nrotate(array,2,9)" }, { "alpha_fraction": 0.5947712659835815, "alphanum_fraction": 0.6318082809448242, "avg_line_length": 18.125, "blob_id": "470a4eabfa03ec03d5864230daf3a5c328d2dde3", "content_id": "966003200883e7b048070ab4bbf5577d5c26da7c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 459, "license_type": "no_license", "max_line_length": 44, "num_lines": 24, "path": "/Problems/decimal to binary,octal.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "#Using Built In Function\na=12\nprint(bin(a).replace(\"0b\",\"\"))\nprint(oct(a))\n\n#Without Using Built In Function\ndef dectobin(value):\n if value>=1:\n dectobin(value//2)\n print(value%2,end=\"\")\ndectobin(12)\n\n\ndef dectooct(value):\n print(\"\\n\")\n digitval=1\n countval=0\n while(value!=0):\n remainder=value%8\n countval=countval+remainder*digitval\n digitval=digitval*10\n value=value//8\n print(countval)\ndectooct(33)\n" }, { "alpha_fraction": 0.5979591608047485, "alphanum_fraction": 0.6061224341392517, "avg_line_length": 24.8157901763916, "blob_id": "ad8505d9d3144e2595c665a043c4b55e11eb7e5f", "content_id": 
"b9749f11f2516e1ad18ba8ee8bca5aeb84099ba2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 980, "license_type": "no_license", "max_line_length": 47, "num_lines": 38, "path": "/Data Structures/Singly Linked List/rotate a linked list.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "class Linkedlist:\n def __init__(self,data):\n self.data=data\n self.next=None\nclass Linklist:\n def __init__(self):\n self.head=None\n def insertfirst(self,data):\n newlist=Linkedlist(data)\n newlist.next=self.head\n self.head=newlist\n def rotatelinkedlist(self,k):\n count=1\n current=self.head\n while(count<k and current is not None):\n current=current.next\n count=count+1\n knode=current\n while(current.next is not None):\n current=current.next\n current.next=self.head\n self.head=knode.next\n knode.next=None\n def printlinkedlist(self):\n temp=self.head\n while(temp):\n print(temp.data, \"\",end=\"\")\n temp=temp.next\n print(\"\\n\")\nobj=Linklist()\nobj.insertfirst(1)\nobj.insertfirst(2)\nobj.insertfirst(3)\nobj.insertfirst(4)\nobj.insertfirst(5)\nobj.printlinkedlist()\nobj.rotatelinkedlist(3)\nobj.printlinkedlist()" }, { "alpha_fraction": 0.4107142984867096, "alphanum_fraction": 0.507440447807312, "avg_line_length": 15, "blob_id": "5537d63b14a64e859eaba8b4bf1e9118f2c2e37c", "content_id": "761134cc2149c1e93ff50b316497a72068920bfd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 672, "license_type": "no_license", "max_line_length": 39, "num_lines": 42, "path": "/Problems/numberlinejumps.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "#!/bin/python3\n\nimport math\nimport os\nimport random\nimport re\nimport sys\n\n\n# Complete the kangaroo function below.\ndef kangaroo(x1, v1, x2, v2):\n var1 = 0\n var2 = 0\n var1 = x1 + v1\n var2 = x2 + v2\n count = 1\n while (5000):\n if (var1 == var2):\n return \"YES\"\n elif (count == 5000):\n return \"NO\"\n else:\n var1 = var1 + v1\n var2 = var2 + v2\n count = count + 1\n\n\nif __name__ == '__main__':\n\n x1V1X2V2 = input().split()\n\n x1 = int(x1V1X2V2[0])\n\n v1 = int(x1V1X2V2[1])\n\n x2 = int(x1V1X2V2[2])\n\n v2 = int(x1V1X2V2[3])\n\n result = kangaroo(x1, v1, x2, v2)\n\n print(result)\n" }, { "alpha_fraction": 0.6182937622070312, "alphanum_fraction": 0.6341248750686646, "avg_line_length": 19.321428298950195, "blob_id": "a55ebede1aa8e47a6d9d474099e6075316bb729f", "content_id": "333248333224eb46bbf1fb0ee67a7ad32f43471b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1137, "license_type": "no_license", "max_line_length": 57, "num_lines": 56, "path": "/Data Structures/Binary Search Tree/binary_tree_to_binary_search_tree.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "class node:\n def __init__(self,data):\n self.data=data\n self.left=None\n self.right=None\ndef inorder(root,arr):\n if root is None:\n return None\n if root.left:\n inorder(root.left,arr)\n arr.append(root.data)\n if root.right:\n inorder(root.right,arr)\n return arr\n\ndef nodescount(root):\n if root is None:\n return 0\n return nodescount(root.left)+nodescount(root.right)+1\ndef binaytreetobst(root,new):\n if root is None:\n return None\n binaytreetobst(root.left,new)\n root.data=new[0]\n new.pop(0)\n binaytreetobst(root.right,new)\n return root\n\ndef printbst(bst):\n if 
bst.left:\n printbst(bst.left)\n print(bst.data)\n if bst.right:\n printbst(bst.right)\n\ndef inordersort(root):\n new=[]\n arr=[]\n nodes=0\n nodes=nodescount(root)\n new=inorder(root,arr)\n new.sort()\n print(new)\n print(nodes)\n bst=binaytreetobst(root,new)\n printbst(bst)\n\n\nroot=node(10)\nroot.left=node(21)\nroot.right=node(32)\nroot.left.left=node(52)\nroot.left.right=node(12)\nroot.right.left=node(15)\nroot.right.right=node(9)\ninordersort(root)" }, { "alpha_fraction": 0.599552571773529, "alphanum_fraction": 0.6062639951705933, "avg_line_length": 23.86111068725586, "blob_id": "5d79f9c35542fcfcd737f4b528090bf58cfde2a2", "content_id": "dcc01cb2b305f1fb549c3304d1f8eb6ced6d7691", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 894, "license_type": "no_license", "max_line_length": 39, "num_lines": 36, "path": "/Data Structures/Singly Linked List/reverse a linked list.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "class Linkedlist:\n def __init__(self,data):\n self.data=data\n self.next=None\nclass Linklist:\n def __init__(self):\n self.head=None\n def insertfirst(self,data):\n newlist=Linkedlist(data)\n newlist.next=self.head\n self.head=newlist\n def reverselinkedlist(self):\n prev=None\n current=self.head\n while(current is not None):\n next=current.next\n current.next=prev\n prev=current\n current=next\n self.head=prev\n def printlinkedlist(self):\n temp=self.head\n while(temp is not None):\n print(temp.data,\" \",end=\"\")\n temp=temp.next\n print(\"\\n\")\nobj=Linklist()\nobj.insertfirst(3)\nobj.insertfirst(2)\nobj.insertfirst(1)\nobj.insertfirst(4)\nobj.insertfirst(5)\nobj.insertfirst(6)\nobj.printlinkedlist()\nobj.reverselinkedlist()\nobj.printlinkedlist()" }, { "alpha_fraction": 0.48181816935539246, "alphanum_fraction": 0.4954545497894287, "avg_line_length": 16, "blob_id": "1f9125ba6e2aa07c65821172bbe8bb7eaadbc609", "content_id": "7fe2ef4a12afc4893ebd7524d085bb608f5022e0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 220, "license_type": "no_license", "max_line_length": 31, "num_lines": 13, "path": "/Problems/reverse a word in a given string.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "def splitword(stringword):\n a=[]\n b=\"\"\n a=stringword.split('.')\n i=len(a)-1\n while(i>=0):\n b=b+a[i]\n b=b+\".\"\n i=i-1\n return b\ns=\"hi.palani.appan.how.are.you\"\na=splitword(s)\nprint(a)" }, { "alpha_fraction": 0.5, "alphanum_fraction": 0.568345308303833, "avg_line_length": 24.272727966308594, "blob_id": "dfed758c6381fe72b1e31a43491dc3af83c86d7f", "content_id": "59df0e9cc62c1c9f866229891d93472a563c2656", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 278, "license_type": "no_license", "max_line_length": 50, "num_lines": 11, "path": "/Problems/converting tuple matrix to tuple list.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "#Method 1\ntest_list = [[(4, 5), (7, 8)],\n [(10, 13), (18, 17)],\n [(0, 4), (10, 1)]]\ntest=[element for i in test_list for element in i]\nprint(tuple(zip(*test)))\n\n#Method 2\nfrom itertools import chain\na=chain.from_iterable(test_list)\nprint(tuple(zip(*a)))\n" }, { "alpha_fraction": 0.5381355881690979, "alphanum_fraction": 0.6016949415206909, "avg_line_length": 15.928571701049805, "blob_id": 
"3514eda2d05b0929baceb05621207f57697c0f45", "content_id": "07d014fe40c483e5d4c5edcc26b96fad8a239bc3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 236, "license_type": "no_license", "max_line_length": 37, "num_lines": 14, "path": "/Problems/polymorphism in python.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "# Example 1\nprint(len(\"Palaniappan\"))\nprint(len(['1','8','1','0','6','0']))\n\n# Example 2\n\nclass new:\n def add_function(self,a,b,c=0):\n return a+b+c\nobj=new()\na=obj.add_function(1,2,3)\nb=obj.add_function(25,3)\nprint(a)\nprint(b)" }, { "alpha_fraction": 0.4971098303794861, "alphanum_fraction": 0.52601158618927, "avg_line_length": 18.27777862548828, "blob_id": "d3346acd5d894f7e923fa3fefa8171c7539db96a", "content_id": "2e223082a8bf28e53c9bd57e6db1c7a4ae65444d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 346, "license_type": "no_license", "max_line_length": 43, "num_lines": 18, "path": "/Problems/switch case in python.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "def number_to_string(string):\n dictionary={\n '0':'Zero',\n '1':'One',\n '2':'Two',\n '3':'Three',\n '4':'Four',\n '5':'Five'\n }\n return dictionary.get(string,\"Nothing\")\na=number_to_string('0')\nprint(a)\na=number_to_string('8')\nprint(a)\na=number_to_string('2')\nprint(a)\na=number_to_string('3')\nprint(a)" }, { "alpha_fraction": 0.44610777497291565, "alphanum_fraction": 0.5029940009117126, "avg_line_length": 19.9375, "blob_id": "fe7289814e4434390bc64e7ca0b0088ff4cf7f03", "content_id": "5fc678b3dc8b7da0cecb00cbbb602de98ed83dae", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 334, "license_type": "no_license", "max_line_length": 46, "num_lines": 16, "path": "/Problems/count the number of duplicate occurences in the list.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "def countoccurences(array):\n a=set(array)\n b=[]\n count=0\n dict={}\n for i in a:\n dict[i]=\"0\"\n for i in array:\n dict[i]=int(dict[i])+1\n print(dict)\n for j in dict.values():\n b.append(j)\n if j>1:\n count=count+1\n print(count)\ncountoccurences([1,1,2,3,4,5,5,5,5,7,7,7,9,9])" }, { "alpha_fraction": 0.4935064911842346, "alphanum_fraction": 0.6103895902633667, "avg_line_length": 25, "blob_id": "e9276e232bcf8fed82381867e8542cb9e8b0b07b", "content_id": "79c3b1603898f8808c3e248d983fd8e2e8b6e39b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 77, "license_type": "no_license", "max_line_length": 37, "num_lines": 3, "path": "/Problems/transpose of a matrix using numpy.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "import numpy as np\na=np.array([[2,3,1],[5,7,2],[8,9,2]])\nprint(a.transpose())" }, { "alpha_fraction": 0.5444915294647217, "alphanum_fraction": 0.5600282549858093, "avg_line_length": 22.616666793823242, "blob_id": "fd3d473ba0ba684b4eaa7fd8a2ed694eeae76b80", "content_id": "c391524f53e79d4cb4f0450fef20ea510acd047b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1416, "license_type": "no_license", "max_line_length": 57, "num_lines": 60, "path": "/Data Structures/Singly Linked List/merge two sorted linked lists.py", "repo_name": 
"palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "class Linkedlist:\n def __init__(self,data):\n self.data=data\n self.next=None\nclass Linklist:\n def __init__(self):\n self.head=None\n def addlast(self,data):\n newlist=Linkedlist(data)\n if self.head is None:\n self.head=newlist\n return\n temp = self.head\n while temp.next is not None:\n temp=temp.next\n temp.next=newlist\n def mergetwosortedlinkedlist(self,headA,headB):\n dummynode=Linkedlist(0)\n tail=dummynode\n while True:\n if headA is None:\n tail.next = headB\n break\n if headB is None:\n tail.next = headA\n break\n if headA.data<=headB.data:\n tail.next=headA\n headA=headA.next\n else:\n tail.next=headB\n headB=headB.next\n tail=tail.next\n return dummynode.next\n\n def printlinkedlist(self):\n temp=self.head\n while temp:\n print(temp.data,end=\" \")\n temp=temp.next\n print(\"\\n\")\n\nobj=Linklist()\nobj.addlast(10)\nobj.addlast(13)\nobj.addlast(14)\nobj.addlast(15)\nobj.printlinkedlist()\n\nobj2=Linklist()\nobj2.addlast(1)\nobj2.addlast(2)\nobj2.addlast(3)\nobj2.addlast(4)\nobj2.addlast(5)\nobj2.printlinkedlist()\n\nobj.head=obj.mergetwosortedlinkedlist(obj.head,obj2.head)\n\nobj.printlinkedlist()" }, { "alpha_fraction": 0.662162184715271, "alphanum_fraction": 0.6936936974525452, "avg_line_length": 18, "blob_id": "4412f5c4151eaff151f15fc5b6a4d17ac4eb5019", "content_id": "ce80abc5f073b0ddc466dbd6881f3439fbce8e9f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 666, "license_type": "no_license", "max_line_length": 67, "num_lines": 35, "path": "/Problems/packing and unpacking arguments in python.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "#Packing and Unpacking For List\n\ndef addsum(*array):\n return sum(array)\narray=[1,2,3,4]\na=addsum(*array)\nprint(a)\nc=addsum(*[20,5,3,4,5,6,1])\nprint(c)\n\ndef calculate(a,b,c):\n return a+b+c\na=[1,2,3]\nb=calculate(*a)\nprint(b)\n\ndef printlist(*string):\n list1=list(string)\n list1[0]=\"Hi\"\n print(list1)\nobj=printlist(\"Hello\",\"Palaniappan\",\"How are You\")\n\n\n#Packing and Unpacking For Dictionaries\n\ndef printdict(**dictionary):\n print(dictionary)\n\ndef printanotherdict(Name,Age,Profession):\n print(Name,Age,Profession)\n\ndictionary={\"Name\":\"Palaniappan\",\"Age\":\"20\",\"Profession\":\"Student\"}\n\nobj=printdict(**dictionary)\nobj=printanotherdict(**dictionary)\n\n" }, { "alpha_fraction": 0.7439024448394775, "alphanum_fraction": 0.7560975551605225, "avg_line_length": 26.66666603088379, "blob_id": "86ae35e1e86eee69bfbe595b68070d00997c3c19", "content_id": "0f144e9f45d75ab0a87d041680a8b45363c4a96a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 82, "license_type": "no_license", "max_line_length": 34, "num_lines": 3, "path": "/Problems/finding the combinations of a string.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "from itertools import combinations\nfor i in combinations(\"palani\",5):\n print(i)" }, { "alpha_fraction": 0.5, "alphanum_fraction": 0.5157342553138733, "avg_line_length": 16.875, "blob_id": "82b082e4104d544177a3433cba2765be7861c23b", "content_id": "c4462bbe0ef6cea911c1a0e417473bd296656ff0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 572, "license_type": "no_license", "max_line_length": 50, "num_lines": 32, "path": "/Problems/minimaxsum.py", 
"repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "#!/bin/python3\n\nimport math\nimport os\nimport random\nimport re\nimport sys\n\n\n# Complete the miniMaxSum function below.\ndef miniMaxSum(arr):\n sum = 0\n mini = 0\n maxi = 0\n array1 = []\n for i in range(len(arr)):\n for j in range(len(arr)):\n if (i == j):\n sum = sum\n else:\n sum = sum + arr[j]\n array1.append(sum)\n sum = 0\n mini = min(array1)\n maxi = max(array1)\n print(mini, maxi)\n\n\nif __name__ == '__main__':\n arr = list(map(int, input().rstrip().split()))\n\n miniMaxSum(arr)\n" }, { "alpha_fraction": 0.6677524447441101, "alphanum_fraction": 0.6807817816734314, "avg_line_length": 24.66666603088379, "blob_id": "35a9322dbcb907a96f20aecf7cfff7c7e4372991", "content_id": "2189b16429711003bfbf22c25852d2fa66012705", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 307, "license_type": "no_license", "max_line_length": 60, "num_lines": 12, "path": "/Problems/data hiding in python.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "class A:\n def __init__(self):\n self.__hiddenvariable=0\n def addition(self,addvalue):\n self.__hiddenvariable=self.__hiddenvariable+addvalue\n print(addvalue)\nobj=A()\nobj.addition(20)\nobj.addition(5)\n#This line causes Error\n# print(obj.__hiddenvariable)\nprint(obj._A__hiddenvariable)" }, { "alpha_fraction": 0.6673306822776794, "alphanum_fraction": 0.6932271122932434, "avg_line_length": 16.964284896850586, "blob_id": "0b5bcdb148fd7878606ad2ae88c9980cb1339fe7", "content_id": "73af348c557d5c94d9b505cf8d070e1be85be4b0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 502, "license_type": "no_license", "max_line_length": 58, "num_lines": 28, "path": "/Problems/birthdaycandles.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "#!/bin/python3\n\nimport math\nimport os\nimport random\nimport re\nimport sys\n\n#\n# Complete the 'birthdayCakeCandles' function below.\n#\n# The function is expected to return an INTEGER.\n# The function accepts INTEGER_ARRAY candles as parameter.\n#\n\ndef birthdayCakeCandles(candles):\n # Write your code here\n maxi=0\n count=0\n maxi=max(candles)\n for i in candles:\n if(maxi==i):\n count=count+1\n return count\n\ncandles=[5,2,6,6,8,9,1,9,9]\na=birthdayCakeCandles(candles)\nprint(a)" }, { "alpha_fraction": 0.5373563170433044, "alphanum_fraction": 0.5488505959510803, "avg_line_length": 25.846153259277344, "blob_id": "ac97e080d46e448cdc21f10317a12aeebddfd3ab", "content_id": "a57f22a94c1c21ca60b8e5c4a240e9f902842eeb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 348, "license_type": "no_license", "max_line_length": 65, "num_lines": 13, "path": "/Problems/printing objects in python.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "class Main:\n def __init__(self,val_a,val_b):\n self.aval=val_a\n self.bval=val_b\n def __repr__(self):\n return \"A Value : %s B Value : %s\" %(self.aval,self.bval)\n def __str__(self):\n return \"A : %s B : %s\" %(self.aval,self.bval)\n\n\nobj=Main(75,90)\nprint(obj) #Calls __str__ method\nprint([obj]) #Calls __repr__method" }, { "alpha_fraction": 0.5394126772880554, "alphanum_fraction": 0.5625966191291809, "avg_line_length": 19.21875, "blob_id": "4d39183bcd865b4b7ffda889b6d9d49cf7174f48", 
"content_id": "30a96112c63db8986f076d65295fdb0783b17167", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 647, "license_type": "no_license", "max_line_length": 56, "num_lines": 32, "path": "/Problems/plusminus.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "#!/bin/python3\n\nimport math\nimport os\nimport random\nimport re\nimport sys\n\n# Complete the plusMinus function below.\ndef plusMinus(arr):\n plus=0\n minus=0\n zero=0\n floatplus=0\n floatminus=0\n floatzero=0\n for i in arr:\n if i<0:\n minus=minus+1\n elif i>0:\n plus=plus+1\n else:\n zero=zero+1\n print(\"{:.6f}\".format(float(plus)/float(len(arr))))\n print(\"{:.6f}\".format(float(minus)/float(len(arr))))\n print(\"{:.6f}\".format(float(zero)/float(len(arr))))\nif __name__ == '__main__':\n n = int(input())\n\n arr = list(map(int, input().rstrip().split()))\n\n plusMinus(arr)\n" }, { "alpha_fraction": 0.686274528503418, "alphanum_fraction": 0.6928104758262634, "avg_line_length": 18.125, "blob_id": "98abd7eb4c1cd3ee4cb9534b8d6b4976d368f9fd", "content_id": "0cb0ffef66b06a88d36e93cbceb3fab6319c6768", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 153, "license_type": "no_license", "max_line_length": 34, "num_lines": 8, "path": "/Problems/reverse a string without stack.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "def reverestring(strings):\n string=\"\"\n string=strings[::-1]\n return string\n\nstrings=input(\"Enter the String:\")\na=reverestring(strings)\nprint(a)\n" }, { "alpha_fraction": 0.553629457950592, "alphanum_fraction": 0.5590465664863586, "avg_line_length": 23.210525512695312, "blob_id": "5033e5defacfc5f43f826aca324462d5080f0e40", "content_id": "9d5cf9dd184fd3ad532087b5c3d8ff6f9ee8beaf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 923, "license_type": "no_license", "max_line_length": 41, "num_lines": 38, "path": "/Data Structures/Singly Linked List/linked list to array.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "class Linkedlist:\n def __init__(self,data):\n self.data=data\n self.next=None\nclass Linklist:\n def __init__(self):\n self.head=None\n def insertlast(self,data):\n linklist=Linkedlist(data)\n if self.head==None:\n self.head=linklist\n else:\n temp=self.head\n while(temp.next is not None):\n temp=temp.next\n temp.next=linklist\n def printlist(self):\n temp=self.head\n while(temp is not None):\n print(temp.data,end=\" \")\n temp=temp.next\n print(\"\\n\")\n def linkedlisttoarray(self):\n arr=[]\n temp=self.head\n while(temp is not None):\n arr.append(temp.data)\n temp=temp.next\n print(arr)\n\nobj=Linklist()\nobj.insertlast(1)\nobj.insertlast(2)\nobj.insertlast(8)\nobj.insertlast(4)\nobj.insertlast(5)\nobj.printlist()\nobj.linkedlisttoarray()\n\n\n\n" }, { "alpha_fraction": 0.6634615659713745, "alphanum_fraction": 0.6858974099159241, "avg_line_length": 16.828571319580078, "blob_id": "312b51fb6571571c536fed9f2ede7ee4c4274a8c", "content_id": "438d337398088769fb50d524fe42d72a2f7e162c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 624, "license_type": "no_license", "max_line_length": 40, "num_lines": 35, "path": "/Data Structures/Graph/depth first traversal.py", "repo_name": "palaniappanofficial/Competitive-Programming", 
"src_encoding": "UTF-8", "text": "# Python3 program to print DFS traversal\n# from a given given graph\nfrom collections import defaultdict\n\n\nclass Graph:\n\n\tdef __init__(self):\n\t\tself.graph = defaultdict(list)\n\n\tdef addEdge(self, key, value):\n\t\tself.graph[key].append(value)\n\n\tdef DFSUtil(self, a, visited):\n\n\t\tvisited.add(a)\n\t\tprint(a, end=' ')\n\n\t\tfor neighbour in self.graph[a]:\n\t\t\tif neighbour not in visited:\n\t\t\t\tself.DFSUtil(neighbour, visited)\n\tdef DFS(self, vertex):\n\n\t\tvisited = set()\n\t\tself.DFSUtil(vertex, visited)\n\nobj = Graph()\nobj.addEdge(0, 1)\nobj.addEdge(0, 2)\nobj.addEdge(1, 2)\nobj.addEdge(2, 0)\nobj.addEdge(2, 3)\nobj.addEdge(3, 3)\n\nobj.DFS(2)\n" }, { "alpha_fraction": 0.5696378946304321, "alphanum_fraction": 0.5807799696922302, "avg_line_length": 21.46875, "blob_id": "757e904f6fbac577c0041f5392b21fd80c83be16", "content_id": "07b6c40fec9ce99de7ed0e05eb1c09fcd316cd2a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 718, "license_type": "no_license", "max_line_length": 32, "num_lines": 32, "path": "/Data Structures/Queue/implementation of queue using linked list.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "class Linkedlist:\n def __init__(self,data):\n self.data=data\n self.next=None\nclass Queue:\n def __init__(self):\n self.rear=None\n self.front=None\n def enqueue(self,data):\n newlist=Linkedlist(data)\n if self.rear is None:\n self.rear=newlist\n self.front=newlist\n return\n self.rear.next=newlist\n self.rear=newlist\n def dequeue(self):\n if self.front==None:\n return\n temp=self.front\n self.front=temp.next\n if self.front==None:\n self.rear=None\n\nobj=Queue()\nobj.enqueue(10)\nobj.enqueue(20)\nobj.enqueue(30)\nobj.enqueue(40)\nobj.dequeue()\nprint(obj.front.data)\nprint(obj.rear.data)" }, { "alpha_fraction": 0.5943952798843384, "alphanum_fraction": 0.6047197580337524, "avg_line_length": 22.379310607910156, "blob_id": "573dc418a519a915295b92f0a4801596e71a525b", "content_id": "806f23dd09d30630708039bbeaa764829b34a2e9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 678, "license_type": "no_license", "max_line_length": 42, "num_lines": 29, "path": "/Data Structures/Binary Search Tree/breadth_first_search.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "class node:\n def __init__(self,data):\n self.data=data\n self.left=None\n self.right=None\n\ndef breadthfirstsearch(root):\n if root is None:\n return None\n queues = []\n array=[]\n queues.append(root)\n array.append(root.data)\n while (len(queues)>0):\n refer=queues.pop(0)\n if refer.left:\n queues.append(refer.left)\n array.append(refer.left.data)\n if refer.right:\n queues.append(refer.right)\n array.append(refer.right.data)\n return array\nroot=node(1)\nroot.left=node(2)\nroot.right=node(3)\nroot.left.left=node(4)\nroot.left.right=node(5)\nb=breadthfirstsearch(root)\nprint(b)\n" }, { "alpha_fraction": 0.5616438388824463, "alphanum_fraction": 0.5707762837409973, "avg_line_length": 21, "blob_id": "8884351a4e10ad5c7a06ae16ebcd1c1584bcf564", "content_id": "6ba299b0f8459d54292520e79e4aeca0f567ffe9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 219, "license_type": "no_license", "max_line_length": 30, "num_lines": 10, "path": "/Problems/bitwise operators.py", "repo_name": 
"palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "def bitwise(a,b):\n print(\"Bitwise AND\",a & b)\n print(\"Bitwise OR\",a | b)\n print(\"Bitwise NOT\",~ a )\n print(\"Bitwise EX-OR\",a^b)\n print(\"Left Shift\",a<<b)\n print(\"Right Shift\",a>>b)\na=4\nb=5\nbitwise(a,b)" }, { "alpha_fraction": 0.5959799289703369, "alphanum_fraction": 0.6110552549362183, "avg_line_length": 26.61111068725586, "blob_id": "15ae14f241e05f5267d38a1bfbd5f1f20c1a589a", "content_id": "4f9fe86f41abe67ec685e261eba4cf85c28efb11", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 995, "license_type": "no_license", "max_line_length": 51, "num_lines": 36, "path": "/Data Structures/Graph/graph representations - undirected.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "class vertex:\n def __init__(self,data):\n self.data=data\n self.next=None\nclass graph:\n def __init__(self,vertices):\n self.vertices=vertices\n self.totalvertex=[None]*self.vertices\n def appendnode(self,source,destination):\n node=vertex(destination)\n node.next=self.totalvertex[source]\n self.totalvertex[source]=node\n\n node=vertex(source)\n node.next=self.totalvertex[destination]\n self.totalvertex[destination]=node\n def printgraph(self):\n for i in range(self.vertices):\n print(\"Adjacent Vertices for\",i)\n lastnode=self.totalvertex[i]\n print(\"head\",end=\" \")\n while lastnode:\n print(\"--->\",lastnode.data,end=\" \")\n lastnode=lastnode.next\n\n print(\"\\n\")\na=5\nobj=graph(a)\nobj.appendnode(0,1)\nobj.appendnode(0,4)\nobj.appendnode(1,2)\nobj.appendnode(1,3)\nobj.appendnode(1,4)\nobj.appendnode(2,3)\nobj.appendnode(3,4)\nobj.printgraph()\n\n" }, { "alpha_fraction": 0.42553192377090454, "alphanum_fraction": 0.5531914830207825, "avg_line_length": 17.799999237060547, "blob_id": "4e5726be2335bb1b93b7c4a7da734935c80f6ca9", "content_id": "7dcef5be57b84f254a34f6b1afc6fc6292d59295", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 94, "license_type": "no_license", "max_line_length": 29, "num_lines": 5, "path": "/Problems/addition of two matrices using numpy.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "import numpy as np\na=np.array([[0,1,2],[3,5,7]])\nb=np.array([[8,9,7],[5,6,9]])\nc=a+b\nprint(c)\n" }, { "alpha_fraction": 0.7113401889801025, "alphanum_fraction": 0.7113401889801025, "avg_line_length": 23.25, "blob_id": "b81403b7d3699979b82f16db9949973435c26832", "content_id": "1d4d6d80ad153b787d6903b3bef79c75465376b6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 97, "license_type": "no_license", "max_line_length": 35, "num_lines": 4, "path": "/Problems/finding the permutations of a string.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "import itertools\na=input(\"Enter the String : \")\nfor i in itertools.permutations(a):\n print(i)\n" }, { "alpha_fraction": 0.38793104887008667, "alphanum_fraction": 0.5258620977401733, "avg_line_length": 22.399999618530273, "blob_id": "d899530d666c3a412224623236e5b712040c239b", "content_id": "0002f96e0f6681104cacd104a6f31ca8f295c96a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 116, "license_type": "no_license", "max_line_length": 51, "num_lines": 5, "path": "/Problems/remove tuples of length 
k.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "tuples=[(1,9),(7,2),(4,),(28,29),(3,27),(2,),(87,)]\nk=1\na=filter(lambda x:len(x)!=k,tuples)\nfor i in a:\n print(i)" }, { "alpha_fraction": 0.41796875, "alphanum_fraction": 0.435546875, "avg_line_length": 21.217391967773438, "blob_id": "bd03dd79504cfe4f0bb88ed66e9a055e0c9f468c", "content_id": "5deb44f6feac60b0e7d735ab38e46fb9172d70ac", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 512, "license_type": "no_license", "max_line_length": 45, "num_lines": 23, "path": "/Problems/stringoperations.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "def stringop(n):\n a = n\n arr = []\n string = \"\"\n intval = 0\n length = 0\n arr = a.split()\n for i in arr:\n for j in i:\n length = len(i)\n intval = ord(j) + length\n if intval > 122:\n string = string + \"z\"\n elif intval > 90 and intval < 96:\n string = string + \"Z\"\n else:\n string = string + chr(intval)\n string = string + \" \"\n print(string)\n\n\nstring = \"go iNDIa\"\nstringop(string)\n\n" }, { "alpha_fraction": 0.6437007784843445, "alphanum_fraction": 0.6456692814826965, "avg_line_length": 24.299999237060547, "blob_id": "6aa73a4d5d3987a2b36149dd81d9b7da5f959174", "content_id": "3449c7ece33f894b3fb048f3116836e95e9810da", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 508, "license_type": "no_license", "max_line_length": 42, "num_lines": 20, "path": "/Data Structures/Stack/reverse a string using stack.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "def createstack():\n stack=[]\n return stack\ndef pushelement(stack,element):\n return stack.append(element)\ndef deleteelement(stack):\n if len(stack)==0:\n return \"\"\n else:\n return stack.pop()\ndef reverseelement():\n stack=createstack()\n string=\"\"\n userInput=input(\"Enter a String:\")\n for i in range(len(userInput)):\n pushelement(stack,userInput[i])\n for i in range(len(userInput)):\n string=string+deleteelement(stack)\n print(string)\nreverseelement()\n\n\n" }, { "alpha_fraction": 0.4285714328289032, "alphanum_fraction": 0.4642857015132904, "avg_line_length": 8.333333015441895, "blob_id": "4ef98f3cc8d0c3b70cd68dffc95bafb750f658ca", "content_id": "36f2cdf5cd73391c7a79b12e46931b71c68ff941", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 84, "license_type": "no_license", "max_line_length": 20, "num_lines": 9, "path": "/Problems/sum of array.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "\na = []\nfor i in range(1,5):\n a.append(i)\n\ns = 0\nfor i in a:\n s += i\n\nprint(s)" }, { "alpha_fraction": 0.5621547102928162, "alphanum_fraction": 0.5828729271888733, "avg_line_length": 25.851852416992188, "blob_id": "529ea7e6c8dd74953f75d2373f398bec182b4610", "content_id": "4ac7383d08092d060b56e435e4baed6b9b1040dc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 724, "license_type": "no_license", "max_line_length": 43, "num_lines": 27, "path": "/Data Structures/Graph/breadth first traversal.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "from collections import defaultdict\nclass Graph:\n def __init__(self):\n self.graph=defaultdict(list)\n def 
insertgraph(self,key,value):\n self.graph[key].append(value)\n def breadthfirst(self,key):\n queue=[]\n queue.append(key)\n visible=[False]*(max(self.graph)+1)\n visible[key] =True\n while queue:\n value=queue.pop(0)\n print(value,end=\" \")\n for i in self.graph[value]:\n if visible[i]==False:\n queue.append(i)\n visible[i]=True\n\nobj=Graph()\nobj.insertgraph(0,1)\nobj.insertgraph(0,2)\nobj.insertgraph(1,2)\nobj.insertgraph(2,0)\nobj.insertgraph(2,3)\nobj.insertgraph(3,3)\nobj.breadthfirst(2)" }, { "alpha_fraction": 0.7172236442565918, "alphanum_fraction": 0.7172236442565918, "avg_line_length": 25, "blob_id": "16a4196807ac6557df895fb186891b7f57d3b310", "content_id": "af263f56c1c36c16da79c03c78b79c324f48270f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 389, "license_type": "no_license", "max_line_length": 43, "num_lines": 15, "path": "/Problems/byte and string object --- encode and decode.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "string_a=\"Hello Palaniappan\"\nstring_b=b\"Hello Palaniappan\"\nencodedresult=string_a.encode('ASCII')\nif encodedresult==string_b:\n print(\"Encoded Successfully\")\n print(\"Encoded value is\",encodedresult)\nelse:\n print(\"Not Encoded\")\n\ndecodedresult=string_b.decode(\"ASCII\")\nif string_a==decodedresult:\n print(\"Decoded Successfully\")\n print(\"Decoded Value is\",decodedresult)\nelse:\n print(\"Not Decoded\")" }, { "alpha_fraction": 0.6122449040412903, "alphanum_fraction": 0.6275510191917419, "avg_line_length": 18.649999618530273, "blob_id": "15cdbd2bc699e65466360280717447001517ea2a", "content_id": "52025ff4aeda59ec04d076ca2eee37af4ab134da", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 392, "license_type": "no_license", "max_line_length": 28, "num_lines": 20, "path": "/Data Structures/Binary Search Tree/inorder_traversal.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "class node:\n def __init__(self,data):\n self.data=data\n self.left=None\n self.right=None\nary=[]\ndef inorder(root):\n if root.left:\n inorder(root.left)\n ary.append(root.data)\n if root.right:\n inorder(root.right)\n return ary\nroot=node(9)\nroot.left=node(7)\nroot.right=node(15)\nroot.left.left=node(5)\nroot.left.right=node(8)\nb=inorder(root)\nprint(b)" }, { "alpha_fraction": 0.5845481157302856, "alphanum_fraction": 0.5954810380935669, "avg_line_length": 23.464284896850586, "blob_id": "d9c5519ef1664703532118db386f0f9974b031fb", "content_id": "d80e37f8453c925bbb0ff920d74dbacb6ef5cab7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1372, "license_type": "no_license", "max_line_length": 48, "num_lines": 56, "path": "/Data Structures/Singly Linked List/final node of a linked list to loop.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "class Linkedlist:\n def __init__(self,data):\n self.data=data\n self.next=None\nclass Linklist:\n def __init__(self):\n self.head=None\n def insertlast(self,data):\n if self.head==None:\n newnode=Linkedlist(data)\n self.head=newnode\n else:\n temp=self.head\n newnode=Linkedlist(data)\n while(temp.next is not None):\n temp=temp.next\n temp.next=newnode\n def printlist(self):\n temp=self.head\n while(temp is not None):\n print(temp.data)\n temp=temp.next\ndef newnode(data):\n node=Linkedlist(data)\n return node\ndef 
printlist(linkedlist):\n temp=linkedlist\n while(temp is not None):\n print(temp.data)\n temp=temp.next\ndef finalnodeconnection(obj1):\n slow=obj1\n fast=obj1\n slow=slow.next\n fast=fast.next.next\n while(fast and fast.next is not None):\n if(slow==fast):\n break\n slow=slow.next\n fast=fast.next.next\n if(slow!=fast):\n return None\n slow=obj1\n while(slow!=fast):\n slow=slow.next\n fast=fast.next\n return slow\n\n\nobj1=newnode(1)\nobj1.next=newnode(2)\nobj1.next.next=newnode(3)\nobj1.next.next.next=newnode(4)\nobj1.next.next.next.next=obj1.next\na=finalnodeconnection(obj1)\nprint(\"Final Node Connected to the Node\",a.data)\n\n\n" }, { "alpha_fraction": 0.6210191249847412, "alphanum_fraction": 0.6337579488754272, "avg_line_length": 22.296297073364258, "blob_id": "0155102e0e399a896617d134ab84b0775a656f79", "content_id": "fdad9eaa88f3652f2d4723f78c673cc06bfd7a71", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 628, "license_type": "no_license", "max_line_length": 48, "num_lines": 27, "path": "/Data Structures/Singly Linked List/length of a linked list using recursion.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "class Linkedlist:\n def __init__(self,data):\n self.data=data\n self.next=None\nclass Linklist:\n def __init__(self):\n self.head=None\n def insertfirst(self,data):\n newnode=Linkedlist(data)\n newnode.next=self.head\n self.head=newnode\n def countlinkedlist(self,temp):\n if(not temp):\n return 0\n return 1+self.countlinkedlist(temp.next)\n def count(self):\n temp = self.head\n return self.countlinkedlist(temp)\n\nobj=Linklist()\nobj.insertfirst(2)\nobj.insertfirst(4)\nobj.insertfirst(6)\nobj.insertfirst(8)\nobj.insertfirst(12)\na=obj.count()\nprint(a)" }, { "alpha_fraction": 0.42281877994537354, "alphanum_fraction": 0.4244966506958008, "avg_line_length": 24.95652198791504, "blob_id": "39aa56eef0d2a040de12ec10aa688f35be6c0fd9", "content_id": "f370213c09639c64732ad762143e4c981a7f0745", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 596, "license_type": "no_license", "max_line_length": 45, "num_lines": 23, "path": "/Data Structures/Stack/balancing paranthesis.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "def balancingparanthesis(expr):\n stack=[]\n for each in expr:\n if each in [\"(\",\"{\",\"[\"]:\n stack.append(each)\n else:\n lastcharacter=stack[len(stack)-1]\n if lastcharacter==\"(\":\n if each==\")\":\n stack.pop()\n elif lastcharacter==\"{\":\n if each==\"}\":\n stack.pop()\n elif lastcharacter==\"[\":\n if each==\"]\":\n stack.pop()\n if stack:\n return False\n return True\n\nexpr=\"[{(([[]]))}]\"\na=balancingparanthesis(expr)\nprint(a)" }, { "alpha_fraction": 0.4417670667171478, "alphanum_fraction": 0.4658634662628174, "avg_line_length": 16.785715103149414, "blob_id": "a5c1f7c3f4216a2d03728a9e6c425425ac096816", "content_id": "a0c2a50543f80857135f468623e171d65caed0cb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 249, "license_type": "no_license", "max_line_length": 26, "num_lines": 14, "path": "/Problems/mergetwosortedlist.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "class Solution:\n def solve(self, a, b):\n c=[]\n for i in a:\n c.append(i)\n for i in b:\n c.append(i)\n c.sort()\n return 
c\na=[8,5,9]\nb=[6,7,2]\nobj=Solution()\nvalues=obj.solve(a,b)\nprint(values)\n" }, { "alpha_fraction": 0.5454545617103577, "alphanum_fraction": 0.6007905006408691, "avg_line_length": 20.08333396911621, "blob_id": "42231584712a168c37c8d91b4ee2997def188739", "content_id": "cfa32463e87fd1c34aaaf342df68707f4e535240", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 253, "license_type": "no_license", "max_line_length": 45, "num_lines": 12, "path": "/Problems/extraction of digit from tuple.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "from itertools import chain\na=[(1,2),(5,8),(9,7),(16,25),(27,22)]\nprint(a)\nvalues=map(lambda b:b,chain.from_iterable(a))\nsets=set()\nprint(values)\nfor i in values:\n for j in str(i):\n print(i,end=\" \")\n sets.add(j)\nprint(\"\\n\")\nprint(sets)\n" }, { "alpha_fraction": 0.5726495981216431, "alphanum_fraction": 0.6025640964508057, "avg_line_length": 12.823529243469238, "blob_id": "7caf1c4387953a00d5e901466aa64395b0cc2a89", "content_id": "4c3b84814972e68375e3a5d007e32c5447095999", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 234, "license_type": "no_license", "max_line_length": 25, "num_lines": 17, "path": "/Problems/odd numbers using recursion.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "arr=[]\nvalue=0\ndef oddnumbers(n):\n global value\n global arr\n if value>n:\n return arr\n if value%2==1:\n arr.append(value)\n value=value+1\n a=oddnumbers(n)\n return a\n\n\n\narray=oddnumbers(100)\nprint(array)" }, { "alpha_fraction": 0.38831615447998047, "alphanum_fraction": 0.4295532703399658, "avg_line_length": 18.46666717529297, "blob_id": "948d5acb023362455b26c7fd343335ca7e3c1917", "content_id": "5a0e4acdfac084594b6f26b273d309f18f71fb4b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 291, "license_type": "no_license", "max_line_length": 35, "num_lines": 15, "path": "/Problems/oddnumberofdigits.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "class Solution:\n def solve(self, nums):\n count = 0\n arr = []\n new = []\n for i in nums:\n\n arr[:] = str(i)\n if (len(arr) % 2 != 0):\n new.append(i)\n return len(new)\n\nobj=Solution()\na=obj.solve([1,100,52,3,4,9])\nprint(a)" }, { "alpha_fraction": 0.36781609058380127, "alphanum_fraction": 0.39080458879470825, "avg_line_length": 20.875, "blob_id": "cc06b45ab8878cf393294885aaa65346e569e4b2", "content_id": "06b7a3ade9f3aa3a0fd1dc93a34bf448a4c11bad", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 174, "license_type": "no_license", "max_line_length": 35, "num_lines": 8, "path": "/Problems/mountainstaircase.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "n=int(input())\nfor i in range(1,n+1):\n print(\" \"*(n-i),end=\"\")\n if(i==1):\n print(\"*\"*i,end=\"\")\n else:\n print(\"*\"*(i+(i-1)),end=\"\")\n print(\" \"*(n-i))" }, { "alpha_fraction": 0.541478157043457, "alphanum_fraction": 0.5565611124038696, "avg_line_length": 24.538461685180664, "blob_id": "dab4eefd2006eb25129849f09e312911f2586ff3", "content_id": "7af30eb9b970752bc66d9d56470fd26917b6df64", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 663, "license_type": "no_license", 
"max_line_length": 49, "num_lines": 26, "path": "/Data Structures/Queue/implementation of queue using stack.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "class Queue:\n def __init__(self):\n self.array=[]\n self.temparray=[]\n def enqueue(self,data):\n while len(self.array)!=0:\n self.temparray.append(self.array[-1])\n self.array.pop()\n self.array.append(data)\n while len(self.temparray)!=0:\n self.array.append(self.temparray[-1])\n self.temparray.pop()\n def dequeue(self):\n if len(self.array)==0:\n print(\"Queue is Empty\")\n return\n element=self.array[-1]\n self.array.pop()\n return element\nobj=Queue()\nobj.enqueue(1)\nobj.enqueue(2)\nobj.enqueue(3)\nobj.enqueue(4)\na=obj.dequeue()\nprint(a)" }, { "alpha_fraction": 0.5225903391838074, "alphanum_fraction": 0.5557228922843933, "avg_line_length": 21.89655113220215, "blob_id": "0ed23c2854ebb4dffccd2f8c6c9ffea6ba5b16ff", "content_id": "42ff15ce751d84abf17bced0e714f73d598bf5ca", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 664, "license_type": "no_license", "max_line_length": 36, "num_lines": 29, "path": "/Problems/plaintociphertext.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "def cipher(text):\n arr=[]\n arr1=[]\n cipher=\"\"\n plain=\"\"\n new=0\n var=0\n alphabet=\"\"\n countval=0\n chars=\"\"\n values=0\n index=0\n arr[:]=text\n var=int(input(\"Enter the Key:\"))\n for i in range(0,len(arr)):\n chars=arr[i].upper()\n values=ord(chars)%65\n new=(values+var)%26\n arr1.append(new)\n cipher=cipher+chr(new+65)\n alphabet=new+65\n #alphabet=ord(chr(alphabet))\n countval=(alphabet%65)\n index=(countval-var)%26\n plain=plain+chr(index+65)\n print(\"Cipher Text is:\"+cipher)\n print(\"Plain Text is:\"+plain)\ntext=input(\"Enter the Plain Text:\")\ncipher(text)\n" }, { "alpha_fraction": 0.5857987999916077, "alphanum_fraction": 0.6745561957359314, "avg_line_length": 13.166666984558105, "blob_id": "8cb233c93663940e73910dd533786a418357594a", "content_id": "b5886c0d068db60f6735395ca5d8fdb2cdfdd3c9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 169, "license_type": "no_license", "max_line_length": 55, "num_lines": 12, "path": "/Problems/xor operation using tuple.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "# Method 1\na=(10,4,6,9)\nb=(5,2,3,3)\n\nc=[element1^element2 for element1,element2 in zip(a,b)]\nprint(c)\n\n# Method 2\n\nfrom operator import xor\nd=list(map(xor,a,b))\nprint(d)" }, { "alpha_fraction": 0.40204864740371704, "alphanum_fraction": 0.43661972880363464, "avg_line_length": 15.617021560668945, "blob_id": "b05bf172d993f77401bb5dc27d341932b0738d84", "content_id": "679a4080b0f87309cfdcfe7907d64d4527012c6c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 781, "license_type": "no_license", "max_line_length": 45, "num_lines": 47, "path": "/Problems/timeconversion.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "#!/bin/python3\n\nimport os\nimport sys\n\n#\n# Complete the timeConversion function below.\n#\ndef timeConversion(s):\n #\n # Write your code here.\n #\n a=[]\n new=[]\n fmt=\"\"\n time=\"\"\n last=\"\"\n b=[]\n string=\"\"\n s=str(s)\n a=s.split(\":\")\n fmt=a[len(a)-1]\n new[:]=fmt\n time=new[-2]\n if(time==\"P\"):\n if(int(a[0]!=\"12\")):\n 
a[0]=str(int(a[0])+12)\n elif(time==\"A\"):\n if(int(a[0]==\"12\")):\n a[0]=str(int(a[0])-12)\n if(int(a[0])<10):\n a[0]=str(0)+a[0]\n last=a.pop()\n b[:]=last\n b=b[:-2]\n for i in b:\n string=string+i\n a.append(string)\n return (a[0]+\":\"+a[1]+\":\"+a[2])\n\nif __name__ == '__main__':\n\n s = input()\n\n result = timeConversion(s)\n\n print(result)\n" }, { "alpha_fraction": 0.4511041045188904, "alphanum_fraction": 0.4511041045188904, "avg_line_length": 27.909090042114258, "blob_id": "7876917788d20db8dff4656b718d3c904388ee55", "content_id": "8fa58004acc640b4115fd4e1199f106f3f60e573", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 317, "license_type": "no_license", "max_line_length": 112, "num_lines": 11, "path": "/Problems/remove vowels in a string.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "class Problem:\n def problems(self,string):\n a=[]\n a[:]=string\n for i in a:\n if i==\"a\" or i==\"e\" or i==\"i\" or i==\"o\" or i==\"u\" or i==\"A\" or i==\"E\" or i==\"I\" or i==\"O\" or i==\"U\":\n a.remove(i)\n print(a)\n return a\nobj=Problem()\nobj.problems(\"hipalaniappan\")" }, { "alpha_fraction": 0.6541353464126587, "alphanum_fraction": 0.6796992421150208, "avg_line_length": 20.483871459960938, "blob_id": "0ba5aac1273227647de23ee6326751573e799f25", "content_id": "af365c3ef6d27db6fe43a7c8076682c397c5d915", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 665, "license_type": "no_license", "max_line_length": 39, "num_lines": 31, "path": "/Data Structures/Stack/sort a stack using recursion.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "def createstack():\n stack=[]\n return stack\ndef pushelement(stack,item):\n stack.append(item)\ndef sortstack(stack,item):\n if len(stack)==0 or item>stack[-1]:\n stack.append(item)\n else:\n temp=stack.pop()\n sortstack(stack,item)\n stack.append(temp)\ndef stacktraverse(stack):\n if len(stack)!=0:\n temp=stack.pop()\n stacktraverse(stack)\n sortstack(stack,temp)\n\n\nstack=createstack()\npushelement(stack,30)\npushelement(stack,50)\npushelement(stack,-15)\npushelement(stack,-2)\npushelement(stack,-26)\npushelement(stack,-8)\npushelement(stack,32)\npushelement(stack,97)\nprint(stack)\nstacktraverse(stack)\nprint(stack)" }, { "alpha_fraction": 0.593800961971283, "alphanum_fraction": 0.6133768558502197, "avg_line_length": 20.89285659790039, "blob_id": "fdffd10ca1a8844b6ce2ea35b25fee0b28a6ef2d", "content_id": "b9810de2c25714858719b2a996d8788c5a7403d5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 613, "license_type": "no_license", "max_line_length": 32, "num_lines": 28, "path": "/Data Structures/Singly Linked List/length of a linked list.py", "repo_name": "palaniappanofficial/Competitive-Programming", "src_encoding": "UTF-8", "text": "class Linkedlist:\n def __init__(self,data):\n self.data=data\n self.next=None\nclass Linklist:\n def __init__(self):\n self.head=None\n def insertfirst(self,data):\n newlist=Linkedlist(data)\n newlist.next=self.head\n self.head=newlist\n def count(self):\n temp=self.head\n count=0\n while(temp):\n count=count+1\n temp=temp.next\n return count\nobj=Linklist()\nobj.insertfirst(10)\nobj.insertfirst('a')\nobj.insertfirst('b')\nobj.insertfirst(20)\nobj.insertfirst(30)\nobj.insertfirst(40)\nobj.insertfirst(50)\na=obj.count()\nprint(a)\n" } ]
52
asishm/advent-of-code-2019-and-2016
https://github.com/asishm/advent-of-code-2019-and-2016
e8c506afcac13e7d577efa2ad5c76ce4fe819d43
7d1ecb1fcd62973dd132e00362fde34da087a2f7
cb3a81f58ef87085e0e4a04bf7028c5cac3e9794
refs/heads/master
2020-09-23T16:37:04.064473
2019-12-13T09:55:05
2019-12-13T09:55:05
225,541,538
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.4356202185153961, "alphanum_fraction": 0.46884214878082275, "avg_line_length": 31, "blob_id": "b187b671078054887fba3646ab6440b8ef2eb61e", "content_id": "897c312b946ce7adf600ff57b20879d2f7f25309", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5087, "license_type": "no_license", "max_line_length": 158, "num_lines": 159, "path": "/2019/day11.py", "repo_name": "asishm/advent-of-code-2019-and-2016", "src_encoding": "UTF-8", "text": "import math\nimport collections\nimport itertools\nimport string\nimport re\n\ndef compute(inp, phase=0):\n from collections import defaultdict\n i = 0\n relative_base = 0\n memory = defaultdict(int)\n for idx,point in enumerate(inp):\n memory[idx] = point\n while True:\n val = memory[i]\n strval = f\"{val:05}\"\n (a,b,c), opcode = strval[:3], int(strval[3:])\n \n if opcode == 99:\n return\n\n a1, a2, a3 = memory[i+1], memory[i+2], memory[i+3]\n # print(strval, opcode, b, c, a1, a2, a3)\n # print(memory, a1, a2, a3, b, c, opcode)\n val1 = memory[a1] if c == '0' else a1 if c == '1' else memory[a1 + relative_base]\n val2 = memory[a2] if b == '0' else a2 if b == '1' else memory[a2 + relative_base]\n a3 = a3 + relative_base if a == '2' else a3\n # print(f\"STRVAL: {strval}, OPCODE: {opcode}, PARAM1: {a1}, VAL1: {val1}, MODE1: {c} PARAM2: {a2}, VAL2: {val2}, MODE2: {b} PARAM3: {a3}, MODE3: {a}\")\n if opcode == 1:\n memory[a3] = val1 + val2\n i += 4\n elif opcode == 2:\n memory[a3] = val1 * val2\n i += 4\n elif opcode == 3:\n val = yield\n # print(f\"SETTING MEMORY: a1={a1}, mode={c} relative={relative_base} input={val}\")\n if c == '2':\n memory[a1 + relative_base] = val\n else:\n memory[a1] = val\n # if inp_count == 0:\n # memory[a1] = phase\n # inp_count += 1\n # else:\n # memory[a1] = 1\n i += 2\n elif opcode == 4:\n # print(f\"YIELDING VALUE: {val1}\")\n yield val1\n i += 2\n elif opcode == 7:\n memory[a3] = int(val1 < val2)\n i += 4\n elif opcode == 8:\n memory[a3] = int(val1 == val2)\n i += 4\n elif opcode == 6:\n i = val2 if val1 == 0 else i + 3\n elif opcode == 5:\n i = val2 if val1 != 0 else i + 3\n elif opcode == 9:\n relative_base += val1\n # print(f\"CHANGE RELATIVE BASE: {relative_base} {val1}\")\n i += 2\n\ndef parse(inp):\n return [int(k) for k in inp.strip().split(',')]\n\ndef part1(inp):\n dirs = [(-1, 0), (0, 1), (1, 0), (0, -1)]\n\n grid_color = collections.defaultdict(int)\n grid_count = collections.defaultdict(int)\n x, y = 0, 0\n start_dir = 0\n out_count = 0\n machine = compute(inp.copy())\n next(machine)\n while True:\n try:\n # print(f\"SENDING {grid_color[(x,y)]} or 0\")\n out = machine.send(grid_color[(x,y)] or 0)\n # print(f'RECEIVED {out}')\n except StopIteration:\n break\n if out is None:\n continue\n if out_count % 2 == 0:\n grid_color[(x,y)] = out\n grid_count[(x,y)] += 1\n else:\n assert out in (0,1)\n if out == 0:\n start_dir = (start_dir - 1) % len(dirs)\n else:\n start_dir = (start_dir + 1) % len(dirs)\n\n direction = dirs[start_dir]\n x, y = x + direction[0], y + direction[1]\n # print(f\"NEW (x,y) = ({x}, {y})\")\n out_count += 1\n # print(grid_count)\n # print(grid_color)\n # print(len([(k,v) for k,v in grid_count.items() if v is not None and v > 0]))\n return len([(k,v) for k,v in grid_count.items() if v is not None and v > 0])\n\ndef part2(inp):\n dirs = [(-1, 0), (0, 1), (1, 0), (0, -1)]\n\n grid_color = collections.defaultdict(int)\n grid_count = collections.defaultdict(int)\n grid_color[(0,0)] = 1\n x, y = 0, 0\n start_dir = 0\n out_count = 0\n 
machine = compute(inp.copy())\n next(machine)\n while True:\n try:\n # print(f\"SENDING {grid_color[(x,y)]} or 0\")\n out = machine.send(grid_color[(x,y)] or 0)\n # print(f'RECEIVED {out}')\n except StopIteration:\n break\n if out is None:\n continue\n if out_count % 2 == 0:\n grid_color[(x,y)] = out\n grid_count[(x,y)] += 1\n else:\n assert out in (0,1)\n if out == 0:\n start_dir = (start_dir - 1) % len(dirs)\n else:\n start_dir = (start_dir + 1) % len(dirs)\n\n direction = dirs[start_dir]\n x, y = x + direction[0], y + direction[1]\n # print(f\"NEW (x,y) = ({x}, {y})\")\n out_count += 1\n # print(grid_count)\n # print(grid_color)\n # print(len([(k,v) for k,v in grid_count.items() if v is not None and v > 0]))\n grid_x = [k[0] for k in grid_color]\n grid_y = [k[1] for k in grid_color]\n for x in range(min(grid_x), max(grid_x) + 1):\n for y in range(min(grid_y), max(grid_y) + 1):\n if grid_color[(x,y)] == 1:\n print('\\u2588', end='')\n else:\n print(' ', end='')\n print()\n\nif __name__ == \"__main__\":\n import sys\n inp = parse(sys.stdin.read())\n print(part1(inp.copy()))\n print(part2(inp.copy()))" }, { "alpha_fraction": 0.43991565704345703, "alphanum_fraction": 0.4638088643550873, "avg_line_length": 26.365385055541992, "blob_id": "4aef3ff3483b857c9d16bc13c890a509ce098662", "content_id": "e863c67a3a8a9a4ddb71654db4be470ee0fad529", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1423, "license_type": "no_license", "max_line_length": 105, "num_lines": 52, "path": "/2019/day3.py", "repo_name": "asishm/advent-of-code-2019-and-2016", "src_encoding": "UTF-8", "text": "import itertools\nimport collections\nimport re\nimport string\n\ndef parse(inp):\n return [k.split(',') for k in inp.splitlines() if k.strip()]\n\ndef part1(inp):\n seens = []\n dirs = {'R': (0,1), 'L': (0, -1), 'U':(-1,0), 'D':(1,0)}\n intersects = []\n for wire in inp:\n x = y = 0\n seen = set()\n for ins in wire:\n direction, dist = ins[0], int(ins[1:])\n dx, dy = dirs[direction]\n for _ in range(dist):\n x += dx\n y += dy\n seen.add((x,y))\n seens.append(seen)\n intersects = seens[0] & seens[1]\n\n return min(abs(x)+abs(y) for (x,y) in intersects)\n\ndef part2(inp):\n seens = []\n dirs = {'R': (0,1), 'L': (0,-1), 'U':(-1,0), 'D':(1,0)}\n for wire in inp:\n x = y = 0\n seen = {}\n steps = 0\n for ins in wire:\n direction, dist = ins[0], int(ins[1:])\n dx, dy = dirs[direction]\n for _ in range(dist):\n steps += 1\n x += dx\n y += dy\n seen[(x,y)] = seen.get((x,y), steps)\n seens.append(seen)\n intersects = seens[0].keys() & seens[1].keys()\n\n return min(abs(x)+abs(y) for (x,y) in intersects), min(seens[0][k] + seens[1][k] for k in intersects)\n\nif __name__ == \"__main__\":\n import sys\n inp = parse(sys.stdin.read())\n print(part1(inp.copy()))\n print(part2(inp.copy()))\n" }, { "alpha_fraction": 0.4184716045856476, "alphanum_fraction": 0.46301594376564026, "avg_line_length": 29.97468376159668, "blob_id": "fce3e1211e6c724b27c498fd59f84701d2764994", "content_id": "1a1ca84ec5e57b5136c8c10ced3c81fc80393f51", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2447, "license_type": "no_license", "max_line_length": 158, "num_lines": 79, "path": "/2019/day9.py", "repo_name": "asishm/advent-of-code-2019-and-2016", "src_encoding": "UTF-8", "text": "def compute(inp, phase=0):\n from collections import defaultdict\n i = 0\n relative_base = 0\n memory = defaultdict(int)\n for idx,point in enumerate(inp):\n memory[idx] = 
point\n while True:\n val = memory[i]\n strval = f\"{val:05}\"\n (a,b,c), opcode = strval[:3], int(strval[3:])\n \n if opcode == 99:\n return\n\n a1, a2, a3 = memory[i+1], memory[i+2], memory[i+3]\n # print(strval, opcode, b, c, a1, a2, a3)\n # print(memory, a1, a2, a3, b, c, opcode)\n val1 = memory[a1] if c == '0' else a1 if c == '1' else memory[a1 + relative_base]\n val2 = memory[a2] if b == '0' else a2 if b == '1' else memory[a2 + relative_base]\n a3 = a3 + relative_base if a == '2' else a3\n # print(f\"STRVAL: {strval}, OPCODE: {opcode}, PARAM1: {a1}, VAL1: {val1}, MODE1: {c} PARAM2: {a2}, VAL2: {val2}, MODE2: {b} PARAM3: {a3}, MODE3: {a}\")\n if opcode == 1:\n memory[a3] = val1 + val2\n i += 4\n elif opcode == 2:\n memory[a3] = val1 * val2\n i += 4\n elif opcode == 3:\n # print(f\"SETTING MEMORY: {a1}\")\n if c == '2':\n memory[a1 + relative_base] = yield\n else:\n memory[a1] = yield\n # if inp_count == 0:\n # memory[a1] = phase\n # inp_count += 1\n # else:\n # memory[a1] = 1\n i += 2\n elif opcode == 4:\n # print(strval, opcode, b, c, a1, a2, a3, val1, val2)\n yield val1\n i += 2\n elif opcode == 7:\n memory[a3] = int(val1 < val2)\n i += 4\n elif opcode == 8:\n memory[a3] = int(val1 == val2)\n i += 4\n elif opcode == 6:\n i = val2 if val1 == 0 else i + 3\n elif opcode == 5:\n i = val2 if val1 != 0 else i + 3\n elif opcode == 9:\n relative_base += val1\n # print(f\"CHANGE RELATIVE BASE: {relative_base} {val1}\")\n i += 2\n\ndef parse(inp):\n return [int(k) for k in inp.strip().split(',')]\n\ndef part1(inp):\n # print(len(inp))\n a = compute(inp)\n next(a)\n return a.send(1)\n\ndef part2(inp):\n # print(len(inp))\n a = compute(inp)\n next(a)\n return a.send(2)\n\nif __name__ == \"__main__\":\n import sys\n inp = parse(sys.stdin.read())\n print(part1(inp.copy()))\n print(part2(inp.copy()))\n" }, { "alpha_fraction": 0.4212290644645691, "alphanum_fraction": 0.46145251393318176, "avg_line_length": 26.15151596069336, "blob_id": "aaa7b84662d85d6e6cb1aaf898878e5fcdec9436", "content_id": "67e53baff2ab9912fd9ca23dcc54b8a9d9e99e90", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 895, "license_type": "no_license", "max_line_length": 64, "num_lines": 33, "path": "/2016/day1.py", "repo_name": "asishm/advent-of-code-2019-and-2016", "src_encoding": "UTF-8", "text": "def parse(inp):\n return [(k[0], int(k[1:])) for k in inp.strip().split(', ')]\n\ndef part1(ins):\n dirs = [(1,0), (0,1), (-1,0), (0,-1)]\n x,y,direction = 0,0,0\n for lr, step in ins:\n direction += (1 if lr == 'R' else -1)\n direction %= 4\n dx, dy = dirs[direction]\n x, y = x + dx * step, y + dy * step\n\n return abs(x) + abs(y)\n\ndef part2(ins):\n locations = set((0,0))\n dirs = [(1,0), (0,1), (-1,0), (0,-1)]\n x,y,direction = 0,0,0\n for lr, step in ins:\n direction += (1 if lr == 'R' else -1)\n direction %= 4\n dx, dy = dirs[direction]\n for _ in range(step):\n x, y = x + dx, y + dy\n if (x,y) in locations:\n return abs(x) + abs(y)\n locations.add((x,y))\n\nif __name__ == \"__main__\":\n import sys\n inp = parse(sys.stdin.read())\n print(part1(inp))\n print(part2(inp))" }, { "alpha_fraction": 0.4714285731315613, "alphanum_fraction": 0.48452380299568176, "avg_line_length": 21.13157844543457, "blob_id": "ee813221bf3e808937a66cfc4161b98090977405", "content_id": "c751d27fbe79acf08818e653ebb31876cb3dc6ff", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 840, "license_type": "no_license", "max_line_length": 47, 
"num_lines": 38, "path": "/2016/day8.py", "repo_name": "asishm/advent-of-code-2019-and-2016", "src_encoding": "UTF-8", "text": "import numpy as np\n\ndef parse(inp):\n out = []\n import re\n pat = re.compile(r'\\d+')\n \n for line in inp.splitlines():\n x, y = map(int, pat.findall(line))\n if line.startswith('rect'):\n out.append(('rect', x, y))\n elif line.startswith('rotate column'):\n out.append(('column', x, y))\n elif line.startswith('rotate row'):\n out.append(('row', x, y))\n return out\n\n\ndef part1(inp):\n GRID = np.array([[0]*50 for _ in range(6)])\n\n for ins, a, b in inp:\n if ins == 'rect':\n GRID[:b, :a] = 1\n elif ins == 'column':\n GRID[:, a] = 1\n elif ins == 'row':\n GRID[a, :] = 1\n\n\ndef part2(inp):\n pass\n\nif __name__ == \"__main__\":\n import sys\n inp = parse(sys.stdin.read())\n print(part1(inp))\n print(part2(inp))" }, { "alpha_fraction": 0.3958333432674408, "alphanum_fraction": 0.4440789520740509, "avg_line_length": 19.288888931274414, "blob_id": "ce4e4c3692d9bb504df0c73e252bdfa4e06de52e", "content_id": "b7eeeb96bb89e18458704b03a6e58a42bd9bfb94", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 912, "license_type": "no_license", "max_line_length": 57, "num_lines": 45, "path": "/2019/day2.py", "repo_name": "asishm/advent-of-code-2019-and-2016", "src_encoding": "UTF-8", "text": "import itertools\nimport collections\nimport re\nimport string\n\ndef parse(inp):\n return list(map(int, inp.split(',')))\n\ndef part1(inp, noun=12, verb=2):\n \n inp[1] = noun\n inp[2] = verb\n i = 0\n while True:\n c = inp[i]\n # print(i, c, inp)\n if c == 99:\n # print(inp)\n return inp[0]\n elif c == 1:\n inp[inp[i+3]] = inp[inp[i+1]] + inp[inp[i+2]]\n i += 4\n elif c == 2:\n inp[inp[i+3]] = inp[inp[i+1]] * inp[inp[i+2]]\n i += 4\n else:\n i += 1\n \n\ndef part2(inp):\n\n for i in range(100):\n for j in range(100):\n inp2 = inp.copy()\n\n val = part1(inp2, i, j)\n if val == 19690720:\n return 100 * i + j\n\n\nif __name__ == \"__main__\":\n import sys\n inp = parse(sys.stdin.read())\n print(part1(inp.copy()))\n print(part2(inp.copy()))" }, { "alpha_fraction": 0.3427419364452362, "alphanum_fraction": 0.3941532373428345, "avg_line_length": 23.49382781982422, "blob_id": "73dadd819ad02004bba8598af5dd2db8721ebbe1", "content_id": "8cf79fe41e98fbc011ddd8dfe1e7f39f27c2f6ee", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1984, "license_type": "no_license", "max_line_length": 51, "num_lines": 81, "path": "/2019/day5.py", "repo_name": "asishm/advent-of-code-2019-and-2016", "src_encoding": "UTF-8", "text": "def parse(inp):\n return list(map(int, inp.split(',')))\n\ndef part1(inp, init=1):\n \n i = 0\n while i < len(inp):\n val = inp[i]\n strval = f\"{val:04}\"\n (b,c), opcode = strval[:2], int(strval[2:])\n \n if opcode == 99:\n return\n\n a1, a2, a3 = inp[i+1:i+4]\n val1 = inp[a1] if c == '0' else a1\n try:\n val2 = inp[a2] if b == '0' else a2\n except IndexError:\n assert opcode in (3,4)\n\n if opcode == 1:\n inp[a3] = val1 + val2\n i += 4\n elif opcode == 2:\n inp[a3] = val1 * val2\n i += 4\n elif opcode == 3:\n inp[a1] = init\n i += 2\n elif opcode == 4:\n print('OUTPUT', val1)\n i += 2\n\ndef part2(inp, init=5):\n \n i = 0\n while i < len(inp):\n val = inp[i]\n strval = f\"{val:04}\"\n (b,c), opcode = strval[:2], int(strval[2:])\n \n if opcode == 99:\n return\n\n a1, a2, a3 = inp[i+1:i+4]\n val1 = inp[a1] if c == '0' else a1\n try:\n val2 = inp[a2] if b == '0' else a2\n except 
IndexError:\n assert opcode in (3,4)\n\n if opcode == 1:\n inp[a3] = val1 + val2\n i += 4\n elif opcode == 2:\n inp[a3] = val1 * val2\n i += 4\n elif opcode == 3:\n inp[a1] = init\n i += 2\n elif opcode == 4:\n print('OUTPUT', inp[a1])\n i += 2\n elif opcode == 7:\n inp[a3] = int(val1 < val2)\n i += 4\n elif opcode == 8:\n inp[a3] = int(val1 == val2)\n i += 4\n elif opcode == 6:\n i = val2 if val1 == 0 else i + 3\n elif opcode == 5:\n i = val2 if val1 != 0 else i + 3\n \nif __name__ == \"__main__\":\n import sys\n inp = parse(sys.stdin.read())\n print(part1(inp.copy()))\n print('=====')\n print(part2(inp.copy()))\n" }, { "alpha_fraction": 0.611599326133728, "alphanum_fraction": 0.6203866600990295, "avg_line_length": 23.7391300201416, "blob_id": "82dfcc658f21706aa77a609496421765e91a59b3", "content_id": "ef43235fbd79cedb0b2a9415478b363963167f0c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 569, "license_type": "no_license", "max_line_length": 68, "num_lines": 23, "path": "/2019/day6.py", "repo_name": "asishm/advent-of-code-2019-and-2016", "src_encoding": "UTF-8", "text": "import itertools\nimport collections\nimport re\nimport string\nimport networkx as nx\n\ndef parse(inp):\n return list(k.split(')') for k in inp.splitlines() if k.strip())\n\ndef part1(inp, *args, **kwargs):\n G = nx.from_edgelist(inp)\n return sum(nx.shortest_path_length(G, 'COM').values())\n\ndef part2(inp, *args, **kwargs):\n G = nx.from_edgelist(inp)\n return nx.shortest_path_length(G, 'SAN', 'YOU') - 2\n \nif __name__ == \"__main__\":\n import sys\n inp = parse(sys.stdin.read())\n print(part1(inp.copy()))\n print('=====')\n print(part2(inp.copy()))\n" }, { "alpha_fraction": 0.4159189462661743, "alphanum_fraction": 0.4442836344242096, "avg_line_length": 27.79166603088379, "blob_id": "00d6895797a470bb735419e62c943685023477ae", "content_id": "3a9558b8edcf4130c4ec5c45c2b9e4f7bf9cb209", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3455, "license_type": "no_license", "max_line_length": 82, "num_lines": 120, "path": "/2019/day7.py", "repo_name": "asishm/advent-of-code-2019-and-2016", "src_encoding": "UTF-8", "text": "import itertools\nimport collections\nimport re\nimport string\nimport networkx as nx\n\ndef parse(inp):\n return [int(k) for k in inp.strip().split(',')]\n\ndef part1(inp, *args, **kwargs):\n \n max_thrust = float('-inf')\n # out_phase = None\n for phase_list in itertools.permutations([0,1,2,3,4], 5):\n init = 0\n for amp_idx in range(5):\n amp = compute(inp.copy(), phase_list[amp_idx])\n next(amp)\n while True:\n try:\n val = amp.send(init)\n except StopIteration:\n break\n if val is not None:\n init = val\n break\n if init >= max_thrust:\n max_thrust = max(max_thrust, init)\n # out_phase = phase_list\n\n return max_thrust\n\ndef part2(inp, *args, **kwargs):\n max_thrust = float('-inf')\n # out_phase = None\n for phase_list in itertools.permutations([5,6,7,8,9],5):\n amps = [compute(inp.copy(), phase) for phase in phase_list]\n [next(amp) for amp in amps]\n init = [0]\n # steps = []\n for amp_idx in itertools.cycle(range(5)):\n # steps.append(amp_idx)\n # amp, cp = amps[amp_idx]\n amp = amps[amp_idx]\n # print(instructions)\n val = init[-1]\n # print(f\"PHASE: {phase} AMP: {amp_idx} == SENDING {val}\")\n try:\n val2 = amp.send(val)\n except StopIteration:\n if amp_idx == 4:\n break\n else:\n continue\n # print(f\"PHASE: {phase} AMP: {amp_idx} == STOPITERATION\")\n # print(f\"PHASE: {phase} 
AMP: {amp_idx} == RECEIVED {val2} INP: {cp}\")\n if val2 is not None:\n init.append(val2)\n # print(init, phase_list[amp])\n if init[-1] >= max_thrust:\n max_thrust = max(max_thrust, init[-1])\n # out_phase = phase_list\n # print(max_thrust, phase)\n return max_thrust\n\ndef compute(inp, phase=0):\n \n i = 0\n inp_count = 0\n while i < len(inp):\n val = inp[i]\n strval = f\"{val:04}\"\n (b,c), opcode = strval[:2], int(strval[2:])\n \n if opcode == 99:\n return\n\n a1, a2, *a3 = inp[i+1:i+4]\n if a3:\n a3 = a3[0]\n # print(inp, a1, a2, a3, b, c, opcode)\n val1 = inp[a1] if c == '0' else a1\n try:\n val2 = inp[a2] if b == '0' else a2\n except IndexError:\n assert opcode in (3,4)\n\n if opcode == 1:\n inp[a3] = val1 + val2\n i += 4\n elif opcode == 2:\n inp[a3] = val1 * val2\n i += 4\n elif opcode == 3:\n if inp_count == 0:\n inp[a1] = phase\n inp_count += 1\n else:\n inp[a1] = yield\n i += 2\n elif opcode == 4:\n yield val1 \n i += 2\n elif opcode == 7:\n inp[a3] = int(val1 < val2)\n i += 4\n elif opcode == 8:\n inp[a3] = int(val1 == val2)\n i += 4\n elif opcode == 6:\n i = val2 if val1 == 0 else i + 3\n elif opcode == 5:\n i = val2 if val1 != 0 else i + 3\n\nif __name__ == \"__main__\":\n import sys\n inp = parse(sys.stdin.read())\n print(part1(inp.copy()))\n print('=====')\n print(part2(inp.copy()))\n" }, { "alpha_fraction": 0.5670840740203857, "alphanum_fraction": 0.6225402355194092, "avg_line_length": 28.36842155456543, "blob_id": "440201f7f1ea6dd5c1c0a05f85651c00ddede225", "content_id": "6da99c9458abdbb2570a973d89a5e40377f9a955", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 559, "license_type": "no_license", "max_line_length": 136, "num_lines": 19, "path": "/2019/day4.py", "repo_name": "asishm/advent-of-code-2019-and-2016", "src_encoding": "UTF-8", "text": "import itertools\nimport collections\nimport re\nimport string\n\ndef parse(inp):\n return [int(k) for k in inp.strip().split('-')]\n\ndef part1(a,b):\n return sum(list(n) == sorted(n) and len(set(n)) < len(n) for n in map(str, range(max(100000, a), min(999999, b)+1)))\n\ndef part2(a,b):\n return sum(list(n) == sorted(n) and 2 in collections.Counter(n).values() for n in map(str, range(max(100000, a), min(999999, b)+1)))\n\nif __name__ == \"__main__\":\n import sys\n inp = parse(sys.stdin.read())\n print(part1(*inp.copy()))\n print(part2(*inp.copy()))\n\n" }, { "alpha_fraction": 0.4793442487716675, "alphanum_fraction": 0.48983606696128845, "avg_line_length": 26.25, "blob_id": "cc6623aa5a82a80efdc0ba83428087ef9849c215", "content_id": "edcee6fafb64af9592483e44d310bdb9768c854f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1525, "license_type": "no_license", "max_line_length": 111, "num_lines": 56, "path": "/2016/day7.py", "repo_name": "asishm/advent-of-code-2019-and-2016", "src_encoding": "UTF-8", "text": "def parse(inp):\n return inp.splitlines()\n\ndef part1(inp):\n import re\n count = 0\n abba_pat = re.compile(r'(?P<a>.)(?P<b>.)(?P=b)(?P=a)')\n inside_pat = re.compile(r'\\[.*?\\]')\n for i in inp:\n insides = inside_pat.findall(i)\n outsides = inside_pat.split(i)\n\n inside_bool = any(abba_pat.search(k) and len(set(abba_pat.search(k).groups())) == 2 for k in insides)\n outside_bool = any(abba_pat.search(k) and len(set(abba_pat.search(k).groups())) == 2 for k in outsides)\n\n if outside_bool and not inside_bool:\n count += 1\n return count\n\ndef part2(inp):\n import re\n count = 0\n inside_pat = 
re.compile(r'\\[.*?\\]')\n\n for i in inp:\n found = False\n insides = inside_pat.findall(i)\n outsides = inside_pat.split(i)\n potentials = set()\n\n for out in outsides:\n for j in range(len(out)-2):\n a,b,c = out[j:j+3]\n if a == c and b != c:\n potentials.add((a,b))\n\n # print(potentials, outsides, insides)\n for ins in insides:\n ins = ins[1:-1]\n for j in range(len(ins)-2):\n a,b,c = ins[j:j+3]\n if a == c and b != c and (b,a) in potentials:\n count += 1\n found = True\n break\n if found:\n break\n return count\n\n\n\nif __name__ == \"__main__\":\n import sys\n inp = parse(sys.stdin.read())\n print(part1(inp))\n print(part2(inp))" }, { "alpha_fraction": 0.40150249004364014, "alphanum_fraction": 0.43823039531707764, "avg_line_length": 20.781818389892578, "blob_id": "b1ffab09af47cd630748fe300bf3fd259101ad81", "content_id": "9de5055a9136699c96833fd33f30fe226d3af472", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1198, "license_type": "no_license", "max_line_length": 87, "num_lines": 55, "path": "/2016/day2.py", "repo_name": "asishm/advent-of-code-2019-and-2016", "src_encoding": "UTF-8", "text": "GRID = [\n [1,2,3],\n [4,5,6],\n [7,8,9]\n]\n\nGRID2 = [\n [None, None, 1, None, None],\n [None, 2, 3, 4, None],\n [5, 6, 7, 8, 9],\n [None, 'A', 'B', 'C', None],\n [None, None, 'D', None, None]\n]\n\nDIRS = {\n 'U': (-1, 0),\n 'D': (1, 0),\n 'L': (0, -1),\n 'R': (0, 1)\n}\n\ndef parse(inp):\n # print(inp)\n return [k.strip() for k in inp.splitlines() if k.strip()]\n\ndef part1(ins, GRID):\n x,y = 1,1\n\n digits = []\n for digit in ins:\n for c in digit:\n dx, dy = DIRS[c]\n x, y = min(max(x+dx, 0), 2), min(max(y+dy, 0), 2)\n # print(dx, dy, x, y, GRID[x][y])\n digits.append(str(GRID[x][y]))\n return ''.join(digits)\n\ndef part2(ins, GRID):\n x,y = 2,0\n\n digits = []\n for digit in ins:\n for c in digit:\n dx, dy = DIRS[c]\n if 0 <= x + dx <= 4 and 0 <= y + dy <= 4 and GRID[x+dx][y+dy] is not None:\n x, y = x + dx, y + dy\n # print(dx, dy, x, y, GRID[x][y])\n digits.append(str(GRID[x][y]))\n return ''.join(digits)\n\nif __name__ == \"__main__\":\n import sys\n inp = sys.stdin.read()\n print(part1(parse(inp), GRID))\n print(part2(parse(inp), GRID2))\n" }, { "alpha_fraction": 0.5495023727416992, "alphanum_fraction": 0.5644316673278809, "avg_line_length": 30.825000762939453, "blob_id": "0f84a0785299fec586aaa56320441de87800ecf3", "content_id": "a502ea0a18992fc8aadf04b21128ef0c67c5a9c1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3818, "license_type": "no_license", "max_line_length": 113, "num_lines": 120, "path": "/2019/day12.py", "repo_name": "asishm/advent-of-code-2019-and-2016", "src_encoding": "UTF-8", "text": "import math\nimport collections\nimport itertools\nimport string\nimport re\n\nclass Point:\n id_ = 0\n def __init__(self, x, y, z):\n self.x = x\n self.y = y\n self.z = z\n self.vel_x = 0\n self.vel_y = 0\n self.vel_z = 0\n self.modifiers = [0, 0, 0]\n self.id_ = Point.id_\n Point.id_ += 1\n\n def apply_gravity(self, other):\n if self.x != other.x:\n modifier = 1 if self.x < other.x else -1\n self.modifiers[0] += modifier\n other.modifiers[0] -= modifier\n if self.y != other.y:\n modifier = 1 if self.y < other.y else -1\n self.modifiers[1] += modifier\n other.modifiers[1] -= modifier\n if self.z != other.z:\n modifier = 1 if self.z < other.z else -1\n self.modifiers[2] += modifier\n other.modifiers[2] -= modifier\n\n def apply_gravity_modifier(self):\n self.vel_x 
+= self.modifiers[0]\n self.vel_y += self.modifiers[1]\n self.vel_z += self.modifiers[2]\n self.modifiers = [0, 0, 0]\n\n def apply_velocity(self):\n self.x += self.vel_x\n self.y += self.vel_y\n self.z += self.vel_z\n\n def calc_kinetic(self):\n return abs(self.vel_x) + abs(self.vel_y) + abs(self.vel_z)\n \n def calc_potential(self):\n return abs(self.x) + abs(self.y) + abs(self.z)\n\n def calc_energy(self):\n return self.calc_kinetic() * self.calc_potential()\n\n def __str__(self):\n return f\"ID={self.id_} pos=<{self.x}, {self.y}, {self.z}> vel=<{self.vel_x}, {self.vel_y}, {self.vel_z}>\"\n\ndef parse(inp):\n pat = re.compile(r\"-?\\d+\")\n return [Point(*map(int, pat.findall(k))) for k in inp.strip().splitlines()]\n\nclass System:\n def __init__(self, planets):\n self.planets = planets\n\n def simulate_one(self, debug=False):\n for planet_a, planet_b in itertools.combinations(self.planets, 2):\n planet_a.apply_gravity(planet_b)\n for planet in self.planets:\n planet.apply_gravity_modifier()\n planet.apply_velocity()\n if debug:\n for planet in self.planets:\n print(planet)\n\n def calc_energy(self):\n return sum(planet.calc_energy() for planet in self.planets)\n\ndef part1(inp):\n system = System(inp)\n # for planet in system.planets:\n # print(planet)\n # print(\"******8\")\n for _ in range(1000):\n system.simulate_one()\n # print(\"======\")\n return system.calc_energy()\n\ndef part2(inp):\n system = System(inp)\n states = [None] * 3\n counts = [0] * 3\n states[0] = tuple(planet.x for planet in system.planets) + tuple(planet.vel_x for planet in system.planets)\n states[1] = tuple(planet.y for planet in system.planets) + tuple(planet.vel_y for planet in system.planets)\n states[2] = tuple(planet.z for planet in system.planets) + tuple(planet.vel_z for planet in system.planets)\n count = 0\n\n while not all(counts):\n system.simulate_one()\n count += 1\n a = tuple(planet.x for planet in system.planets) + tuple(planet.vel_x for planet in system.planets)\n if a == states[0] and counts[0] == 0:\n counts[0] = count\n b = tuple(planet.y for planet in system.planets) + tuple(planet.vel_y for planet in system.planets)\n if b == states[1] and counts[1] == 0:\n counts[1] = count\n c = tuple(planet.z for planet in system.planets) + tuple(planet.vel_z for planet in system.planets)\n if c == states[2] and counts[2] == 0:\n counts[2] = count\n\n out = 1\n for v in counts:\n g = math.gcd(out, v)\n out *= (v // g)\n return out\n\nif __name__ == \"__main__\":\n import sys\n inp = parse(sys.stdin.read())\n print(part1(inp.copy()))\n print(part2(inp.copy()))" }, { "alpha_fraction": 0.5, "alphanum_fraction": 0.5245535969734192, "avg_line_length": 16.959999084472656, "blob_id": "46f8fdfce1028e835fc9b04e5ea8d4c86dbe01ac", "content_id": "3ad3ffee807774b2af1d18fa2112580683da83b6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 448, "license_type": "no_license", "max_line_length": 43, "num_lines": 25, "path": "/2019/day1.py", "repo_name": "asishm/advent-of-code-2019-and-2016", "src_encoding": "UTF-8", "text": "import itertools\nimport collections\nimport re\nimport string\n\ndef parse(inp):\n return list(map(int, inp.splitlines()))\n\ndef part1(inp):\n return sum(k//3 - 2 for k in inp)\n\ndef part2(inp):\n s = 0\n for k in inp:\n while k >= 0:\n k = k // 3 - 2\n if k > 0:\n s += k\n return s\n\nif __name__ == \"__main__\":\n import sys\n inp = parse(sys.stdin.read())\n print(part1(inp))\n print(part2(inp))" }, { "alpha_fraction": 0.6082677245140076, 
"alphanum_fraction": 0.625984251499176, "avg_line_length": 24.450000762939453, "blob_id": "87e2f6e9dcb88202bd502489727f4755174f26ce", "content_id": "a3779cde278ec46eddccc323530000acdbc24f66", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 508, "license_type": "no_license", "max_line_length": 74, "num_lines": 20, "path": "/2016/day6.py", "repo_name": "asishm/advent-of-code-2019-and-2016", "src_encoding": "UTF-8", "text": "def parse(inp):\n return [k.strip() for k in inp.splitlines() if k.strip()]\n\ndef part1(inp):\n from collections import Counter\n inp = list(zip(*inp))\n\n return ''.join(Counter(column).most_common(1)[0][0] for column in inp)\n\ndef part2(inp):\n from collections import Counter\n inp = list(zip(*inp))\n\n return ''.join(Counter(column).most_common()[-1][0] for column in inp)\n\nif __name__ == \"__main__\":\n import sys\n inp = parse(sys.stdin.read())\n print(part1(inp))\n print(part2(inp))" }, { "alpha_fraction": 0.5389681458473206, "alphanum_fraction": 0.55433589220047, "avg_line_length": 31.571428298950195, "blob_id": "e4029150ea5c2e085d9fee939bada3d02fb6e178", "content_id": "5e059c7233607070d5aa0b99c28518d2456b4a28", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 911, "license_type": "no_license", "max_line_length": 122, "num_lines": 28, "path": "/2016/day4.py", "repo_name": "asishm/advent-of-code-2019-and-2016", "src_encoding": "UTF-8", "text": "def parse(inp):\n import re\n pat = re.compile(r'(.*?)-(\\d+)\\[(.*?)\\]')\n return [(k[0], int(k[1]), k[2]) for line in inp.splitlines() for k in pat.findall(line)]\n\ndef part1(inp):\n from collections import Counter\n tot = 0\n for letters, secid, checksum in inp:\n c = Counter(letters.replace('-', ''))\n checksum_verify = ''.join(k[0] for k in sorted(c.items(), key=lambda x: (-x[1], x[0]))[:5])\n if checksum == checksum_verify:\n tot += secid\n return tot\n\ndef part2(inp):\n from string import ascii_lowercase\n for letters, secid, _ in inp:\n transf = ''.join([ascii_lowercase[(ord(k) - ord('a') + secid) % 26] if 'a' <= k <= 'z' else ' ' for k in letters])\n if 'north' in transf and 'pole' in transf:\n return secid\n\n\nif __name__ == \"__main__\":\n import sys\n inp = parse(sys.stdin.read())\n print(part1(inp))\n print(part2(inp))" }, { "alpha_fraction": 0.5175879597663879, "alphanum_fraction": 0.5301507711410522, "avg_line_length": 20, "blob_id": "7a63c197f5cf1d674e23d390e16c9fc0cc7e097e", "content_id": "37c11f827790d6b40504de1cdf96dd3cdba9fe78", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 398, "license_type": "no_license", "max_line_length": 43, "num_lines": 19, "path": "/2019/day1_upping.py", "repo_name": "asishm/advent-of-code-2019-and-2016", "src_encoding": "UTF-8", "text": "def calc_fuel(x):\n f = 0\n while x > 0:\n x = x // 3 - 2\n if x > 0:\n f += x\n return f\n\ndef parse(inp):\n return list(map(int, inp.splitlines()))\n\nif __name__ == \"__main__\":\n import sys\n inp = parse(sys.stdin.read())\n\n import multiprocessing as mp\n with mp.Pool(processes=None) as pool:\n results = pool.map(calc_fuel, inp)\n print(sum(results))" }, { "alpha_fraction": 0.43216297030448914, "alphanum_fraction": 0.4711746871471405, "avg_line_length": 28.21518898010254, "blob_id": "c6c3efac359acf5d4c74c539f132e79a33897d80", "content_id": "a16d4489bae29d07fb9470cfb3eda133de95c336", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": 
"Python", "length_bytes": 2307, "license_type": "no_license", "max_line_length": 69, "num_lines": 79, "path": "/2019/day10.py", "repo_name": "asishm/advent-of-code-2019-and-2016", "src_encoding": "UTF-8", "text": "import itertools\nimport collections\nimport re\nimport string\nimport networkx as nx\nfrom fractions import gcd\nimport math\n\ndef parse(inp):\n grid = [list(k) for k in inp.strip().splitlines()]\n return grid\n\ndef calc_angle(x1, y1, x2, y2):\n if x1 == x2:\n return (y1 - y2) // abs(y1 - y2), 0\n elif y1 == y2:\n return 0, (x1 - x2) // abs(x1 - x2)\n a, b = y1 - y2, x1 - x2\n common = abs(gcd(a, b))\n return (a // common, b // common)\n\ndef calc_dist(x1, y1, x2, y2):\n return (x1 - x2) ** 2 + (y1 - y2) ** 2\n\ndef part1(inp):\n \n max_seen = float('-inf')\n out_coord = None\n \n for i, row in enumerate(inp):\n for j, val in enumerate(row):\n if val != '#':\n continue\n dirs = set()\n for k, row2 in enumerate(inp):\n for l, val2 in enumerate(row2):\n if val2 != '#' or (i,j) == (k,l):\n continue\n los = calc_angle(i,j,k,l)\n dirs.add(los)\n # print(f\"{i,j} - {len(dirs)}, {dirs}\")\n if len(dirs) > max_seen:\n out_coord = (i, j)\n max_seen = max(max_seen, len(dirs))\n return max_seen, out_coord\n\ndef part2(inp, X=19, Y=23):\n counts = collections.defaultdict(int)\n array = []\n for i, row in enumerate(inp):\n for j, val in enumerate(row):\n if val != \"#\" or (i,j) == (X,Y):\n continue\n deg = math.degrees(math.atan2(X-i,j-Y))\n\n dist = calc_dist(i,j,X,Y)\n if deg < 0: deg += 360\n counts[deg] += 1\n if 0 <= deg <= 90:\n array.append((counts[deg], 0, 90 - deg, dist, i, j))\n elif 270 <= deg < 360:\n array.append((counts[deg], 1, 360 - deg, dist, i, j))\n elif 180 <= deg < 270:\n array.append((counts[deg], 2, 270 - deg, dist, i, j))\n else:\n array.append((counts[deg], 3, 180 - deg, dist, i, j))\n \n array.sort()\n print(array)\n coord = array[199]\n return coord[5] * 100 + coord[4]\n\nif __name__ == \"__main__\":\n import sys\n inp = parse(sys.stdin.read())\n part1_res, (x,y) = part1(inp.copy())\n print(part1_res, x, y)\n print(\"=========\")\n print(part2(inp.copy(), x, y))" }, { "alpha_fraction": 0.41408973932266235, "alphanum_fraction": 0.4443152844905853, "avg_line_length": 26.754838943481445, "blob_id": "517dca9d7221ed7da10740f44c2489b9804f6d55", "content_id": "d7dbbb1095dc26f0abcd8b35f124afe0525f290b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4301, "license_type": "no_license", "max_line_length": 158, "num_lines": 155, "path": "/2019/day13.py", "repo_name": "asishm/advent-of-code-2019-and-2016", "src_encoding": "UTF-8", "text": "import math\nimport collections\nimport itertools\nimport string\nimport re\n\ndef compute(inp, phase=0):\n yield\n from collections import defaultdict\n i = 0\n relative_base = 0\n memory = defaultdict(int)\n for idx,point in enumerate(inp):\n memory[idx] = point\n while True:\n val = memory[i]\n strval = f\"{val:05}\"\n (a,b,c), opcode = strval[:3], int(strval[3:])\n \n if opcode == 99:\n return\n\n a1, a2, a3 = memory[i+1], memory[i+2], memory[i+3]\n # print(strval, opcode, b, c, a1, a2, a3)\n # print(memory, a1, a2, a3, b, c, opcode)\n val1 = memory[a1] if c == '0' else a1 if c == '1' else memory[a1 + relative_base]\n val2 = memory[a2] if b == '0' else a2 if b == '1' else memory[a2 + relative_base]\n a3 = a3 + relative_base if a == '2' else a3\n # print(f\"STRVAL: {strval}, OPCODE: {opcode}, PARAM1: {a1}, VAL1: {val1}, MODE1: {c} PARAM2: {a2}, VAL2: {val2}, MODE2: {b} PARAM3: 
{a3}, MODE3: {a}\")\n if opcode == 1:\n memory[a3] = val1 + val2\n i += 4\n elif opcode == 2:\n memory[a3] = val1 * val2\n i += 4\n elif opcode == 3:\n val = yield\n idx = a1 + relative_base if c == '2' else a1\n memory[idx] = val\n # print(f\"SETTING MEMORY: a1={a1}, mode={c} relative={relative_base} input={val}\")\n i += 2\n elif opcode == 4:\n # print(f\"YIELDING VALUE: {val1}\")\n yield val1\n i += 2\n elif opcode == 7:\n memory[a3] = int(val1 < val2)\n i += 4\n elif opcode == 8:\n memory[a3] = int(val1 == val2)\n i += 4\n elif opcode == 6:\n i = val2 if val1 == 0 else i + 3\n elif opcode == 5:\n i = val2 if val1 != 0 else i + 3\n elif opcode == 9:\n relative_base += val1\n # print(f\"CHANGE RELATIVE BASE: {relative_base} {val1}\")\n i += 2\n\ndef parse(inp):\n return [int(k) for k in inp.strip().split(',')]\n\ndef part1(inp):\n computer = compute(inp.copy())\n next(computer)\n i = 0\n x, y, val = None, None, None\n items = collections.defaultdict(int)\n while True:\n try:\n out = computer.send(1)\n except StopIteration:\n break\n assert out is not None\n if i % 3 == 0:\n x = out\n elif i % 3 == 1:\n y = out\n else:\n val = out\n items[(x,y)] = val\n x, y, val = None, None, None\n i += 1\n # print(items)\n return sum(k == 2 for k in items.values())\n\ndef draw_grid(x, y, val):\n xmax = max(x)\n xmin = min(x)\n ymax = max(y)\n ymin = min(y)\n c = collections.defaultdict(int)\n for a,b,d in zip(x,y,val):\n c[(a,b)] = d\n for y in range(ymin, ymax+1):\n for x in range(xmin, xmax+1):\n v = c[(x,y)]\n if v == 0:\n print(' ', end='')\n else:\n print(v, end='')\n print()\n\ndef part2(inp):\n import random\n inp[0] = 2\n computer = compute(inp)\n next(computer)\n ball = None\n paddle = None\n x = y = val = None\n i = 0\n score = None\n while True:\n try:\n # draw_grid(x_arr, y_arr, val_arr)\n if ball and paddle:\n if ball[0] < paddle[0]:\n player_inp = -1\n elif ball[0] > paddle[0]:\n player_inp = 1\n else:\n player_inp = 0\n else:\n player_inp = 0\n out = computer.send(player_inp)\n except StopIteration:\n break\n # print(out, end=',')\n if out is None:\n continue\n if i % 3 == 0:\n x = out\n elif i % 3 == 1:\n y = out\n else:\n if x == -1 and y == 0:\n score = out\n print(score)\n else:\n val = out\n if val == 3:\n paddle = (x, y)\n elif val == 4:\n ball = (x, y)\n i += 1\n print(score)\n\n\nif __name__ == \"__main__\":\n import sys\n inp = parse(sys.stdin.read())\n print(part1(inp.copy()))\n print(part2(inp.copy()))" }, { "alpha_fraction": 0.3476177752017975, "alphanum_fraction": 0.4972052276134491, "avg_line_length": 39.84782791137695, "blob_id": "523e31cd7ca88531c96f2540d467cb3a449a7f7e", "content_id": "0971fc9404086976bcc3216ed8f3fd61c0bfc64b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3757, "license_type": "no_license", "max_line_length": 151, "num_lines": 92, "path": "/2019/day8.py", "repo_name": "asishm/advent-of-code-2019-and-2016", "src_encoding": "UTF-8", "text": "from collections import Counter\nfrom PIL import Image\nimport subprocess as sp\nimport os\n\ndef parse(inp):\n return [int(k) for k in inp.strip()]\n\ndef part1(inp, *args, **kwargs):\n layers = [Counter(inp[i:i+25*6]) for idx,i in enumerate(range(0, len(inp), 25*6))]\n min_layer = min(layers, key=lambda x: x[0])\n return min_layer[1] * min_layer[2]\n\nletters = {\n 'A' : [[0,1,1,1,1,1],[1,0,0,1,0,0],[1,0,0,1,0,0],[0,1,1,1,1,1],[0,0,0,0,0,0]],\n 'B' : [[1,1,1,1,1,1],[1,0,1,0,0,1],[1,0,1,0,0,1],[0,1,0,1,1,0],[0,0,0,0,0,0]],\n 'C' : 
[[0,1,1,1,1,0],[1,0,0,0,0,1],[1,0,0,0,0,1],[0,1,0,0,1,0],[0,0,0,0,0,0]],\n 'E' : [[1,1,1,1,1,1],[1,0,1,0,0,1],[1,0,1,0,0,1],[1,0,0,0,0,1],[0,0,0,0,0,0]],\n 'F' : [[1,1,1,1,1,1],[1,0,1,0,0,0],[1,0,1,0,0,0],[1,0,0,0,0,0],[0,0,0,0,0,0]],\n 'G' : [[0,1,1,1,1,0],[1,0,0,0,0,1],[1,0,0,1,0,1],[0,1,0,1,1,1],[0,0,0,0,0,0]],\n 'H' : [[1,1,1,1,1,1],[0,0,1,0,0,0],[0,0,1,0,0,0],[1,1,1,1,1,1],[0,0,0,0,0,0]],\n 'J' : [[0,0,0,0,1,0],[0,0,0,0,0,1],[1,0,0,0,0,1],[1,1,1,1,1,0],[0,0,0,0,0,0]],\n 'K' : [[1,1,1,1,1,1],[0,0,1,0,0,0],[0,1,0,1,1,0],[1,0,0,0,0,1],[0,0,0,0,0,0]],\n 'L' : [[1,1,1,1,1,1],[0,0,0,0,0,1],[0,0,0,0,0,1],[0,0,0,0,0,1],[0,0,0,0,0,0]],\n 'P' : [[1,1,1,1,1,1],[1,0,0,1,0,0],[1,0,0,1,0,0],[0,1,1,0,0,0],[0,0,0,0,0,0]],\n 'R' : [[1,1,1,1,1,1],[1,0,0,1,0,0],[1,0,0,1,1,0],[0,1,1,0,0,1],[0,0,0,0,0,0]],\n 'U' : [[1,1,1,1,1,0],[0,0,0,0,0,1],[0,0,0,0,0,1],[1,1,1,1,1,0],[0,0,0,0,0,0]],\n 'Y' : [[1,1,0,0,0,0],[0,0,1,0,0,0],[0,0,0,1,1,1],[0,0,1,0,0,0],[1,1,0,0,0,0]],\n 'Z' : [[1,0,0,0,1,1],[1,0,0,1,0,1],[1,0,1,0,0,1],[1,1,0,0,0,1],[0,0,0,0,0,0]],\n}\n\ndef part2_oneline(inp, *args, **kwargs):\n ## ugly one-liner\n return '\\n'.join(''.join(next('\\u2588' if val == 1 else ' ' for val in inp[i*25+j:len(inp):25*6] if val !=2) for j in range(25)) for i in range(6))\n\ndef part2_ocr(inp, *args, **kwargs):\n grid = [[inp[i*25+j:len(inp):25*6] for j in range(25)] for i in range(6)]\n out = []\n width = 50\n height = 12\n margin_width = 3\n margins = [(255,255,255)] * (margin_width * 2 + width)\n for _ in range(margin_width):\n out.append(margins)\n for row in grid:\n tmp = [(255,255,255)] * margin_width\n for vals in row:\n val = next((0,0,0) if val == 1 else (255,255,255) for val in vals if val != 2)\n tmp.extend([val] * (width // 25))\n tmp.extend([(255,255,255)] * margin_width)\n out.append(tmp * (height // 6))\n # print(out)\n for _ in range(margin_width):\n out.append(margins)\n # print(len(out), len(out[0]))\n im = Image.new('RGB', (width + 2 * margin_width, height + 2 * margin_width))\n im.putdata([k for row in out for k in row])\n im.convert('1').save(\"day8.png\")\n proc = sp.Popen(\"tesseract day8.png stdout -l eng --oem 0 --psm 6\", stdout=sp.PIPE, stderr=sp.PIPE)\n out, err = proc.communicate()\n\n return out.decode().strip()\n\ndef part2_letter(inp, *args, **kwargs):\n grid = [[inp[i*25+j:len(inp):25*6] for j in range(25)] for i in range(6)]\n out = [[] for _ in range(5)]\n for row in grid:\n tmp = []\n for i, vals in enumerate(row):\n val = next(val for val in vals if val != 2)\n tmp.append(val)\n if (i + 1) % 5 == 0:\n out[i // 5].append(tmp)\n tmp = []\n out_str = []\n for letter in out:\n letter = [list(k) for k in zip(*letter)]\n # print(letter)\n for k,v in letters.items():\n if letter == v:\n out_str.append(k)\n\n return ''.join(out_str)\n\nif __name__ == \"__main__\":\n import sys\n inp = parse(sys.stdin.read())\n print(part1(inp.copy()))\n print(\"=========\")\n print(part2_letter(inp.copy()))\n print(part2_ocr(inp.copy()))\n print(part2_oneline(inp.copy()))" }, { "alpha_fraction": 0.5259938836097717, "alphanum_fraction": 0.5458715558052063, "avg_line_length": 22.39285659790039, "blob_id": "c26c6f8837c592da5d4fcd5e30011e7c61cf3518", "content_id": "e21f2654b57f8c89ab334331385238633c4588f1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 654, "license_type": "no_license", "max_line_length": 85, "num_lines": 28, "path": "/2016/day3.py", "repo_name": "asishm/advent-of-code-2019-and-2016", "src_encoding": "UTF-8", "text": "import 
itertools\n\ndef parse(inp):\n # print(inp)\n return [list(map(int, k.strip().split())) for k in inp.splitlines() if k.strip()]\n\ndef part1(inp):\n inp = [sorted(k) for k in inp]\n return sum(k[0] + k[1] > k[2] for k in inp)\n\ndef part2(inp):\n inp = itertools.chain.from_iterable(zip(*inp))\n count = 0\n while True:\n try:\n lens = sorted([next(inp) for _ in range(3)])\n except StopIteration:\n break\n if lens[0] + lens[1] > lens[2]:\n count += 1\n return count\n\n\nif __name__ == \"__main__\":\n import sys\n inp = parse(sys.stdin.read())\n print(part1(inp))\n print(part2(inp))" }, { "alpha_fraction": 0.4218061566352844, "alphanum_fraction": 0.4548458158969879, "avg_line_length": 22.921052932739258, "blob_id": "848cc503e1d59f62ad1347c65e8684472cad8a6d", "content_id": "4563745e09329305d3fef9aa37c1c161925ef638", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 908, "license_type": "no_license", "max_line_length": 59, "num_lines": 38, "path": "/2016/day5.py", "repo_name": "asishm/advent-of-code-2019-and-2016", "src_encoding": "UTF-8", "text": "import hashlib\n\ndef parse(inp):\n return inp.strip()\n\ndef part1(inp):\n idx = 1\n pwd = []\n for _ in range(8):\n while True:\n s = f\"{inp}{idx}\".encode()\n hashed = hashlib.md5(s).hexdigest()\n if hashed[:5] == '00000':\n pwd.append(hashed[5])\n break\n idx += 1\n idx += 1\n return ''.join(pwd)\n\ndef part2(inp):\n idx = 1\n pwd = [None] * 8\n while not all(pwd):\n s = f\"{inp}{idx}\".encode()\n hashed = hashlib.md5(s).hexdigest()\n if hashed[:5] == '00000':\n pos, char = hashed[5:7]\n if '0' <= pos <= '7' and pwd[int(pos)] is None:\n pwd[int(pos)] = char\n # print(pos, char, pwd)\n idx += 1\n return ''.join(pwd)\n\nif __name__ == \"__main__\":\n import sys\n inp = parse(sys.stdin.read())\n print(part1(inp))\n print(part2(inp))" } ]
22
callisto1337/jansale
https://github.com/callisto1337/jansale
7090d6f94a1444bd309ae8972dd355a5c1603d44
b1b46853c983a8868de29ab0c01cdf6af47ab927
c13ce1aabe2b422196cd22d50d5581421db310b6
refs/heads/master
2021-08-22T17:27:52.402340
2017-11-30T20:38:59
2017-11-30T20:38:59
112,032,218
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6643356680870056, "alphanum_fraction": 0.6818181872367859, "avg_line_length": 26.238094329833984, "blob_id": "465a23504cfda6230d9e3d247e058742e2161b3c", "content_id": "3bff80b01c9bd7c4cd16b255187b778e96fc3ce2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 572, "license_type": "no_license", "max_line_length": 64, "num_lines": 21, "path": "/app/models.py", "repo_name": "callisto1337/jansale", "src_encoding": "UTF-8", "text": "from django.db import models\n\n\nclass Category(models.Model):\n name = models.CharField(max_length=40)\n url = models.CharField(max_length=100, blank=True)\n\n def __str__(self):\n return self.name\n\n\nclass Product(models.Model):\n image = models.ImageField(upload_to='products/')\n name = models.CharField(max_length=40)\n price = models.DecimalField(max_digits=20, decimal_places=2)\n category = models.ForeignKey(Category, blank=True)\n popular = models.BooleanField()\n new = models.BooleanField()\n\n def __str__(self):\n return self.name\n" }, { "alpha_fraction": 0.7118644118309021, "alphanum_fraction": 0.7118644118309021, "avg_line_length": 13.75, "blob_id": "cc357a2328ac84a14677b792dc8e1dbf24e236df", "content_id": "8a70eb301f038fd47a772596fac3e894e7cfe066", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 59, "license_type": "no_license", "max_line_length": 42, "num_lines": 4, "path": "/README.md", "repo_name": "callisto1337/jansale", "src_encoding": "UTF-8", "text": "Jansale\n======\n\nE-commerce platform for Python and Django.\n" }, { "alpha_fraction": 0.7008797526359558, "alphanum_fraction": 0.7008797526359558, "avg_line_length": 30, "blob_id": "ce2014c61da676c1caf8614d9e57e812acc4e0ab", "content_id": "ed92953d7ec8e0029a44070c100b8f4b56ad9cbf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 341, "license_type": "no_license", "max_line_length": 65, "num_lines": 11, "path": "/app/urls.py", "repo_name": "callisto1337/jansale", "src_encoding": "UTF-8", "text": "from django.conf import settings\nfrom django.conf.urls import url\nfrom django.conf.urls.static import static\nfrom . 
import views\n\n\nurlpatterns = [\n url(r'^$', views.main),\n url(r'^contacts/$', views.contacts),\n url(r'^category/(?P<url>[A-Za-z]+)/$', views.category)\n] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)\n" }, { "alpha_fraction": 0.6540948152542114, "alphanum_fraction": 0.662715494632721, "avg_line_length": 27.121212005615234, "blob_id": "17cfbe578108a15905ddb9d0952964e2b7ea510b", "content_id": "af47bea91c871c2a77927f49b42c535e32c3f32c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 928, "license_type": "no_license", "max_line_length": 76, "num_lines": 33, "path": "/app/views.py", "repo_name": "callisto1337/jansale", "src_encoding": "UTF-8", "text": "from django.shortcuts import render, get_object_or_404\nfrom .models import Product, Category\n\n\ndef main(request):\n popular = Product.objects.all().filter(popular=True)[:4]\n new = Product.objects.all().filter(new=True)[:4]\n categories = Category.objects.all()\n\n return render(request, 'app/main.html', {\n 'popular': popular, 'new': new,\n 'categories': categories}\n )\n\n\ndef category(request, url):\n data_category = get_object_or_404(Category, url=url)\n category_items = Product.objects.all().filter(category=data_category.pk)\n categories = Category.objects.all()\n\n return render(request, 'app/category.html', {\n 'data_category': data_category,\n 'category_items': category_items,\n 'categories': categories}\n )\n\n\ndef contacts(request):\n categories = Category.objects.all()\n\n return render(request, 'app/contacts.html', {\n 'categories': categories}\n )\n" } ]
4
leigharobinson/chap11_PythonClassesCont
https://github.com/leigharobinson/chap11_PythonClassesCont
a5df83e7c8846997c48af40d6a91b9cbe8aceb37
78a70558fc726e226375a33a769789b7653ca41e
fa7083ee5a89bb7509b3bcf3929aba8a0f0a1132
refs/heads/master
2022-11-20T02:47:08.083419
2020-07-20T13:43:07
2020-07-20T13:43:07
281,127,712
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5767747759819031, "alphanum_fraction": 0.5910637378692627, "avg_line_length": 18.683034896850586, "blob_id": "0212dc445fe6e44ee621b68a63dd718d16f8670f", "content_id": "150e932b259c3736918981910b5a396126647224", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4409, "license_type": "no_license", "max_line_length": 67, "num_lines": 224, "path": "/animals_2.py", "repo_name": "leigharobinson/chap11_PythonClassesCont", "src_encoding": "UTF-8", "text": "# import the python datetime module to help us create a timestamp\nfrom datetime import date\n\n# 1\n\n\nclass Llama:\n\n def __init__(self, name, species, shift):\n # Establish the properties of each animal\n # with a default value\n self.name = name\n self.species = species\n self.shift = shift\n self.date_added = date.today()\n self.walking = True\n\n# 2\n\n\nclass Donkey:\n\n def __init__(self, name, species, shift):\n self.name = name\n self.species = species\n self.shift = shift\n self.date_added = date.today()\n self.walking = True\n# 3\n\n\nclass Goat:\n\n def __init__(self, name, species, shift):\n self.name = name\n self.species = species\n self.shift = shift\n self.date_added = date.today()\n self.walking = True\n# 4\n\n\nclass Pony:\n\n def __init__(self, name, species, shift):\n self.name = name\n self.species = species\n self.shift = shift\n self.date_added = date.today()\n self.walking = True\n# 5\n\n\nclass Turkey:\n\n def __init__(self, name, species, shift):\n self.name = name\n self.species = species\n self.shift = shift\n self.date_added = date.today()\n self.walking = True\n# 6\n\n\nclass Copperhead:\n\n def __init__(self, name, species):\n self.name = name\n self.species = species\n self.date_added = date.today()\n self.slithering = True\n# 7\n\n\nclass Rat_Snake:\n\n def __init__(self, name, species):\n self.name = name\n self.species = species\n self.date_added = date.today()\n self.slithering = True\n# 8\n\n\nclass Northern_Water_Snake:\n\n def __init__(self, name, species):\n self.name = name\n self.species = species\n self.date_added = date.today()\n self.slithering = True\n# 9\n\n\nclass King_Snake:\n\n def __init__(self, name, species):\n self.name = name\n self.species = species\n self.date_added = date.today()\n self.slithering = True\n\n# 10\n\n\nclass Timber_Rattlesnake:\n\n def __init__(self, name, species):\n self.name = name\n self.species = species\n self.date_added = date.today()\n self.slithering = True\n# 11\n\n\nclass Mallard:\n\n def __init__(self, name, species):\n self.name = name\n self.species = species\n self.date_added = date.today()\n self.swimming = True\n\n# 12\n\n\nclass Goldfish:\n\n def __init__(self, name, species):\n self.name = name\n self.species = species\n self.date_added = date.today()\n self.swimming = True\n# 13\n\n\nclass Yellow_Bellied_Slider:\n\n def __init__(self, name, species):\n self.name = name\n self.species = species\n self.date_added = date.today()\n self.swimming = True\n# 14\n\n\nclass Brook_Trout:\n\n def __init__(self, name, species):\n self.name = name\n self.species = species\n self.date_added = date.today()\n self.swimming = True\n\n# 15\n\n\nclass Bluegill:\n\n def __init__(self, name, species):\n self.name = name\n self.species = species\n self.date_added = date.today()\n self.swimming = True\n\n\n# 1\nmiss_fuzz = Llama(\"Miss Fuzz\", \"domestic llama\", \"morning\")\nprint(\"1.\", miss_fuzz.name)\n# 2\nmr_long_ears = Donkey(\"Mr. 
Long Ears\", \"domestic donkey\", \"midday\")\nprint(\"2.\", mr_long_ears.name)\n# 3\nbilly = Goat(\"Billy\", \"domestic goat\", \"afternoon\")\n\nprint(\"3.\", billy.name)\n# 4\nteddy = Pony(\"Teady\", \"domestic pony\", \"morning\")\n\nprint(\"4.\", teddy.name)\n# 5\nfrank = Turkey(\"Frank\", \"domestic turkey\", \"midday\")\n\nprint(\"5.\", frank.name)\n# 6\ncopper = Copperhead(\"Copper\", \"Copperhead Snake\")\n\nprint(\"6.\", copper.name)\n# 7\nbigBoi = Rat_Snake(\"Big Boy\", \"Rat Snake\")\n\nprint(\"7.\", bigBoi.name)\n# 8\nhappy = Northern_Water_Snake(\"Happy\", \"Rat Snake\")\n\nprint(\"8.\", happy.name)\n# 9\nthe_king = King_Snake(\"The King\", \"King Snake\")\n\nprint(\"9.\", the_king.name)\n# 10\nole_shakey = Timber_Rattlesnake(\"Shakey Graves\", \"Timber Rattlesnake\")\n\nprint(\"10.\", ole_shakey.name)\n# 11\ndonald = Mallard(\"Donald\", \"Mallard\")\n\nprint(\"11.\", donald.name)\n# 12\ngoldie = Goldfish(\"Goldie\", \"Goldfish\")\n\nprint(\"12.\", goldie.name)\n# 13\nbruce = Yellow_Bellied_Slider(\"Bruce\", \"Yellow Bellied Slider\")\n\nprint(\"13.\", bruce.name)\n# 14\ngrumpy = Brook_Trout(\"Grumpy\", \"Brook Trout\")\n\nprint(\"14.\", grumpy.name)\n# 15\nblue_boi = Bluegill(\"Blue Boy\", \"Bluegill\")\n\nprint(\"15.\", blue_boi.name)\n" } ]
1
adarshjudoka212/pythonprogramme
https://github.com/adarshjudoka212/pythonprogramme
ccfeef4a3ee573b866d4155bf3dfcb03e5520a52
1b3865ddc6060deebd74529ce294b152d5c8f25d
c149bd230f718e193b21f24334058be3687d5e98
refs/heads/main
2023-08-04T01:37:25.588287
2021-09-15T14:38:38
2021-09-15T14:38:38
405,647,708
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.47457626461982727, "alphanum_fraction": 0.49152541160583496, "avg_line_length": 16.5, "blob_id": "1867ca40ecfbe91777c58b9249ad239b9817c873", "content_id": "01f61dd37d771c36e425d22092d66d122558934f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 177, "license_type": "permissive", "max_line_length": 31, "num_lines": 10, "path": "/code3.py", "repo_name": "adarshjudoka212/pythonprogramme", "src_encoding": "UTF-8", "text": "def recurv(n):\n if n<=1:\n return n\n else:\n return n + recurv(n-1)\n \nif num < 0:\n print(\"print +ve number\")\nelse:\n print(\"sum is\",recurv(num))\n\n\n" }, { "alpha_fraction": 0.5045871734619141, "alphanum_fraction": 0.5321100950241089, "avg_line_length": 17.33333396911621, "blob_id": "9785630a8be9c34627b07b0624cc9feb4eb5fa4a", "content_id": "ad459f7c6b76019e6a96e19aedef3e5b763f43c6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 109, "license_type": "permissive", "max_line_length": 32, "num_lines": 6, "path": "/code4.py", "repo_name": "adarshjudoka212/pythonprogramme", "src_encoding": "UTF-8", "text": "def recur_sum(n):\n if n <= 1:\n return n\n else:\n return n + recur_sum(n-1)\nprint(recur_sum(0))" }, { "alpha_fraction": 0.4637681245803833, "alphanum_fraction": 0.49275362491607666, "avg_line_length": 12.600000381469727, "blob_id": "76f70f759bb3d64df998a410cb600980b85c7e35", "content_id": "bbbf17a3996e84b02e39f13c5f1c38c29bc3dbf7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 69, "license_type": "permissive", "max_line_length": 19, "num_lines": 5, "path": "/code5.py", "repo_name": "adarshjudoka212/pythonprogramme", "src_encoding": "UTF-8", "text": "def fun():\n a=89\n str=\"adar\"\n return[a,str]; \nprint(fun())\n\n" }, { "alpha_fraction": 0.8571428656578064, "alphanum_fraction": 0.8571428656578064, "avg_line_length": 16.5, "blob_id": "c2c7fc7b629f2f2db34d155460a784dbf2195825", "content_id": "00e35d68c079fc4605c36d7026382bac8dd19248", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 35, "license_type": "permissive", "max_line_length": 17, "num_lines": 2, "path": "/README.md", "repo_name": "adarshjudoka212/pythonprogramme", "src_encoding": "UTF-8", "text": "# pythonprogramme\nbasic programmes\n" }, { "alpha_fraction": 0.4234234094619751, "alphanum_fraction": 0.45945945382118225, "avg_line_length": 15.833333015441895, "blob_id": "b59b2af76162cc342593788b5aefaca72d74fe58", "content_id": "8b172300429da567d5132c6f988401ac3fb62206", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 111, "license_type": "permissive", "max_line_length": 19, "num_lines": 6, "path": "/code2.py", "repo_name": "adarshjudoka212/pythonprogramme", "src_encoding": "UTF-8", "text": "def out(a,b):\n def innn(a,b):\n return(a+b)\n c=innn(a,b)\n return c+5\nprint(out(25,6))\n\n \n" } ]
5
jozefzivcic/hello-world
https://github.com/jozefzivcic/hello-world
fd5d6dafe1df409061c5afd13fc8220218291b9f
c48f2d1eddb7809b61e9a2846299d045dd01b293
7683e38e02ea3e1bd7f4fda9c4c4a37f06311f94
refs/heads/master
2021-01-17T22:26:47.673106
2018-02-15T13:18:22
2018-02-15T13:18:22
32,940,927
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.800000011920929, "alphanum_fraction": 0.800000011920929, "avg_line_length": 21.5, "blob_id": "377ff8b502979fbb13f2b0e8bdcb49aa03f10457", "content_id": "cfaff2c4d557158f0c9243a390a47e851a5e8545", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 180, "license_type": "no_license", "max_line_length": 41, "num_lines": 8, "path": "/README.md", "repo_name": "jozefzivcic/hello-world", "src_encoding": "UTF-8", "text": "# hello-world\nHello-world\nThis is testing and creating Hello world!\nthis was created in test branch\nthis was created in master\nthis was added in test\nthis was added\nthis was added\n" }, { "alpha_fraction": 0.5986621975898743, "alphanum_fraction": 0.6053511500358582, "avg_line_length": 17.6875, "blob_id": "e0daf59d8d405c3664af023b2c67a0c01cde09cb", "content_id": "81e285273b5900efb5805206ef2cdc6fe86bfde4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 299, "license_type": "no_license", "max_line_length": 69, "num_lines": 16, "path": "/ret_val.py", "repo_name": "jozefzivcic/hello-world", "src_encoding": "UTF-8", "text": "import argparse\nimport sys\n\n\ndef main():\n parser = argparse.ArgumentParser(description=\"Parsing arguments\")\n parser.add_argument('-i', action='store_true')\n args = parser.parse_args()\n if args.i:\n return -1\n return 0\n\n\nif __name__ == '__main__':\n x = main()\n sys.exit(x)\n" } ]
2
ananyamukh6/ngraph-bridge
https://github.com/ananyamukh6/ngraph-bridge
9904b8e938171280921f25613188f0954bdfd59b
2a8c066912ffff0c5afdc1c27c94ba0f288748d7
165c2afaec16a8c122bf34c05dbb7ce54ab97852
refs/heads/master
2020-05-21T21:41:35.491956
2019-05-10T23:39:52
2019-05-10T23:39:52
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5763765573501587, "alphanum_fraction": 0.5905861258506775, "avg_line_length": 34.1875, "blob_id": "59eedfb887350a9a89df78d8637c74e795f3d78c", "content_id": "37a430c3e9ffa8b47114557236bd83f917fc52af", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2252, "license_type": "permissive", "max_line_length": 80, "num_lines": 64, "path": "/examples/axpy.py", "repo_name": "ananyamukh6/ngraph-bridge", "src_encoding": "UTF-8", "text": "# ==============================================================================\n# Copyright 2018-2019 Intel Corporation\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"nGraph TensorFlow axpy\n\n\"\"\"\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport getpass\nimport ctypes\n\nimport numpy as np\nimport tensorflow as tf\nimport ngraph_bridge\n\nprint(\"TensorFlow version: \", tf.GIT_VERSION, tf.VERSION)\n\n# Setup TensorBoard\ngraph_location = \"/tmp/\" + getpass.getuser() + \"/tensorboard-logs/test\"\nprint('Saving graph to: %s' % graph_location)\ntrain_writer = tf.summary.FileWriter(graph_location)\n\n# Define the data\na = tf.constant(np.full((2, 3), 5.0, dtype=np.float32), name='alpha')\nx = tf.placeholder(tf.float32, [None, 3], name='x')\ny = tf.placeholder(tf.float32, shape=(2, 3), name='y')\n\nc = a * x\naxpy = c + y\n\n# Configure the session\nconfig = tf.ConfigProto(\n allow_soft_placement=True,\n log_device_placement=False,\n inter_op_parallelism_threads=1)\n\n# Create session and run\nwith tf.Session(config=config) as sess:\n print(\"Python: Running with Session\")\n for i in range(10):\n (result_axpy, result_c) = sess.run((axpy, c),\n feed_dict={\n x: np.ones((2, 3)),\n y: np.ones((2, 3)),\n })\n print(\"[\", i, \"] \", i)\n print(\"Result: \\n\", result_axpy, \" C: \\n\", result_c)\n\ntrain_writer.add_graph(tf.get_default_graph())\ntf.train.write_graph(tf.get_default_graph(), '.', 'axpy.pbtxt', as_text=True)\n" } ]
1
mikeharris100/django-classfield
https://github.com/mikeharris100/django-classfield
b52b5f40b9c9543b30649428c12864226d0e98b2
b6f4c87696af128fef4d50c5deb7cc4d863aad81
11e6492c97aebbdd6eea1a48d333b824a708e09e
refs/heads/master
2021-01-21T10:13:17.651973
2014-12-01T09:48:33
2014-12-01T09:48:33
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.577102780342102, "alphanum_fraction": 0.5825545191764832, "avg_line_length": 28.813953399658203, "blob_id": "6a635fafa235394256193f0c490f0176f58dd6c1", "content_id": "0611cc694a3460b562a5c4ef5f6ebf8a56aeac5d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1284, "license_type": "permissive", "max_line_length": 88, "num_lines": 43, "path": "/classfield/fields.py", "repo_name": "mikeharris100/django-classfield", "src_encoding": "UTF-8", "text": "from django.db import models\nfrom django.db.models import SubfieldBase\nfrom django.utils.translation import ugettext_lazy as _\n\n\nclass ClassField(models.Field):\n\n description = _('Class Field')\n\n __metaclass__ = SubfieldBase\n\n def __init__(self, *args, **kwargs):\n if 'choices' not in kwargs:\n kwargs['editable'] = False\n\n kwargs.setdefault('max_length', 256)\n super(ClassField, self).__init__(*args, **kwargs)\n\n\n def get_prep_value(self, value):\n return \"%s.%s\" % (value.__module__, value.__name__)\n\n\n def to_python(self, value):\n if not isinstance(value, basestring):\n return value\n\n if value is None or value == '':\n return None\n\n parts = value.split( \".\" )\n imported = __import__('.'.join(parts[:-1]), globals(), locals(), [parts[-1]], 0)\n return getattr(imported, parts[-1])\n\n\n def get_db_prep_lookup(self, lookup_type, value):\n # We only handle 'exact' and 'in'. All others are errors.\n if lookup_type == 'exact':\n return [self.get_db_prep_save(value)]\n elif lookup_type == 'in':\n return [self.get_db_prep_save(v) for v in value]\n else:\n raise TypeError('Lookup type %r not supported.' % lookup_type)\n\n\n" }, { "alpha_fraction": 0.6557971239089966, "alphanum_fraction": 0.6739130616188049, "avg_line_length": 22, "blob_id": "81da53990dfb9bc4c9f9220429fb99959df736f4", "content_id": "c9c1479426828e92759cbddeff644ede0f88d622", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 276, "license_type": "permissive", "max_line_length": 61, "num_lines": 12, "path": "/setup.py", "repo_name": "mikeharris100/django-classfield", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nfrom distutils.core import setup\n\nsetup(\n name='django-classfield',\n version='1.0',\n description='Adds a class field to django',\n author='Mike Harris',\n url='https://github.com/mikeharris100/django-classfield',\n py_modules=['classfield']\n)\n" } ]
2
bonnetn/backend_adh
https://github.com/bonnetn/backend_adh
4752216084089f599ac850732b8bd52712f65aae
f41abd37865165955eddadbf9d104c0bfc404698
c01e1f6d0a31b7adc0f7f81ba65b1b9879b4c95b
refs/heads/master
2020-03-18T18:12:17.818928
2018-05-27T19:59:19
2018-05-27T19:59:19
135,077,000
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6393705010414124, "alphanum_fraction": 0.6420378684997559, "avg_line_length": 26.977611541748047, "blob_id": "0812ff805af6ea3685e83d022798c55d5c1c0237", "content_id": "1ea290a5fc1b2f2fd41312283b958b96f9eca57a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3749, "license_type": "no_license", "max_line_length": 77, "num_lines": 134, "path": "/adh/controller/device_utils.py", "repo_name": "bonnetn/backend_adh", "src_encoding": "UTF-8", "text": "from adh.model.models import Adherent, Portable, Ordinateur, Modification\nfrom sqlalchemy.sql.expression import literal\nfrom sqlalchemy.types import String\n\n\ndef is_wired(macAddress, s):\n \"\"\" Return true if the mac address corresponds to a wired device \"\"\"\n queryWired = s.query(Ordinateur)\n queryWired = queryWired.filter(Ordinateur.mac == macAddress)\n\n return s.query(queryWired.exists()).scalar()\n\n\ndef is_wireless(macAddress, s):\n \"\"\" Return true if the mac address corresponds to a wireless device \"\"\"\n queryWireless = s.query(Portable)\n queryWireless = queryWireless.filter(Portable.mac == macAddress)\n\n return s.query(queryWireless.exists()).scalar()\n\n\ndef create_wireless_device(admin, body, s):\n \"\"\" Create a wireless device in the database \"\"\"\n dev = Portable(\n mac=body['mac'],\n adherent=Adherent.find(s, body['username']),\n )\n\n s.add(dev)\n s.flush()\n\n Modification.add_and_commit(s, dev.adherent, dev.get_ruby_modif(), admin)\n\n\ndef create_wired_device(admin, body, s):\n \"\"\" Create a wired device in the database \"\"\"\n dev = Ordinateur(\n mac=body['mac'],\n ip=body['ipAddress'],\n ipv6=body['ipv6Address'],\n adherent=Adherent.find(s, body['username']),\n )\n\n s.add(dev)\n s.flush()\n\n Modification.add_and_commit(s, dev.adherent, dev.get_ruby_modif(), admin)\n\n\ndef update_wireless_device(admin, macAddress, body, s):\n \"\"\" Update a wireless device in the database \"\"\"\n q = s.query(Portable).filter(Portable.mac == macAddress)\n dev = q.one()\n\n dev.start_modif_tracking()\n dev.mac = body['mac']\n dev.adherent = Adherent.find(s, body['username'])\n s.flush()\n\n Modification.add_and_commit(s, dev.adherent, dev.get_ruby_modif(), admin)\n\n\ndef update_wired_device(admin, macAddress, body, s):\n \"\"\" Update a wired device in the database \"\"\"\n q = s.query(Ordinateur).filter(Ordinateur.mac == macAddress)\n dev = q.one()\n\n dev.start_modif_tracking()\n dev.mac = body['mac']\n dev.ip = body['ipAddress']\n dev.ipv6 = body['ipv6Address']\n dev.adherent = Adherent.find(s, body['username'])\n s.flush()\n\n Modification.add_and_commit(s, dev.adherent, dev.get_ruby_modif(), admin)\n\n\ndef delete_wired_device(admin, macAddress, s):\n \"\"\" Delete a wired device from the databse \"\"\"\n q = s.query(Ordinateur).filter(Ordinateur.mac == macAddress)\n dev = q.one()\n\n dev.start_modif_tracking()\n s.delete(dev)\n s.flush()\n\n Modification.add_and_commit(s, dev.adherent, dev.get_ruby_modif(), admin)\n\n\ndef delete_wireless_device(admin, macAddress, s):\n \"\"\" Delete a wireless device from the database \"\"\"\n q = s.query(Portable).filter(Portable.mac == macAddress)\n dev = q.one()\n\n dev.start_modif_tracking()\n s.delete(dev)\n s.flush()\n\n Modification.add_and_commit(s, dev.adherent, dev.get_ruby_modif(), admin)\n\n\ndef get_all_devices(s):\n\n q_wired = s.query(\n Ordinateur.mac.label(\"mac\"),\n Ordinateur.ip.label(\"ip\"),\n Ordinateur.ipv6.label(\"ipv6\"),\n 
Ordinateur.adherent_id.label(\"adherent_id\"),\n literal(\"wired\", type_=String).label(\"type\"),\n )\n\n q_wireless = s.query(\n Portable.mac.label(\"mac\"),\n literal(None, type_=String).label(\"ip\"),\n literal(None, type_=String).label(\"ipv6\"),\n Portable.adherent_id.label(\"adherent_id\"),\n literal(\"wireless\", type_=String).label(\"type\"),\n )\n q = q_wireless.union_all(q_wired)\n return q.subquery()\n\n\ndef _dev_to_gen(d):\n yield \"mac\", d.mac,\n yield \"connectionType\", d.type,\n if d.ip:\n yield \"ipAddress\", d.ip\n if d.ipv6:\n yield \"ipv6Address\", d.ipv6\n yield \"username\", d.login\n\n\ndef dev_to_dict(d):\n return dict(_dev_to_gen(d))\n" }, { "alpha_fraction": 0.5529412031173706, "alphanum_fraction": 0.5889411568641663, "avg_line_length": 22.480663299560547, "blob_id": "b8deb2bd5ca673a103e4384639850dc09beab5ee", "content_id": "bd6bd84942646ddbf87f658fc419619afb800934", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4250, "license_type": "no_license", "max_line_length": 57, "num_lines": 181, "path": "/test/test_room.py", "repo_name": "bonnetn/backend_adh", "src_encoding": "UTF-8", "text": "import json\nimport pytest\nfrom adh.model.database import Database as db\nfrom adh.model.models import Chambre, Vlan\nfrom CONFIGURATION import TEST_DATABASE as db_settings\nfrom .resource import base_url, TEST_HEADERS\n\n\[email protected]\ndef sample_vlan():\n yield Vlan(\n numero=42,\n adresses=\"192.168.1.0/24\",\n adressesv6=\"fe80::0\",\n )\n\n\[email protected]\ndef sample_room1(sample_vlan):\n yield Chambre(\n numero=4591,\n description=\"Chambre du swag\",\n telephone=\"1234\",\n vlan=sample_vlan,\n )\n\n\[email protected]\ndef sample_room2(sample_vlan):\n yield Chambre(\n numero=4592,\n description=\"Chambre voisine du swag\",\n telephone=\"5678\",\n vlan=sample_vlan,\n )\n\n\ndef prep_db(session,\n sample_room1,\n sample_room2,\n sample_vlan):\n session.add_all([\n sample_vlan,\n sample_room1,\n sample_room2,\n ])\n session.commit()\n\n\[email protected]\ndef api_client(sample_room1, sample_room2, sample_vlan):\n from .context import app\n with app.app.test_client() as c:\n db.init_db(db_settings, testing=True)\n prep_db(db.get_db().get_session(),\n sample_room1, sample_room2, sample_vlan)\n yield c\n\n\ndef test_room_to_dict(sample_room1):\n dict(sample_room1)\n\n\ndef test_room_filter_all_rooms(api_client):\n r = api_client.get(\n \"{}/room/\".format(base_url),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n response = json.loads(r.data.decode())\n assert len(response) == 2\n\n\ndef test_room_filter_all_rooms_limit_invalid(api_client):\n r = api_client.get(\n \"{}/room/?limit={}\".format(base_url, -1),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 400\n\n\ndef test_room_filter_all_rooms_limit(api_client):\n r = api_client.get(\n \"{}/room/?limit={}\".format(base_url, 1),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n response = json.loads(r.data.decode())\n assert len(response) == 1\n\n\ndef test_room_filter_by_term(api_client):\n r = api_client.get(\n \"{}/room/?terms={}\".format(base_url, \"voisin\"),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n response = json.loads(r.data.decode())\n assert len(response) == 1\n\n\ndef test_room_get_valid_room(api_client):\n r = api_client.get(\n \"{}/room/{}\".format(base_url, 4591),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n response = json.loads(r.data.decode())\n assert len(response) == 4\n\n\ndef 
test_room_get_invalid_room(api_client):\n r = api_client.get(\n \"{}/room/{}\".format(base_url, 4900),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 404\n\n\ndef test_room_put_new_room_invalid_vlan(api_client):\n room = {\n \"roomNumber\": 5110,\n \"vlan\": 45,\n \"phone\": 6842,\n \"description\": \"Chambre 5110\"\n }\n r = api_client.put(\n \"{}/room/{}\".format(base_url, 5110),\n data=json.dumps(room),\n content_type='application/json',\n headers=TEST_HEADERS,\n )\n assert r.status_code == 400\n\n\ndef test_room_put_new_room(api_client):\n room = {\n \"roomNumber\": 5110,\n \"vlan\": 42,\n \"phone\": 6842,\n \"description\": \"Chambre 5110\"\n }\n r = api_client.put(\n \"{}/room/{}\".format(base_url, 5110),\n data=json.dumps(room),\n content_type='application/json',\n headers=TEST_HEADERS,\n )\n assert r.status_code == 201\n\n\ndef test_room_put_update_room(api_client):\n room = {\n \"roomNumber\": 5110,\n \"vlan\": 42,\n \"phone\": 6842,\n \"description\": \"Chambre 5110\"\n }\n r = api_client.put(\n \"{}/room/{}\".format(base_url, 4591),\n data=json.dumps(room),\n content_type='application/json',\n headers=TEST_HEADERS,\n )\n assert r.status_code == 204\n\n\ndef test_room_delete_existant_room(api_client):\n r = api_client.delete(\n \"{}/room/{}\".format(base_url, 4591),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 204\n\n\ndef test_room_delete_non_existant_room(api_client):\n r = api_client.delete(\n \"{}/room/{}\".format(base_url, 4900),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 404\n" }, { "alpha_fraction": 0.5646651387214661, "alphanum_fraction": 0.6062355637550354, "avg_line_length": 23.742856979370117, "blob_id": "e2b469245d7d6ef79969b1f0a9d44574ae7c25a6", "content_id": "20dc8d5c682423f60573e0e34383dff0ca00ebc3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1732, "license_type": "no_license", "max_line_length": 67, "num_lines": 70, "path": "/adh/util/checks.py", "repo_name": "bonnetn/backend_adh", "src_encoding": "UTF-8", "text": "# from datetime import datetime\nfrom ipaddress import IPv4Address, IPv4Network\nfrom ipaddress import IPv6Address, IPv6Network, AddressValueError\nimport re\n\nMAC_REGEX = re.compile('^([0-9A-Fa-f]{2}:){5}([0-9A-Fa-f]{2})$')\nEMAIL_REGEX = re.compile(r\"[^@]+@[^@]+\\\\.[^@]+\")\n\n\ndef isEmail(mail):\n return EMAIL_REGEX.match(mail)\n\n\ndef isMac(macAddress):\n \"\"\" Allowed MAC address format: DE:AD:BE:EF:01:23 \"\"\"\n macAddress = str(macAddress).upper()\n return bool(MAC_REGEX.match(macAddress))\n\n\n# def checkDate(dateString):\n# \"\"\" Allowed date format: YYYY-MM-DD \"\"\"\n# splittedDate = dateString.split('-')\n# if len(splittedDate) != 3 \\\n# or len(splittedDate[0]) != 4 \\\n# or len(splittedDate[1]) != 2 \\\n# or len(splittedDate[2]) != 2:\n# return False\n# else:\n# try:\n# datetime(int(splittedDate[0]),\n# int(splittedDate[1]),\n# int(splittedDate[2]))\n# except (TypeError, ValueError):\n# return False\n# return True\n\n\ndef isIPv4Network(ipAddress):\n \"\"\" Allowed format: 192.168.0.1/24 \"\"\"\n try:\n IPv4Network(ipAddress, strict=False)\n except ValueError:\n return False\n return True\n\n\ndef isIPv6Network(ipAddress):\n try:\n IPv6Network(ipAddress, strict=False)\n except ValueError:\n return False\n return True\n\n\ndef isIPv4(ipAddress):\n \"\"\" Allowed format: 192.168.0.1 \"\"\"\n try:\n IPv4Address(ipAddress)\n except AddressValueError:\n return False\n return True\n\n\ndef isIPv6(ipAddress):\n \"\"\" Allowed format: 
fe80:0000:0000:0000:62eb:69ff:feec:c643 \"\"\"\n try:\n IPv6Address(ipAddress)\n except AddressValueError:\n return False\n return True\n" }, { "alpha_fraction": 0.5847522020339966, "alphanum_fraction": 0.598751425743103, "avg_line_length": 31.832298278808594, "blob_id": "6793cbe9048e1c276710ab5bed68dd103a56a999", "content_id": "d5ce260802c79cc1f8a1678c2587ed39d1a62516", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5286, "license_type": "no_license", "max_line_length": 78, "num_lines": 161, "path": "/adh/controller/device.py", "repo_name": "bonnetn/backend_adh", "src_encoding": "UTF-8", "text": "from connexion import NoContent\nfrom adh.exceptions import UserNotFound\nfrom adh.model.database import Database as db\nfrom adh.model import models\nfrom adh.model.models import Adherent\nfrom sqlalchemy.orm.exc import MultipleResultsFound\nfrom adh.exceptions import InvalidIPv4, InvalidIPv6, InvalidMac\nfrom adh.controller.device_utils import is_wired, is_wireless, \\\n delete_wireless_device, \\\n delete_wired_device, \\\n update_wireless_device, \\\n update_wired_device, \\\n create_wireless_device, \\\n create_wired_device, \\\n get_all_devices, \\\n dev_to_dict\nfrom adh.auth import auth_simple_user\n\n\n@auth_simple_user\ndef filterDevice(admin, limit=100, offset=0, username=None, terms=None):\n \"\"\" [API] Filter the list of the devices according to some criteria \"\"\"\n s = db.get_db().get_session()\n\n if limit < 0:\n return 'Limit must be a positive number', 400\n\n # Return a subquery with all devices (wired & wireless)\n # The fields ip, ipv6, dns, etc. are set to None for wireless devices\n # There is also a field \"type\" which is either wired or wireless\n all_devices = get_all_devices(s)\n\n # Query all devices and their owner's username\n q = s.query(all_devices, Adherent.login.label(\"login\"))\n q = q.join(Adherent, Adherent.id == all_devices.columns.adherent_id)\n\n if username:\n q = q.filter(Adherent.login == username)\n\n if terms:\n q = q.filter(\n (all_devices.columns.mac.contains(terms)) |\n (all_devices.columns.ip.contains(terms)) |\n (all_devices.columns.ipv6.contains(terms)) |\n (Adherent.login.contains(terms))\n )\n count = q.count()\n q = q.order_by(all_devices.columns.mac.asc())\n q = q.offset(offset)\n q = q.limit(limit)\n r = q.all()\n results = list(map(dev_to_dict, r))\n\n headers = {\n \"X-Total-Count\": count,\n \"access-control-expose-headers\": \"X-Total-Count\"\n }\n return results, 200, headers\n\n\n@auth_simple_user\ndef putDevice(admin, macAddress, body):\n \"\"\" [API] Put (update or create) a new device in the database \"\"\"\n s = db.get_db().get_session()\n try:\n wired = is_wired(macAddress, s)\n wireless = is_wireless(macAddress, s)\n wanted_type = body[\"connectionType\"]\n\n # TODO: Make proper IP assignment system\n if wanted_type == \"wired\":\n if 'ipAddress' not in body:\n body['ipAddress'] = '192.168.0.1'\n if 'ipv6Address' not in body:\n body['ipv6Address'] = 'fe80::1'\n\n if wired and wireless:\n if wanted_type == \"wired\":\n delete_wireless_device(admin, macAddress, s)\n update_wired_device(admin, macAddress, body, s)\n else:\n delete_wired_device(admin, macAddress, s)\n update_wireless_device(admin, macAddress, body, s)\n elif wired:\n if wanted_type == \"wireless\":\n delete_wired_device(admin, macAddress, s)\n create_wireless_device(admin, body, s)\n else:\n update_wired_device(admin, macAddress, body, s)\n elif wireless:\n if wanted_type == \"wired\":\n 
delete_wireless_device(admin, macAddress, s)\n create_wired_device(admin, body, s)\n else:\n update_wireless_device(admin, macAddress, body, s)\n else: # Create device\n if body[\"mac\"] != macAddress:\n return 'The MAC address in the query ' + \\\n 'and in the body don\\'t match', 400\n\n if wanted_type == \"wired\":\n create_wired_device(admin, body, s)\n else:\n create_wireless_device(admin, body, s)\n\n s.commit()\n return NoContent, 201\n\n s.commit()\n return NoContent, 204\n\n except UserNotFound:\n return 'User not found', 400\n\n except InvalidMac:\n return 'Invalid mac', 400\n\n except InvalidIPv6:\n return 'Invalid IPv6', 400\n\n except InvalidIPv4:\n return 'Invalid IPv4', 400\n except MultipleResultsFound:\n return 'Multiple records for that MAC address found in database. ' + \\\n 'A MAC address should be unique. Fix your database.', 500\n\n\n@auth_simple_user\ndef getDevice(admin, macAddress):\n \"\"\" [API] Return the device specified by the macAddress \"\"\"\n s = db.get_db().get_session()\n if is_wireless(macAddress, s):\n q = s.query(models.Portable)\n q = q.filter(models.Portable.mac == macAddress)\n r = q.one()\n return dict(r), 200\n\n elif is_wired(macAddress, s):\n q = s.query(models.Ordinateur)\n q = q.filter(models.Ordinateur.mac == macAddress)\n r = q.one()\n return dict(r), 200\n\n else:\n return NoContent, 404\n\n\n@auth_simple_user\ndef deleteDevice(admin, macAddress):\n \"\"\" [API] Delete the specified device from the database \"\"\"\n s = db.get_db().get_session()\n if is_wireless(macAddress, s):\n delete_wireless_device(admin, macAddress, s)\n return NoContent, 204\n\n elif is_wired(macAddress, s):\n delete_wired_device(admin, macAddress, s)\n return NoContent, 204\n\n else:\n return NoContent, 404\n" }, { "alpha_fraction": 0.6113861203193665, "alphanum_fraction": 0.6782178282737732, "avg_line_length": 21.44444465637207, "blob_id": "56353ebaff7d52619e6fa0fd4c34d08131ef7898", "content_id": "8e89b1b52857b8a54cd7b5fcd970c53440f4c16e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 404, "license_type": "no_license", "max_line_length": 53, "num_lines": 18, "path": "/test/test_check.py", "repo_name": "bonnetn/backend_adh", "src_encoding": "UTF-8", "text": "import pytest\n\nfrom adh.util import checks\nfrom test import resource\n\n\[email protected]('mac', [\n \"12:34:56:78:9A:BC\", # OK\n \"DE:F0:00:00:00:00\",\n \"12:34:56:78:9a:bc\", # lowercased, OK\n])\ndef test_check_valid_mac(mac):\n assert checks.isMac(mac) is True\n\n\[email protected]('mac', resource.INVALID_MAC)\ndef test_check_invalid_mac(mac):\n assert checks.isMac(mac) is False\n" }, { "alpha_fraction": 0.8611111044883728, "alphanum_fraction": 0.8611111044883728, "avg_line_length": 8, "blob_id": "5c8fa7c7eabe1eca8dd2a10fcef1b4875ac4ed77", "content_id": "0f44972c2a5e1febac358eaf511a5ecda68030c2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 108, "license_type": "no_license", "max_line_length": 15, "num_lines": 12, "path": "/requirements.txt", "repo_name": "bonnetn/backend_adh", "src_encoding": "UTF-8", "text": "pytest\nflask\nconnexion\ngevent\nflask_cors\nSQLAlchemy\nmysqlclient\npip\npytest-cov\npython-dateutil\npysnmp\nuwsgi\n" }, { "alpha_fraction": 0.5053501725196838, "alphanum_fraction": 0.5097275972366333, "avg_line_length": 31.634920120239258, "blob_id": "de0af9cfc274a6bb1ea9c80a997b4ab5feb47d51", "content_id": "4d83cf31496a69c5dbc35948ca002343d1d7e1f7", "detected_licenses": 
[], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2056, "license_type": "no_license", "max_line_length": 77, "num_lines": 63, "path": "/adh/snmp.py", "repo_name": "bonnetn/backend_adh", "src_encoding": "UTF-8", "text": "from pysnmp.hlapi import (\n setCmd, SnmpEngine, CommunityData, ObjectType, ObjectIdentity, Integer,\n UdpTransportTarget, ContextData, getCmd\n)\n\n\nclass SNMPError(Exception):\n def __init__(self, value):\n self.value = value\n\n\nclass SNMPManager:\n\n def __init__(self, server, secret, port=161):\n self.server = server\n self.secret = secret\n self.port = port\n\n def change_value(self, req, oid, value):\n\n try:\n errorIndication, errorStatus, errorIndex, varBinds = next(\n setCmd(\n SnmpEngine(), CommunityData(\n self.secret), UdpTransportTarget(\n (self.server, self.port)), ContextData(), ObjectType(\n ObjectIdentity(\n req + str(oid)), Integer(value))), )\n except Exception:\n raise\n else:\n if errorIndication:\n return SNMPError(errorIndication)\n elif errorStatus:\n return SNMPError(errorStatus)\n else:\n if len(varBinds) == 1:\n return varBinds[0][1]\n else:\n raise SNMPError(\"Multiple MIB variables returned.\")\n\n def make_request(self, req, oid):\n try:\n errorIndication, errorStatus, errorIndex, varBinds = next(\n getCmd(SnmpEngine(),\n CommunityData(self.secret),\n UdpTransportTarget((self.server, self.port)),\n ContextData(),\n ObjectType(ObjectIdentity(req + str(oid)))),\n )\n except Exception:\n raise\n else:\n\n if errorIndication:\n return SNMPError(errorIndication)\n elif errorStatus:\n return SNMPError(errorStatus)\n else:\n if len(varBinds) == 1:\n return varBinds[0][1]\n else:\n raise SNMPError(\"Multiple MIB variables returned.\")\n" }, { "alpha_fraction": 0.5499964952468872, "alphanum_fraction": 0.5732978582382202, "avg_line_length": 29.536325454711914, "blob_id": "9ddbc4d32a178862c0a3a77fc700a3f89d73725e", "content_id": "d31114d95031d005e2e8c14fe320e33cbd1e41c5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 14291, "license_type": "no_license", "max_line_length": 78, "num_lines": 468, "path": "/test/test_device.py", "repo_name": "bonnetn/backend_adh", "src_encoding": "UTF-8", "text": "import json\n\nimport pytest\nfrom adh.model.database import Database as db\nfrom CONFIGURATION import TEST_DATABASE as db_settings\nfrom adh.model.models import Ordinateur, Portable, Adherent\n\nfrom .resource import (\n base_url, INVALID_MAC, INVALID_IP, INVALID_IPv6, TEST_HEADERS\n)\n\n\[email protected]\ndef member1():\n yield Adherent(\n nom='Dubois',\n prenom='Jean-Louis',\n mail='[email protected]',\n login='dubois_j',\n password='a',\n )\n\n\[email protected]\ndef member2():\n yield Adherent(\n nom='Reignier',\n prenom='Edouard',\n mail='[email protected]',\n login='reignier',\n password='b',\n )\n\n\[email protected]\ndef wired_device(member1):\n yield Ordinateur(\n mac='96:24:F6:D0:48:A7',\n ip='157.159.42.42',\n dns='bonnet_n4651',\n adherent=member1,\n ipv6='e91f:bd71:56d9:13f3:5499:25b:cc84:f7e4'\n )\n\n\[email protected]\ndef wireless_device(member2):\n yield Portable(\n mac='80:65:F3:FC:44:A9',\n adherent=member2,\n )\n\n\[email protected]\ndef wireless_device_dict():\n '''\n Device that will be inserted/updated when tests are run.\n It is not present in the api_client by default\n '''\n yield {\n 'mac': '01:23:45:67:89:AC',\n 'ipAddress': '127.0.0.1',\n 'ipv6Address': 'c69f:6c5:754c:d301:df05:ba81:76a8:ddc4',\n 'connectionType': 'wireless',\n 'username': 
'dubois_j'\n }\n\n\[email protected]\ndef wired_device_dict():\n yield {\n 'mac': '01:23:45:67:89:AD',\n 'ipAddress': '127.0.0.1',\n 'ipv6Address': 'dbb1:39b7:1e8f:1a2a:3737:9721:5d16:166',\n 'connectionType': 'wired',\n 'username': 'dubois_j'\n }\n\n\ndef prep_db(session,\n member1,\n member2,\n wired_device,\n wireless_device):\n session.add_all([\n member1,\n member2,\n wired_device,\n wireless_device\n ])\n session.commit()\n\n\[email protected]\ndef api_client(member1,\n member2,\n wired_device,\n wireless_device):\n from .context import app\n with app.app.test_client() as c:\n db.init_db(db_settings, testing=True)\n prep_db(db.get_db().get_session(),\n member1,\n member2,\n wired_device,\n wireless_device)\n yield c\n\n\ndef test_device_filter_all_devices(api_client):\n r = api_client.get(\n '{}/device/'.format(base_url),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n\n response = json.loads(r.data.decode('utf-8'))\n assert len(response) == 2\n\n\[email protected]('user,expected', [\n ('reignier', 1),\n ('dubois_j', 1),\n ('gates_bi', 0), # Non existant user\n ('dubois', 0), # Exact match\n])\ndef test_device_filter_wired_by_username(\n api_client, user, expected):\n r = api_client.get(\n '{}/device/?username={}'.format(\n base_url,\n user\n ),\n headers=TEST_HEADERS\n )\n assert r.status_code == 200\n\n response = json.loads(r.data.decode('utf-8'))\n assert len(response) == expected\n\n\[email protected]('terms,expected', [\n ('96:24:F6:D0:48:A7', 1), # Should find sample wired device\n ('96:', 1),\n ('e91f', 1),\n ('157.159', 1),\n ('80:65:F3:FC:44:A9', 1), # Should find sample wireless device\n ('F3:FC', 1),\n (':', 2), # Should find everything\n ('00:', 0), # Should find nothing\n])\ndef test_device_filter_by_terms(\n api_client, wired_device, terms, expected):\n r = api_client.get(\n '{}/device/?terms={}'.format(\n base_url,\n terms,\n ),\n headers=TEST_HEADERS\n )\n assert r.status_code == 200\n\n response = json.loads(r.data.decode('utf-8'))\n assert len(response) == expected\n\n\ndef test_device_filter_invalid_limit(api_client, member1):\n r = api_client.get(\n '{}/device/?limit={}'.format(base_url, -1),\n headers=TEST_HEADERS\n )\n assert r.status_code == 400\n\n\ndef test_device_filter_hit_limit(api_client, member1):\n s = db.get_db().get_session()\n LIMIT = 10\n\n # Create a lot of devices\n for i in range(LIMIT*2):\n suffix = \"{0:04X}\".format(i)\n dev = Portable(\n adherent=member1,\n mac='00:00:00:00:'+suffix[:2]+\":\"+suffix[2:]\n )\n s.add(dev)\n s.commit()\n\n r = api_client.get(\n '{}/device/?limit={}'.format(base_url, LIMIT),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n\n response = json.loads(r.data.decode('utf-8'))\n assert len(response) == LIMIT\n\n\ndef test_device_put_create_wireless_without_ip(api_client,\n wireless_device_dict):\n ''' Can create a valid wireless device ? '''\n del wireless_device_dict['ipAddress']\n addr = '{}/device/{}'.format(base_url, wireless_device_dict['mac'])\n r = api_client.put(addr,\n data=json.dumps(wireless_device_dict),\n content_type='application/json',\n headers=TEST_HEADERS)\n assert r.status_code == 201\n\n\ndef test_device_put_create_wireless(api_client, wireless_device_dict):\n ''' Can create a valid wireless device ? 
'''\n addr = '{}/device/{}'.format(base_url, wireless_device_dict['mac'])\n r = api_client.put(addr,\n data=json.dumps(wireless_device_dict),\n content_type='application/json',\n headers=TEST_HEADERS)\n assert r.status_code == 201\n\n\ndef test_device_put_create_wired_without_ip(api_client, wired_device_dict):\n ''' Can create a valid wired device ? '''\n del wired_device_dict['ipAddress']\n r = api_client.put('{}/device/{}'.format(base_url,\n wired_device_dict['mac']),\n data=json.dumps(wired_device_dict),\n content_type='application/json',\n headers=TEST_HEADERS)\n assert r.status_code == 201\n\n\ndef test_device_put_create_wired(api_client, wired_device_dict):\n ''' Can create a valid wired device ? '''\n r = api_client.put('{}/device/{}'.format(base_url,\n wired_device_dict['mac']),\n data=json.dumps(wired_device_dict),\n content_type='application/json',\n headers=TEST_HEADERS)\n assert r.status_code == 201\n\n\ndef test_device_put_create_different_mac_addresses(api_client,\n wired_device_dict):\n ''' Create with invalid mac address '''\n wired_device_dict['mac'] = \"11:11:11:11:11:11\"\n r = api_client.put('{}/device/{}'.format(base_url, \"22:22:22:22:22:22\"),\n data=json.dumps(wired_device_dict),\n content_type='application/json',\n headers=TEST_HEADERS)\n assert r.status_code == 400\n\n\[email protected]('test_mac', INVALID_MAC)\ndef test_device_put_create_invalid_mac_address(api_client,\n test_mac,\n wired_device_dict):\n ''' Create with invalid mac address '''\n wired_device_dict['mac'] = test_mac\n r = api_client.put(\n '{}/device/{}'.format(base_url, wired_device_dict['mac']),\n data=json.dumps(wired_device_dict),\n content_type='application/json',\n headers=TEST_HEADERS,\n )\n assert r.status_code == 400 or r.status_code == 405\n\n\[email protected]('test_ip', INVALID_IPv6)\ndef test_device_put_create_invalid_ipv6(api_client, test_ip,\n wired_device_dict):\n ''' Create with invalid ip address '''\n wired_device_dict['ipv6Address'] = test_ip\n r = api_client.put(\n '{}/device/{}'.format(base_url, wired_device_dict['mac']),\n data=json.dumps(wired_device_dict),\n content_type='application/json',\n headers=TEST_HEADERS,\n )\n assert r.status_code == 400\n\n\[email protected]('test_ip', INVALID_IP)\ndef test_device_put_create_invalid_ipv4(api_client, test_ip,\n wired_device_dict):\n ''' Create with invalid ip address '''\n wired_device_dict['ipAddress'] = test_ip\n r = api_client.put(\n '{}/device/{}'.format(base_url, wired_device_dict['mac']),\n data=json.dumps(wired_device_dict),\n content_type='application/json',\n headers=TEST_HEADERS,\n )\n assert r.status_code == 400\n\n\ndef test_device_put_create_invalid_username(api_client, wired_device_dict):\n ''' Create with invalid mac address '''\n wired_device_dict['username'] = 'abcdefgh'\n r = api_client.put(\n '{}/device/{}'.format(base_url, wired_device_dict['mac']),\n data=json.dumps(wired_device_dict),\n content_type='application/json',\n headers=TEST_HEADERS,\n )\n assert r.status_code == 400\n\n\ndef test_device_put_update_wireless(api_client, wireless_device,\n wireless_device_dict):\n ''' Can update a valid wireless device ? '''\n r = api_client.put(\n '{}/device/{}'.format(base_url, wireless_device.mac),\n data=json.dumps(wireless_device_dict),\n content_type='application/json',\n headers=TEST_HEADERS)\n assert r.status_code == 204\n\n\ndef test_device_put_update_wired(api_client, wired_device, wired_device_dict):\n ''' Can update a valid wired device ? 
'''\n r = api_client.put(\n '{}/device/{}'.format(base_url, wired_device.mac),\n data=json.dumps(wired_device_dict),\n content_type='application/json',\n headers=TEST_HEADERS)\n assert r.status_code == 204\n\n\ndef test_device_put_update_wired_to_wireless(api_client, wired_device,\n wireless_device_dict):\n ''' Can update a valid wired device and cast it into a wireless d ? '''\n r = api_client.put(\n '{}/device/{}'.format(base_url, wired_device.mac),\n data=json.dumps(wireless_device_dict),\n content_type='application/json',\n headers=TEST_HEADERS)\n assert r.status_code == 204\n\n\ndef test_device_put_update_wireless_to_wired(api_client,\n wireless_device,\n wired_device_dict):\n ''' Can update a valid wireless device and cast it into a wired d ? '''\n r = api_client.put(\n '{}/device/{}'.format(base_url, wireless_device.mac),\n data=json.dumps(wired_device_dict),\n content_type='application/json',\n headers=TEST_HEADERS)\n assert r.status_code == 204\n\n\ndef test_device_put_update_wired_and_wireless_to_wireless(\n api_client,\n wired_device,\n wireless_device_dict):\n '''\n Test if the controller is able to handle the case where the MAC address is\n in the Wireless table _AND_ the Wired table\n Tests the case where we want to move the mac to the wireless table\n '''\n # Add a wireless device that has the same mac as WIRED_DEVICE\n dev_with_same_mac = Portable(\n mac=wired_device.mac,\n adherent_id=1,\n )\n session = db.get_db().get_session()\n session.add(dev_with_same_mac)\n session.commit()\n\n # Then try to update it...\n r = api_client.put(\n '{}/device/{}'.format(base_url, wired_device.mac),\n data=json.dumps(wireless_device_dict),\n content_type='application/json',\n headers=TEST_HEADERS)\n assert r.status_code == 204\n\n\ndef test_device_put_update_wired_and_wireless_to_wired(api_client,\n wireless_device,\n wired_device_dict):\n '''\n Test if the controller is able to handle the case where the MAC address is\n in the Wireless table _AND_ the Wired table\n Tests the case where we want to move the mac to the wired table\n '''\n # Add a wired device that has the same mac as WIRELESS_DEVICE\n dev_with_same_mac = Ordinateur(\n mac=wireless_device.mac,\n adherent_id=1,\n )\n session = db.get_db().get_session()\n session.add(dev_with_same_mac)\n session.commit()\n\n # Then try to update it...\n r = api_client.put(\n '{}/device/{}'.format(base_url, wireless_device.mac),\n data=json.dumps(wired_device_dict),\n content_type='application/json',\n headers=TEST_HEADERS)\n assert r.status_code == 204\n\n\ndef test_device_get_unknown_mac(api_client):\n mac = '00:00:00:00:00:00'\n r = api_client.get(\n '{}/device/{}'.format(base_url, mac),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 404\n\n\ndef test_device_get_valid_wired(api_client, wired_device):\n mac = wired_device.mac\n r = api_client.get(\n '{}/device/{}'.format(base_url, mac),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n assert json.loads(r.data.decode('utf-8'))\n\n\ndef test_device_get_valid_wireless(api_client, wireless_device):\n mac = wireless_device.mac\n r = api_client.get(\n '{}/device/{}'.format(base_url, mac),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n assert json.loads(r.data.decode('utf-8'))\n\n\ndef test_device_delete_wired(api_client, wired_device):\n mac = wired_device.mac\n r = api_client.delete(\n '{}/device/{}'.format(base_url, mac),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 204\n\n s = db.get_db().get_session()\n q = s.query(Ordinateur)\n q = 
q.filter(Ordinateur.mac == mac)\n assert not s.query(q.exists()).scalar(), \"Object not actually deleted\"\n\n\ndef test_device_delete_wireless(api_client, wireless_device):\n mac = wireless_device.mac\n r = api_client.delete(\n '{}/device/{}'.format(base_url, mac),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 204\n\n s = db.get_db().get_session()\n q = s.query(Portable)\n q = q.filter(Portable.mac == mac)\n assert not s.query(q.exists()).scalar(), \"Object not actually deleted\"\n\n\ndef test_device_delete_unexistant(api_client):\n mac = '00:00:00:00:00:00'\n r = api_client.delete(\n '{}/device/{}'.format(base_url, mac),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 404\n" }, { "alpha_fraction": 0.7180156707763672, "alphanum_fraction": 0.7232375741004944, "avg_line_length": 10.264705657958984, "blob_id": "e7ba6c1108f13784c41126e9abf0059b01b8b682", "content_id": "1add9f3856dd577783537dc2acac0ca6a9f9a342", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 383, "license_type": "no_license", "max_line_length": 33, "num_lines": 34, "path": "/adh/exceptions.py", "repo_name": "bonnetn/backend_adh", "src_encoding": "UTF-8", "text": "class InvalidEmail(ValueError):\n pass\n\n\nclass InvalidIPv6(ValueError):\n pass\n\n\nclass InvalidIPv4(ValueError):\n pass\n\n\nclass InvalidMac(ValueError):\n pass\n\n\nclass UserNotFound(ValueError):\n pass\n\n\nclass SwitchNotFound(ValueError):\n pass\n\n\nclass PortNotFound(ValueError):\n pass\n\n\nclass VlanNotFound(ValueError):\n pass\n\n\nclass RoomNotFound(ValueError):\n pass\n" }, { "alpha_fraction": 0.7220902442932129, "alphanum_fraction": 0.7553443908691406, "avg_line_length": 18.136363983154297, "blob_id": "5b59978be85cc826a054b2c8f6db0ca6654df860", "content_id": "d8352b88462c91b42482ab24cf2698ea8b3c80eb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 421, "license_type": "no_license", "max_line_length": 43, "num_lines": 22, "path": "/adh/controller/snmp_port.py", "repo_name": "bonnetn/backend_adh", "src_encoding": "UTF-8", "text": "from connexion import NoContent\nfrom adh.auth import auth_simple_user\n\n\n@auth_simple_user\ndef getPortStatus(switchID, portID):\n return 204, NoContent, True\n\n\n@auth_simple_user\ndef setPortStatus(switchID, portID, state):\n return 200, NoContent\n\n\n@auth_simple_user\ndef getPortVlan(switchID, portID):\n return 200, NoContent, 42\n\n\n@auth_simple_user\ndef setPortVlan(switchID, portID, vlan):\n return 204, NoContent\n" }, { "alpha_fraction": 0.5308001041412354, "alphanum_fraction": 0.541680634021759, "avg_line_length": 24.421276092529297, "blob_id": "d1d50cace8692e6acd094263b7e766c9352235e5", "content_id": "3f29428207abb721fa6b3e1f51b4ba93f55b2ec2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5974, "license_type": "no_license", "max_line_length": 74, "num_lines": 235, "path": "/test/test_modifications.py", "repo_name": "bonnetn/backend_adh", "src_encoding": "UTF-8", "text": "import pytest\nfrom adh.model.database import Database as db\nfrom CONFIGURATION import TEST_DATABASE as db_settings\nfrom adh.model.models import (\n Adherent, Chambre, Vlan, Modification, Utilisateur\n)\nimport datetime\n\n\[email protected]\ndef sample_vlan():\n yield Vlan(\n numero=42,\n adresses=\"192.168.42.1\",\n adressesv6=\"fe80::1\",\n )\n\n\[email protected]\ndef sample_room(sample_vlan):\n yield Chambre(\n numero=1234,\n description='chambre 
1',\n vlan=sample_vlan,\n )\n\n\[email protected]\ndef sample_member(sample_room):\n yield Adherent(\n nom='Dubois',\n prenom='Jean-Louis',\n mail='[email protected]',\n login='dubois_j',\n password='a',\n chambre=sample_room,\n )\n\n\[email protected]\ndef sample_member2(sample_room):\n yield Adherent(\n nom='Reignier',\n prenom='Edouard',\n mail='[email protected]',\n login='reignier',\n commentaires='Desauthent pour routeur',\n password='a',\n chambre=sample_room,\n )\n\n\ndef prep_db(session,\n sample_member,\n sample_room, sample_vlan):\n session.add_all([\n sample_room, sample_vlan,\n sample_member])\n session.commit()\n\n\[email protected]\ndef api_client(sample_member, sample_room, sample_vlan):\n from .context import app\n with app.app.test_client() as c:\n db.init_db(db_settings, testing=True)\n prep_db(db.get_db().get_session(),\n sample_member,\n sample_room,\n sample_vlan)\n yield c\n\n\ndef test_modification_pass_updated(api_client, sample_member):\n s = db.get_db().get_session()\n a = Adherent.find(s, sample_member.login)\n\n a.start_modif_tracking()\n a.password = \"TESTESTEST\"\n s.flush()\n\n # Build the corresponding modification\n Modification.add_and_commit(s, a, a.get_ruby_modif(),\n Utilisateur.find_or_create(s, \"test\"))\n q = s.query(Modification)\n m = q.first()\n assert m.action == (\n '--- !ruby/hash:ActiveSupport::HashWithIndifferentAccess\\n'\n 'password:\\n'\n '- a\\n'\n '- TESTESTEST\\n'\n )\n assert m.adherent_id == sample_member.id\n now = datetime.datetime.now()\n one_sec = datetime.timedelta(seconds=1)\n assert now - m.created_at < one_sec\n assert now - m.updated_at < one_sec\n assert m.utilisateur_id == 1\n\n\ndef test_modification_multiple_changes_updated(api_client, sample_member):\n s = db.get_db().get_session()\n a = Adherent.find(s, sample_member.login)\n\n a.start_modif_tracking()\n a.commentaires = \"Hey I am a comment\"\n a.nom = \"Test\"\n a.prenom = \"Test\"\n a.mail = \"[email protected]\"\n s.flush()\n\n # Build the corresponding modification\n Modification.add_and_commit(s, a, a.get_ruby_modif(),\n Utilisateur.find_or_create(s, \"test\"))\n q = s.query(Modification)\n m = q.first()\n assert m.action == (\n '--- !ruby/hash:ActiveSupport::HashWithIndifferentAccess\\n'\n 'commentaires:\\n'\n '- \\n'\n '- Hey I am a comment\\n'\n 'mail:\\n'\n '- [email protected]\\n'\n '- [email protected]\\n'\n 'nom:\\n'\n '- Dubois\\n'\n '- Test\\n'\n 'prenom:\\n'\n '- Jean-Louis\\n'\n '- Test\\n'\n )\n assert m.adherent_id == sample_member.id\n now = datetime.datetime.now()\n one_sec = datetime.timedelta(seconds=1)\n assert now - m.created_at < one_sec\n assert now - m.updated_at < one_sec\n assert m.utilisateur_id == 1\n\n\ndef test_modification_add_new_user(api_client, sample_member2):\n s = db.get_db().get_session()\n\n a = sample_member2\n s.add(a)\n s.flush()\n\n # Build the corresponding modification\n Modification.add_and_commit(s, a, a.get_ruby_modif(),\n Utilisateur.find_or_create(s, \"test\"))\n q = s.query(Modification)\n m = q.first()\n assert m.action == (\n '--- !ruby/hash:ActiveSupport::HashWithIndifferentAccess\\n'\n 'chambre_id:\\n'\n '- \\n'\n '- 1\\n'\n 'commentaires:\\n'\n '- \\n'\n '- Desauthent pour routeur\\n'\n 'id:\\n'\n '- \\n'\n '- 2\\n'\n 'login:\\n'\n '- \\n'\n '- reignier\\n'\n 'mail:\\n'\n '- \\n'\n '- [email protected]\\n'\n 'mode_association:\\n'\n '- \\n'\n '- 2011-04-30 17:50:17\\n'\n 'nom:\\n'\n '- \\n'\n '- Reignier\\n'\n 'password:\\n'\n '- \\n'\n '- a\\n'\n 'prenom:\\n'\n '- \\n'\n '- Edouard\\n'\n )\n assert 
m.adherent_id == a.id\n now = datetime.datetime.now()\n one_sec = datetime.timedelta(seconds=1)\n assert now - m.created_at < one_sec\n assert now - m.updated_at < one_sec\n assert m.utilisateur_id == 1\n\n\ndef test_modification_delete_member(api_client, sample_member):\n s = db.get_db().get_session()\n a = Adherent.find(s, sample_member.login)\n\n a.start_modif_tracking()\n s.delete(a)\n s.flush()\n\n # Build the corresponding modification\n Modification.add_and_commit(s, a, a.get_ruby_modif(),\n Utilisateur.find_or_create(s, \"test\"))\n q = s.query(Modification)\n m = q.first()\n assert m.action == (\n '--- !ruby/hash:ActiveSupport::HashWithIndifferentAccess\\n'\n 'chambre_id:\\n'\n '- 1\\n'\n '- \\n'\n 'id:\\n'\n '- 1\\n'\n '- \\n'\n 'login:\\n'\n '- dubois_j\\n'\n '- \\n'\n 'mail:\\n'\n '- [email protected]\\n'\n '- \\n'\n 'mode_association:\\n'\n '- 2011-04-30 17:50:17\\n'\n '- \\n'\n 'nom:\\n'\n '- Dubois\\n'\n '- \\n'\n 'password:\\n'\n '- a\\n'\n '- \\n'\n 'prenom:\\n'\n '- Jean-Louis\\n'\n '- \\n'\n )\n assert m.adherent_id == sample_member.id\n now = datetime.datetime.now()\n one_sec = datetime.timedelta(seconds=1)\n assert now - m.created_at < one_sec\n assert now - m.updated_at < one_sec\n assert m.utilisateur_id == 1\n" }, { "alpha_fraction": 0.5937746167182922, "alphanum_fraction": 0.6053979396820068, "avg_line_length": 25.164947509765625, "blob_id": "64df46f0afb2dbe80c7e1d88e643cefd6cab50de", "content_id": "56605032fee8eda004a9f7f484f46777581fa6b3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5076, "license_type": "no_license", "max_line_length": 72, "num_lines": 194, "path": "/adh/controller/user.py", "repo_name": "bonnetn/backend_adh", "src_encoding": "UTF-8", "text": "from connexion import NoContent\nfrom adh.model.database import Database as db\nfrom adh.model.models import Adherent, Chambre, Adhesion, Modification\nfrom adh.util.date import string_to_date\nfrom adh.exceptions import InvalidEmail, RoomNotFound, UserNotFound\nimport datetime\nimport sqlalchemy\nfrom adh.auth import auth_simple_user\nimport hashlib\n\n\ndef adherentExists(session, username):\n \"\"\" Returns true if the user exists \"\"\"\n try:\n Adherent.find(session, username)\n except UserNotFound:\n return False\n return True\n\n\n@auth_simple_user\ndef filterUser(admin, limit=100, offset=0, terms=None, roomNumber=None):\n \"\"\" [API] Filter the list of users from the the database \"\"\"\n if limit < 0:\n return \"Limit must be positive\", 400\n\n s = db.get_db().get_session()\n\n q = s.query(Adherent)\n if roomNumber:\n try:\n q2 = s.query(Chambre)\n q2 = q2.filter(Chambre.numero == roomNumber)\n result = q2.one()\n except sqlalchemy.orm.exc.NoResultFound:\n return [], 200, {\"X-Total-Count\": '0'}\n\n q = q.filter(Adherent.chambre == result)\n if terms:\n q = q.filter(\n (Adherent.nom.contains(terms)) |\n (Adherent.prenom.contains(terms)) |\n (Adherent.mail.contains(terms)) |\n (Adherent.login.contains(terms)) |\n (Adherent.commentaires.contains(terms))\n )\n count = q.count()\n q = q.order_by(Adherent.login.asc())\n q = q.offset(offset)\n q = q.limit(limit)\n r = q.all()\n headers = {\n \"X-Total-Count\": str(count),\n 'access-control-expose-headers': 'X-Total-Count'\n }\n return list(map(dict, r)), 200, headers\n\n\n@auth_simple_user\ndef getUser(admin, username):\n \"\"\" [API] Get the specified user from the database \"\"\"\n s = db.get_db().get_session()\n try:\n return dict(Adherent.find(s, username))\n except UserNotFound:\n 
return NoContent, 404\n\n\n@auth_simple_user\ndef deleteUser(admin, username):\n \"\"\" [API] Delete the specified User from the database \"\"\"\n s = db.get_db().get_session()\n\n # Find the soon-to-be deleted user\n try:\n a = Adherent.find(s, username)\n except UserNotFound:\n return NoContent, 404\n\n try:\n # if so, start tracking for modifications\n a.start_modif_tracking()\n\n # Actually delete it\n s.delete(a)\n s.flush()\n\n # Write it in the modification table\n Modification.add_and_commit(s, a, a.get_ruby_modif(), admin)\n except Exception:\n s.rollback()\n raise\n return NoContent, 204\n\n\n@auth_simple_user\ndef putUser(admin, username, body):\n \"\"\" [API] Create/Update user from the database \"\"\"\n s = db.get_db().get_session()\n\n # Create a valid object\n try:\n new_user = Adherent.from_dict(s, body)\n except InvalidEmail:\n return \"Invalid email\", 400\n except RoomNotFound:\n return \"No room found\", 400\n except ValueError:\n return \"String must not be empty\", 400\n\n try:\n # Check if it already exists\n update = adherentExists(s, username)\n\n if update:\n current_adh = Adherent.find(s, username)\n new_user.id = current_adh.id\n current_adh.start_modif_tracking()\n\n # Merge the object (will create a new if it doesn't exist)\n new_user = s.merge(new_user)\n s.flush()\n\n # Create the corresponding modification\n Modification.add_and_commit(s, new_user,\n new_user.get_ruby_modif(), admin)\n except Exception:\n s.rollback()\n raise\n\n if update:\n return NoContent, 204\n else:\n return NoContent, 201\n\n\n@auth_simple_user\ndef addMembership(admin, username, body):\n \"\"\" [API] Add a membership record in the database \"\"\"\n\n s = db.get_db().get_session()\n\n start = string_to_date(body[\"start\"])\n end = None\n if start and \"duration\" in body:\n duration = body[\"duration\"]\n end = start + datetime.timedelta(days=duration)\n\n try:\n s.add(Adhesion(\n adherent=Adherent.find(s, username),\n depart=start,\n fin=end\n ))\n except UserNotFound:\n return NoContent, 404\n\n s.commit()\n return NoContent, 200, {'Location': 'test'} # TODO: finish that!\n\n\ndef ntlm_hash(txt):\n \"\"\"\n NTLM hashing function\n wow much security such hashing function\n Needed by MSCHAPv2.\n \"\"\"\n\n return hashlib.new('md4', txt.encode('utf-16le')).hexdigest()\n\n\n@auth_simple_user\ndef updatePassword(admin, username, body):\n password = body[\"password\"]\n s = db.get_db().get_session()\n\n try:\n a = Adherent.find(s, username)\n except UserNotFound:\n return NoContent, 404\n\n try:\n a.start_modif_tracking()\n a.password = ntlm_hash(password)\n s.flush()\n\n # Build the corresponding modification\n Modification.add_and_commit(s, a, a.get_ruby_modif(), admin)\n\n except Exception:\n s.rollback()\n raise\n\n return NoContent, 204\n" }, { "alpha_fraction": 0.6047585010528564, "alphanum_fraction": 0.6193181872367859, "avg_line_length": 24.834861755371094, "blob_id": "053bdaffac734f31d3512fa56a09a7c0a56324e6", "content_id": "cfb461adb7b8e720a992a81ebc498fbdb7cfe1a7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2816, "license_type": "no_license", "max_line_length": 72, "num_lines": 109, "path": "/adh/controller/port.py", "repo_name": "bonnetn/backend_adh", "src_encoding": "UTF-8", "text": "from connexion import NoContent\nfrom adh.model.database import Database as db\nfrom sqlalchemy import or_\nfrom adh.exceptions import RoomNotFound, SwitchNotFound, PortNotFound\nfrom adh.model.models import Port, Chambre, 
Switch\nfrom adh.auth import auth_simple_user\n\n\n@auth_simple_user\ndef filterPort(admin, limit=100, offset=0,\n switchID=None, roomNumber=None, terms=None):\n \"\"\" [API] Filter the port list according to some criteria \"\"\"\n if limit < 0:\n return 'Limit must be a positive number', 400\n\n s = db.get_db().get_session()\n q = s.query(Port)\n if switchID:\n q = q.join(Switch)\n q = q.filter(Switch.id == switchID)\n if roomNumber:\n q = q.join(Chambre)\n q = q.filter(Chambre.numero == roomNumber)\n if terms:\n q = q.filter(or_(\n Port.numero.contains(terms),\n Port.oid.contains(terms),\n ))\n\n count = q.count()\n q = q.order_by(Port.switch_id.asc(), Port.numero.asc())\n q = q.offset(offset)\n q = q.limit(limit)\n result = q.all()\n\n result = map(dict, result)\n result = list(result)\n headers = {\n 'access-control-expose-headers': 'X-Total-Count',\n 'X-Total-Count': str(count)\n }\n return result, 200, headers\n\n\n@auth_simple_user\ndef createPort(admin, switchID, body):\n \"\"\" [API] Create a port in the database \"\"\"\n\n session = db.get_db().get_session()\n try:\n port = Port.from_dict(session, body)\n except SwitchNotFound:\n return \"Switch not found\", 400\n except RoomNotFound:\n return \"Room not found\", 400\n\n session.add(port)\n session.commit()\n headers = {\n 'Location': '/switch/{}/port/{}'.format(port.switch_id, port.id)\n }\n return NoContent, 200, headers\n\n\n@auth_simple_user\ndef getPort(admin, switchID, portID):\n \"\"\" [API] Get a port from the database \"\"\"\n s = db.get_db().get_session()\n try:\n result = Port.find(s, portID)\n except PortNotFound:\n return NoContent, 404\n\n result = dict(result)\n return result, 200\n\n\n@auth_simple_user\ndef updatePort(admin, switchID, portID, body):\n \"\"\" [API] Update a port in the database \"\"\"\n\n s = db.get_db().get_session()\n\n try:\n new_port = Port.from_dict(s, body)\n except SwitchNotFound:\n return \"Switch not found\", 400\n\n try:\n new_port.id = Port.find(s, portID).id\n except PortNotFound:\n return \"Port not found\", 404\n\n s.merge(new_port)\n s.commit()\n\n return NoContent, 204\n\n\n@auth_simple_user\ndef deletePort(admin, switchID, portID):\n \"\"\" [API] Delete a port from the database \"\"\"\n session = db.get_db().get_session()\n try:\n session.delete(Port.find(session, portID))\n except PortNotFound:\n return NoContent, 404\n session.commit()\n return NoContent, 204\n" }, { "alpha_fraction": 0.6620370149612427, "alphanum_fraction": 0.665123462677002, "avg_line_length": 28.454545974731445, "blob_id": "22edc916ca215f4e184f40ae15b3734b566bf906", "content_id": "8bc9059facba6fc9182fa472f4ccf151882f19dd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1296, "license_type": "no_license", "max_line_length": 74, "num_lines": 44, "path": "/adh/model/database.py", "repo_name": "bonnetn/backend_adh", "src_encoding": "UTF-8", "text": "from sqlalchemy.orm import sessionmaker\nfrom sqlalchemy.orm import scoped_session\nfrom sqlalchemy.ext.declarative import declarative_base\nfrom sqlalchemy.engine.url import URL\nfrom sqlalchemy import create_engine, event\n\n\nBase = declarative_base()\n\n\nclass Database():\n\n def __init__(self, db_settings, testing=False):\n self.engine = create_engine(URL(**db_settings), pool_recycle=3600)\n @event.listens_for(self.engine, \"connect\")\n def do_connect(dbapi_connection, connection_record):\n dbapi_connection.isolation_level = None\n\n @event.listens_for(self.engine, 
\"begin\")\n def do_begin(conn):\n # emit our own BEGIN\n conn.execute(\"BEGIN\")\n self.db_session = scoped_session(sessionmaker(bind=self.engine))\n if testing:\n Base.metadata.drop_all(self.engine)\n Base.metadata.create_all(self.engine)\n self.testing = testing\n\n self.db_session().begin_nested()\n\n def get_session(self):\n return self.db_session()\n\n def remove_session(self):\n return self.db_session.remove()\n\n db = None\n\n def init_db(settings, testing=False):\n Database.db = Database(settings, testing=testing)\n\n def get_db():\n return Database.db\n" }, { "alpha_fraction": 0.6234652400016785, "alphanum_fraction": 0.6391541361808777, "avg_line_length": 25.89908218383789, "blob_id": "5dd7fe5a14aece15462e1909f38f0993f31b6dc8", "content_id": "47a6900b856f644db79cb025afea25457521442a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2932, "license_type": "no_license", "max_line_length": 71, "num_lines": 109, "path": "/adh/controller/switch.py", "repo_name": "bonnetn/backend_adh", "src_encoding": "UTF-8", "text": "from connexion import NoContent\nfrom sqlalchemy import or_\nfrom adh.model.database import Database as db\nfrom adh.model.models import Switch\nfrom adh.exceptions import InvalidIPv4, SwitchNotFound\nfrom adh.auth import auth_simple_user\n\n\ndef switchExists(session, switchID):\n \"\"\" Return true if the switch exists \"\"\"\n try:\n Switch.find(session, switchID)\n except SwitchNotFound:\n return False\n return True\n\n\n@auth_simple_user\ndef filterSwitch(admin, limit=100, offset=0, terms=None):\n \"\"\" [API] Filter the switch list \"\"\"\n if limit < 0:\n return \"Limit must be positive\", 400\n q = db.get_db().get_session().query(Switch)\n # Filter by terms\n if terms:\n q = q.filter(or_(\n Switch.description.contains(terms),\n Switch.ip.contains(terms),\n Switch.communaute.contains(terms),\n ))\n count = q.count()\n q = q.order_by(Switch.description.asc())\n q = q.offset(offset)\n q = q.limit(limit) # Limit the number of matches\n q = q.all()\n\n # Convert the qs into data suited for the API\n q = map(lambda x: {'switchID': x.id, 'switch': dict(x)}, q)\n result = list(q) # Cast generator as list\n\n headers = {\n 'access-control-expose-headers': 'X-Total-Count',\n 'X-Total-Count': str(count)\n }\n return result, 200, headers\n\n\n@auth_simple_user\ndef createSwitch(admin, body):\n \"\"\" [API] Create a switch in the database \"\"\"\n if \"id\" in body:\n return \"You cannot set the id\", 400\n session = db.get_db().get_session()\n try:\n switch = Switch.from_dict(session, body)\n except InvalidIPv4:\n return \"Invalid IPv4\", 400\n session.add(switch)\n session.commit()\n\n return NoContent, 201, {'Location': '/switch/{}'.format(switch.id)}\n\n\n@auth_simple_user\ndef getSwitch(admin, switchID):\n \"\"\" [API] Get the specified switch from the database \"\"\"\n session = db.get_db().get_session()\n try:\n return dict(Switch.find(session, switchID))\n except SwitchNotFound:\n return NoContent, 404\n\n\n@auth_simple_user\ndef updateSwitch(admin, switchID, body):\n \"\"\" [API] Update the specified switch from the database \"\"\"\n if \"id\" in body:\n return \"You cannot update the id\", 400\n\n session = db.get_db().get_session()\n if not switchExists(session, switchID):\n return NoContent, 404\n\n try:\n switch = Switch.from_dict(session, body)\n switch.id = switchID\n except InvalidIPv4:\n return \"Invalid IPv4\", 400\n\n session.merge(switch)\n session.commit()\n\n return NoContent, 
204\n\n\n@auth_simple_user\ndef deleteSwitch(admin, switchID):\n \"\"\" [API] Delete the specified switch from the database \"\"\"\n session = db.get_db().get_session()\n\n try:\n switch = Switch.find(session, switchID)\n except SwitchNotFound:\n return NoContent, 404\n\n session.delete(switch)\n session.commit()\n\n return NoContent, 204\n" }, { "alpha_fraction": 0.5678279995918274, "alphanum_fraction": 0.5737583637237549, "avg_line_length": 24.452829360961914, "blob_id": "aa242f02ab4c3c4b9cd84b85d5f42c16b660856d", "content_id": "1522bc0816c9d7f053d612f47282b49952f4a659", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1349, "license_type": "no_license", "max_line_length": 66, "num_lines": 53, "path": "/adh/auth.py", "repo_name": "bonnetn/backend_adh", "src_encoding": "UTF-8", "text": "import requests\nimport requests.exceptions\nfrom flask import current_app\nfrom connexion import NoContent\nfrom adh.model.database import Database as db\nfrom adh.model.models import Utilisateur\n\n\ndef get_groups(token):\n try:\n headers = {\"Authorization\": \"Bearer \" + token}\n r = requests.get(\n current_app.config[\"AUTH_SERVER_ADDRESS\"] + \"/api/me\",\n headers=headers,\n timeout=1\n )\n except requests.exceptions.ReadTimeout:\n return None\n\n if r.status_code != 200 or \"uid\" not in r.json():\n return None\n\n return r.json()\n\n\ndef token_info(access_token) -> dict:\n\n if current_app.config[\"TESTING\"]:\n return {\n \"uid\": \"TestingClient\",\n \"scope\": [\"profile\"],\n \"groups\": []\n }\n\n infos = get_groups(access_token)\n if not infos:\n return None\n return {\n \"uid\": infos[\"uid\"],\n \"scope\": [\"profile\"],\n \"groups\": infos[\"groups\"]\n }\n\n\ndef auth_simple_user(f):\n def wrapper(*args, user, token_info, **kwargs):\n if current_app.config[\"TESTING\"] \\\n or \"adh6_user\" in token_info[\"groups\"]:\n s = db.get_db().get_session()\n admin = Utilisateur.find_or_create(s, user)\n return f(admin, *args, **kwargs)\n return NoContent, 401\n return wrapper\n" }, { "alpha_fraction": 0.555189311504364, "alphanum_fraction": 0.5878972411155701, "avg_line_length": 25.469799041748047, "blob_id": "c72e032af362ee3b634c0824f4ebba9b04fd4ea3", "content_id": "da284908cf98babec20d0156c408041c8b2386e8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 11832, "license_type": "no_license", "max_line_length": 74, "num_lines": 447, "path": "/test/test_user.py", "repo_name": "bonnetn/backend_adh", "src_encoding": "UTF-8", "text": "import json\nimport pytest\nfrom adh.model.database import Database as db\nfrom CONFIGURATION import TEST_DATABASE as db_settings\nfrom test.resource import base_url, TEST_HEADERS\nfrom adh.model.models import Adherent, Chambre, Vlan, Modification\nfrom dateutil import parser\nfrom adh.controller.user import ntlm_hash\n\n\[email protected]\ndef sample_vlan():\n yield Vlan(\n numero=42,\n adresses=\"192.168.42.1\",\n adressesv6=\"fe80::1\",\n )\n\n\[email protected]\ndef sample_room(sample_vlan):\n yield Chambre(\n numero=1234,\n description='chambre 1',\n vlan=sample_vlan,\n )\n\n\[email protected]\ndef sample_room2(sample_vlan):\n yield Chambre(\n numero=1111,\n description='chambre 2',\n vlan=sample_vlan,\n )\n\n\[email protected]\ndef sample_member(sample_room):\n yield Adherent(\n nom='Dubois',\n prenom='Jean-Louis',\n mail='[email protected]',\n login='dubois_j',\n password='a',\n chambre=sample_room,\n )\n\n\[email protected]\ndef 
sample_member2(sample_room2):\n yield Adherent(\n nom='Reignier',\n prenom='Edouard',\n mail='[email protected]',\n login='reignier',\n commentaires='Desauthent pour routeur',\n password='a',\n chambre=sample_room2,\n )\n\n\[email protected]\ndef sample_member3(sample_room2):\n \"\"\" Membre sans chambre \"\"\"\n yield Adherent(\n nom='Robert',\n prenom='Dupond',\n mail='[email protected]',\n login='dupond_r',\n commentaires='a',\n password='a',\n )\n\n\ndef prep_db(session,\n sample_member, sample_member2, sample_member3,\n sample_room, sample_room2, sample_vlan):\n session.add_all([\n sample_room, sample_room2,\n sample_member, sample_member2, sample_member3])\n session.commit()\n\n\[email protected]\ndef api_client(sample_member, sample_member2, sample_member3,\n sample_room, sample_room2, sample_vlan):\n from .context import app\n with app.app.test_client() as c:\n db.init_db(db_settings, testing=True)\n prep_db(db.get_db().get_session(),\n sample_member,\n sample_member2,\n sample_member3,\n sample_room,\n sample_room2,\n sample_vlan)\n yield c\n\n\ndef assert_user_in_db(body):\n # Actually check that the object was inserted\n s = db.get_db().get_session()\n q = s.query(Adherent)\n q = q.filter(Adherent.login == body[\"username\"])\n r = q.one()\n assert r.nom == body[\"lastName\"]\n assert r.prenom == body[\"firstName\"]\n assert r.mail == body[\"email\"]\n print(r.date_de_depart)\n assert r.date_de_depart == parser.parse(body[\"departureDate\"]).date()\n asso_time = parser.parse(body[\"associationMode\"]).replace(tzinfo=None)\n assert r.mode_association == asso_time\n assert r.chambre.numero == body[\"roomNumber\"]\n assert r.commentaires == body[\"comment\"]\n assert r.login == body[\"username\"]\n\n\ndef assert_one_modification_created(username):\n s = db.get_db().get_session()\n q = s.query(Modification)\n assert q.count() == 1\n\n\ndef test_user_to_dict(sample_member):\n dict_member = {'email': '[email protected]',\n 'firstName': 'Jean-Louis',\n 'lastName': 'Dubois',\n 'username': 'dubois_j',\n 'roomNumber': 1234}\n\n assert dict(sample_member) == dict_member\n\n\ndef test_user_filter_all(api_client):\n r = api_client.get(\n '{}/user/'.format(base_url),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n\n response = json.loads(r.data.decode('utf-8'))\n assert len(response) == 3\n\n\ndef test_user_filter_all_with_invalid_limit(api_client):\n r = api_client.get(\n '{}/user/?limit={}'.format(base_url, -1),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 400\n\n\ndef test_user_filter_all_with_limit(api_client):\n r = api_client.get(\n '{}/user/?limit={}'.format(base_url, 1),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n\n response = json.loads(r.data.decode('utf-8'))\n assert len(response) == 1\n\n\ndef test_user_filter_by_room_number(api_client):\n r = api_client.get(\n '{}/user/?roomNumber={}'.format(base_url, 1234),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n\n response = json.loads(r.data.decode('utf-8'))\n assert len(response) == 1\n\n\ndef test_user_filter_by_non_existant_room_number(api_client):\n r = api_client.get(\n '{}/user/?roomNumber={}'.format(base_url, 6666),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n\n response = json.loads(r.data.decode('utf-8'))\n assert len(response) == 0\n\n\ndef test_user_filter_terms_first_name(api_client):\n r = api_client.get(\n '{}/user/?terms={}'.format(base_url, \"Jean\"),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n\n response = 
json.loads(r.data.decode('utf-8'))\n assert len(response) == 1\n\n\ndef test_user_filter_terms_last_name(api_client):\n r = api_client.get(\n '{}/user/?terms={}'.format(base_url, \"ubois\"),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n\n response = json.loads(r.data.decode('utf-8'))\n assert len(response) == 1\n\n\ndef test_user_filter_terms_email(api_client):\n r = api_client.get(\n '{}/user/?terms={}'.format(base_url, \"bgdu78\"),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n\n response = json.loads(r.data.decode('utf-8'))\n assert len(response) == 1\n\n\ndef test_user_filter_terms_login(api_client):\n r = api_client.get(\n '{}/user/?terms={}'.format(base_url, \"dubois_j\"),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n\n response = json.loads(r.data.decode('utf-8'))\n assert len(response) == 1\n\n\ndef test_user_filter_terms_comment(api_client):\n r = api_client.get(\n '{}/user/?terms={}'.format(base_url, \"routeur\"),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n\n response = json.loads(r.data.decode('utf-8'))\n assert len(response) == 1\n\n\ndef test_user_filter_terms_nonexistant(api_client):\n r = api_client.get(\n '{}/user/?terms={}'.format(base_url, \"azerty\"),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n\n response = json.loads(r.data.decode('utf-8'))\n assert len(response) == 0\n\n\ndef test_user_filter_terms_test_upper_case(api_client):\n r = api_client.get(\n '{}/user/?terms={}'.format(base_url, \"DUBOIS_J\"),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n\n response = json.loads(r.data.decode('utf-8'))\n assert len(response) == 1\n\n\ndef test_user_get_existant(api_client):\n r = api_client.get(\n '{}/user/{}'.format(base_url, \"dubois_j\"),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n assert json.loads(r.data.decode('utf-8'))\n\n\ndef test_user_get_nonexistant(api_client):\n r = api_client.get(\n '{}/user/{}'.format(base_url, \"bond_jam\"),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 404\n\n\ndef test_user_delete_existant(api_client):\n r = api_client.delete(\n '{}/user/{}'.format(base_url, \"dubois_j\"),\n headers=TEST_HEADERS\n )\n assert r.status_code == 204\n\n s = db.get_db().get_session()\n q = s.query(Adherent)\n q = q.filter(Adherent.login == \"dubois_j\")\n assert not s.query(q.exists()).scalar()\n\n\ndef test_user_delete_non_existant(api_client):\n r = api_client.delete(\n '{}/user/{}'.format(base_url, \"azerty\"),\n headers=TEST_HEADERS\n )\n assert r.status_code == 404\n\n\ndef test_user_put_user_create_invalid_email(api_client):\n body = {\n \"firstName\": \"John\",\n \"lastName\": \"Doe\",\n \"roomNumber\": 1111,\n \"comment\": \"comment\",\n \"departureDate\": \"2000-01-23T04:56:07.000+00:00\",\n \"associationMode\": \"2000-01-23T04:56:07.000+00:00\",\n \"email\": \"INVALID_EMAIL\",\n \"username\": \"doe_john\"\n }\n res = api_client.put(\n '{}/user/{}'.format(base_url, body[\"username\"]),\n data=json.dumps(body),\n content_type='application/json',\n headers=TEST_HEADERS\n )\n assert res.status_code == 400\n\n\ndef test_user_put_user_create_unknown_room(api_client):\n body = {\n \"firstName\": \"John\",\n \"lastName\": \"Doe\",\n \"roomNumber\": 9999,\n \"comment\": \"comment\",\n \"departureDate\": \"2000-01-23T04:56:07.000+00:00\",\n \"associationMode\": \"2000-01-23T04:56:07.000+00:00\",\n \"email\": \"[email protected]\",\n \"username\": \"doe_john\"\n }\n res = api_client.put(\n '{}/user/{}'.format(base_url, body[\"username\"]),\n 
data=json.dumps(body),\n content_type='application/json',\n headers=TEST_HEADERS\n )\n assert res.status_code == 400\n\n\ndef test_user_put_user_create(api_client):\n body = {\n \"firstName\": \"John\",\n \"lastName\": \"Doe\",\n \"roomNumber\": 1111,\n \"comment\": \"comment\",\n \"departureDate\": \"2000-01-23T04:56:07.000+00:00\",\n \"associationMode\": \"2000-01-23T04:56:07.000+00:00\",\n \"email\": \"[email protected]\",\n \"username\": \"doe_john\"\n }\n res = api_client.put(\n '{}/user/{}'.format(base_url, body[\"username\"]),\n data=json.dumps(body),\n content_type='application/json',\n headers=TEST_HEADERS\n )\n assert res.status_code == 201\n\n assert_user_in_db(body)\n assert_one_modification_created(body[\"username\"])\n\n\ndef test_user_put_user_update(api_client):\n body = {\n \"firstName\": \"Jean-Louis\",\n \"lastName\": \"Dubois\",\n \"roomNumber\": 1111,\n \"comment\": \"comment\",\n \"departureDate\": \"2000-01-23T04:56:07.000+00:00\",\n \"associationMode\": \"2000-01-23T04:56:07.000+00:00\",\n \"email\": \"[email protected]\",\n \"username\": \"dubois_j\"\n }\n res = api_client.put(\n '{}/user/{}'.format(base_url, body[\"username\"]),\n data=json.dumps(body),\n content_type='application/json',\n headers=TEST_HEADERS\n )\n assert res.status_code == 204\n\n assert_user_in_db(body)\n assert_one_modification_created(body[\"username\"])\n\n\ndef test_user_post_add_membership_not_found(api_client):\n body = {\n \"duration\": 365,\n \"start\": \"2000-01-23T04:56:07.000+00:00\"\n }\n result = api_client.post(\n '{}/user/{}/membership'.format(base_url, \"charlie\"),\n data=json.dumps(body),\n content_type='application/json',\n headers=TEST_HEADERS,\n )\n assert result.status_code == 404\n\n\ndef test_user_post_add_membership_ok(api_client):\n body = {\n \"duration\": 365,\n \"start\": \"2000-01-23T04:56:07.000+00:00\"\n }\n result = api_client.post(\n '{}/user/{}/membership'.format(base_url, \"dubois_j\"),\n data=json.dumps(body),\n content_type='application/json',\n headers=TEST_HEADERS,\n )\n assert result.status_code == 200\n\n\ndef test_user_change_password_ok(api_client):\n USERNAME = \"dubois_j\"\n body = {\n \"password\": \"on;X\\\\${QG55Bd\\\"#NyL#+k:_xEdJrEDT7\",\n }\n result = api_client.put(\n '{}/user/{}/password/'.format(base_url, USERNAME),\n data=json.dumps(body),\n content_type='application/json',\n headers=TEST_HEADERS,\n )\n assert result.status_code == 204\n\n s = db.get_db().get_session()\n q = s.query(Adherent)\n q = q.filter(Adherent.login == USERNAME)\n r = q.one()\n assert r.password == ntlm_hash(body[\"password\"])\n assert_one_modification_created(USERNAME)\n\n\ndef test_user_change_password_user_not_exist(api_client):\n body = {\n \"password\": \"on;X\\\\${QG55Bd\\\"#NyL#+k:_xEdJrEDT7\",\n }\n result = api_client.put(\n '{}/user/{}/password/'.format(base_url, \"sherlock\"),\n data=json.dumps(body),\n content_type='application/json',\n headers=TEST_HEADERS,\n )\n assert result.status_code == 404\n" }, { "alpha_fraction": 0.7500647306442261, "alphanum_fraction": 0.752913773059845, "avg_line_length": 44.42353057861328, "blob_id": "65be69248028e2556bb00a687dc2a550dba48caf", "content_id": "70f1a9554139a3b1e8d06fb377a2b89aebc5ba0d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3912, "license_type": "no_license", "max_line_length": 119, "num_lines": 85, "path": "/README.md", "repo_name": "bonnetn/backend_adh", "src_encoding": "UTF-8", "text": "# ADH6\n\n## 2018 School project - ADH frontend\nThis 
is an application that I originally developed to go with the Angular\nFrontend of this project. See [here](https://github.com/bonnetn/frontend_adh).\n\n## How to set up the project\n- Create a virtualenv ```virtualenv ./```\n- Enter the virtualenv ```source bin/activate```\n- Install the requirements ```pip3 install -r requirements.txt```\n- Fill the settings files (there are some examples provided) ``` vim settings.py ``` & ``` vim unit_test_settings.py```\n- Run the tests ```pytest```\n- ``` apt install uwsgi uwsgi-plugin-python3 ```\n- ``` cp adh6-api.ini /etc/uwsgi/sites-available ```\n- ``` ln -s /etc/uwsgi/sites-available /etc/uwsgi/sites-enabled ```\n- Edit the file you just copied to have the correct paths...\n- Launch the server ```systemctl restart uwsgi```\n\n## What the hell is this mess?\nThis project is simply the implementation of the various methods defined\nin the API specification.\n\nIf you are a pro and just want to read the code, know that all\nthe code lives in the *adh/* folder.\n\nTo make Python behave as a web server we use *Flask*, and to avoid\nhaving to do anything complicated we use *connexion*, which does the binding\nbetween *Flask* and the Python functions that get called almost magically.\n\nThe API specification is stored in swagger.yaml at the root of the project;\nthis file is automatically exported from swaggerhub.\nhttps://app.swaggerhub.com/apis/insolentbacon/adherents/\n\n*In short*, the important functions are just the ones in *adh/controller/*,\nwhich are called when requests are made to the web server.\n\nNow, because we do not want to query the SQL database directly\n(for security reasons, and out of laziness), we use *SQLAlchemy*. It is\na library that lets you manipulate objects in the database as if they were\nPython objects (go look up what an *ORM* is).\n\nIn summary we have:\n\n- **controller/**: The most important part; this is where the functions live\nthat are called when an HTTP request is made to the API.\n- **model/**: This is where we define what is in the database (that is,\nthe table and column names, and the constraints on the\nfields [e.g. an IP must be valid]). The models are then imported into the\ncontrollers to manipulate the database.\n- **settings/**: Well, this is where the application settings live...\nI won't draw you a picture.\n- **exceptions/**: this is where we put the custom errors we defined; it is\nnot very important.\n- **test/**: this is where the tests live. It is extremely important. We test\nevery line of code of the .py files (we aim for 100% *code coverage*).\nBoth normal and edge cases must be tested. This is what runs\nwhen you launch pytest.\n\nNote:\nWhen you implement an API function in a controller, create only ONE\nSQLAlchemy session, created INSIDE your controller function. This avoids\nnested transactions, which are not always supported (and it is cleaner and less\nerror-prone).\n\n*Excerpt from the SQLAlchemy docs:*\n> As a general rule, keep the lifecycle of the session separate and external \n> from functions and objects that access and/or manipulate database data. 
\n> This will greatly help with achieving a predictable and consistent \n> transactional scope.\n\n\nI have defined a few useful helper functions on the database object models:\n\n- dict(obj) returns a dict in the API format\n- Obj.from_dict(dict) returns an object built from an API-format dict\n- Obj.find(session, value) returns the object that the API value refers to\n\n\n\n## Code coverage report generation:\n> pytest --cov=adh --cov-report html\n\n## On committing...\nYour commit must pass all the tests.\nIf you add a piece of code, you should write a test for it.\n" }, { "alpha_fraction": 0.7516170740127563, "alphanum_fraction": 0.7580853700637817, "avg_line_length": 27.629629135131836, "blob_id": "a4626c8856ff408e5615cc6237bbc6247827883e", "content_id": "510cab5c5bc0342bd4493d439e40b243a0210ee1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 773, "license_type": "no_license", "max_line_length": 57, "num_lines": 27, "path": "/wsgi.py", "repo_name": "bonnetn/backend_adh", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\nimport connexion\nimport logging\nfrom flask_cors import CORS\nfrom adh.model.database import Database\nfrom CONFIGURATION import PROD_DATABASE as DATABASE\nfrom connexion.resolver import RestyResolver\nfrom CONFIGURATION import API_CONF\n\nDatabase.init_db(DATABASE)\n\nlogging.basicConfig(level=logging.INFO)\napp = connexion.FlaskApp(__name__)\napp.app.config.update(API_CONF)\napp.add_api('swagger.yaml',\n resolver=RestyResolver('adh.controller'),\n strict_validation=True)\nCORS(app.app)\n# set the WSGI application callable to allow using uWSGI:\n# uwsgi --http :8080 -w app\napplication = app.app\n\n\[email protected]_appcontext\ndef shutdown_session(exception=None):\n if Database.get_db():\n Database.get_db().remove_session()\n" }, { "alpha_fraction": 0.5269121527671814, "alphanum_fraction": 0.5533144474029541, "avg_line_length": 25.032447814941406, "blob_id": "2e7ebe8d5ee0d283c95c110ab2e50b3772c4a061", "content_id": "3cb5a46d69ba63d6eb9f90b56384f6e3c628acae", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8825, "license_type": "no_license", "max_line_length": 78, "num_lines": 339, "path": "/test/test_port.py", "repo_name": "bonnetn/backend_adh", "src_encoding": "UTF-8", "text": "import json\nimport pytest\nfrom adh.model.database import Database as db\nfrom CONFIGURATION import TEST_DATABASE as db_settings\nfrom adh.model.models import Port, Switch, Chambre\n\nfrom .resource import base_url, TEST_HEADERS\n\n\[email protected]\ndef sample_switch1():\n yield Switch(\n description=\"Switch sample 1\",\n ip=\"192.168.102.51\",\n communaute=\"GrosMotDePasse\",\n )\n\n\[email protected]\ndef sample_switch2():\n yield Switch(\n description=\"Switch sample 2\",\n ip=\"192.168.102.52\",\n communaute=\"GrosMotDePasse\",\n )\n\n\[email protected]\ndef sample_port1(sample_switch1):\n yield Port(\n rcom=1,\n numero=\"0/0/1\",\n oid=\"1.1.1\",\n switch=sample_switch1,\n chambre_id=0,\n\n )\n\n\[email protected]\ndef sample_port2(sample_switch2):\n yield Port(\n rcom=2,\n numero=\"0/0/2\",\n oid=\"1.1.2\",\n switch=sample_switch2,\n chambre_id=0,\n\n )\n\n\[email protected]\ndef sample_room():\n yield Chambre(\n numero=5110,\n description=\"Chambre de l'ambiance\",\n telephone=1234\n )\n\n\ndef prep_db(session,\n sample_switch1,\n sample_switch2,\n sample_port1,\n sample_port2,\n sample_room):\n session.add_all([\n 
sample_switch1,\n sample_switch2,\n sample_port1,\n sample_port2,\n sample_room\n ])\n session.commit()\n\n\[email protected]\ndef api_client(sample_port1, sample_port2, sample_switch1, sample_switch2,\n sample_room):\n from .context import app\n with app.app.test_client() as c:\n db.init_db(db_settings, testing=True)\n prep_db(db.get_db().get_session(),\n sample_switch1,\n sample_switch2,\n sample_port1,\n sample_port2,\n sample_room)\n yield c\n\n\ndef assert_port_in_db(body):\n s = db.get_db().get_session()\n q = s.query(Port)\n q = q.filter(Port.numero == body[\"portNumber\"])\n p = q.one()\n assert body[\"portNumber\"] == p.numero\n assert body[\"roomNumber\"] == p.chambre.numero\n assert body[\"switchID\"] == p.switch.id\n\n\ndef test_port_to_dict(sample_port1):\n assert dict(sample_port1) == {'id': None, 'portNumber': '0/0/1'}\n\n\ndef test_port_get_filter_all(api_client):\n r = api_client.get(\n \"{}/ports/\".format(base_url),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n switches = json.loads(r.data.decode())\n assert switches\n assert len(switches) == 2\n\n\ndef test_port_get_filter_all_with_invalid_limit(api_client):\n r = api_client.get(\n \"{}/ports/?limit={}\".format(base_url, -1),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 400\n\n\ndef test_port_get_filter_all_with_limit(api_client):\n r = api_client.get(\n \"{}/ports/?limit={}\".format(base_url, 1),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n switches = json.loads(r.data.decode())\n assert switches\n assert len(switches) == 1\n\n\ndef test_port_get_filter_by_switchid(api_client, sample_switch2):\n r = api_client.get(\n \"{}/ports/?switchID={}\".format(base_url, sample_switch2.id),\n headers=TEST_HEADERS\n )\n assert r.status_code == 200\n switches = json.loads(r.data.decode())\n assert switches\n assert len(switches) == 1\n\n\ndef test_port_get_filter_by_roomnumber_with_results(api_client):\n r = api_client.get(\n \"{}/ports/?roomNumber={}\".format(base_url, 0),\n headers=TEST_HEADERS,\n )\n\n assert r.status_code == 200\n switches = json.loads(r.data.decode())\n assert switches\n assert len(switches) == 2\n\n\ndef test_port_get_filter_by_roomnumber_without_result(api_client):\n r = api_client.get(\n \"{}/ports/?roomNumber={}\".format(base_url, 42),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n switches = json.loads(r.data.decode())\n assert not switches\n assert len(switches) == 0\n\n\ndef test_port_get_filter_by_term_oid(api_client):\n r = api_client.get(\n \"{}/ports/?terms={}\".format(base_url, \"1.2\"),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n switches = json.loads(r.data.decode())\n assert switches\n assert len(switches) == 1\n\n\ndef test_port_get_filter_by_term_numero(api_client):\n r = api_client.get(\n \"{}/ports/?terms={}\".format(base_url, \"0/0/1\"),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n switches = json.loads(r.data.decode())\n assert switches\n assert len(switches) == 1\n\n\ndef test_port_post_create_port_invalid_switch_id(api_client, sample_switch1):\n body = {\n \"roomNumber\": 5110,\n \"switchID\": 999,\n \"portNumber\": \"1/0/4\"\n }\n\n r = api_client.post(\n \"{}/switch/{}/port/\".format(base_url, sample_switch1.id),\n data=json.dumps(body),\n content_type='application/json',\n headers=TEST_HEADERS,\n )\n assert r.status_code == 400\n\n\ndef test_port_post_create_port(api_client, sample_switch1):\n body = {\n \"roomNumber\": 5110,\n \"switchID\": sample_switch1.id,\n \"portNumber\": \"1/0/4\"\n }\n\n r = 
api_client.post(\n \"{}/switch/{}/port/\".format(base_url, sample_switch1.id),\n data=json.dumps(body),\n content_type='application/json',\n headers=TEST_HEADERS,\n )\n print(r.data)\n assert r.status_code == 200\n assert 'Location' in r.headers\n\n\ndef test_port_get_existant_port(api_client, sample_switch1, sample_port1):\n r = api_client.get(\n \"{}/switch/{}/port/{}\".format(base_url,\n sample_switch1.id,\n sample_port1.id),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n switch = json.loads(r.data.decode())\n assert switch\n\n\ndef test_port_get_non_existant_port(api_client, sample_switch1, sample_port1):\n r = api_client.get(\n \"{}/switch/{}/port/{}\".format(base_url,\n sample_switch1.id,\n 4242),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 404\n\n\ndef test_port_put_update_port_invalid_switch(api_client,\n sample_switch1,\n sample_port1):\n\n portNumber = \"1/2/3\"\n body = {\n \"roomNumber\": 5110,\n \"switchID\": 999,\n \"portNumber\": portNumber\n }\n\n r = api_client.put(\n \"{}/switch/{}/port/{}\".format(base_url,\n sample_switch1.id,\n sample_port1.id),\n data=json.dumps(body),\n content_type='application/json',\n headers=TEST_HEADERS,\n )\n assert r.status_code == 400\n\n\ndef test_port_put_update_port(api_client, sample_switch1, sample_port1):\n\n portNumber = \"1/2/3\"\n body = {\n \"roomNumber\": 5110,\n \"switchID\": sample_switch1.id,\n \"portNumber\": portNumber\n }\n\n assert sample_port1.numero != portNumber\n r = api_client.put(\n \"{}/switch/{}/port/{}\".format(base_url,\n sample_switch1.id,\n sample_port1.id),\n data=json.dumps(body),\n content_type='application/json',\n headers=TEST_HEADERS,\n )\n assert r.status_code == 204\n assert sample_port1.numero == portNumber\n assert_port_in_db(body)\n\n\ndef test_port_put_update_non_existant_port(api_client,\n sample_switch1):\n\n portNumber = \"1/2/3\"\n body = {\n \"roomNumber\": 5110,\n \"switchID\": sample_switch1.id,\n \"portNumber\": portNumber\n }\n\n r = api_client.put(\n \"{}/switch/{}/port/{}\".format(base_url,\n sample_switch1.id,\n 4242),\n data=json.dumps(body),\n content_type='application/json',\n headers=TEST_HEADERS,\n )\n assert r.status_code == 404\n\n\ndef test_port_put_delete_port(api_client, sample_switch1, sample_port1):\n\n r = api_client.delete(\n \"{}/switch/{}/port/{}\".format(base_url,\n sample_switch1.id,\n sample_port1.id),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 204\n\n s = db.get_db().get_session()\n q = s.query(Port)\n q = q.filter(Port.id == sample_port1.id)\n assert not s.query(q.exists()).scalar()\n\n\ndef test_port_put_delete_non_existant_port(api_client,\n sample_switch1):\n\n r = api_client.delete(\n \"{}/switch/{}/port/{}\".format(base_url,\n sample_switch1.id,\n 4242),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 404\n" }, { "alpha_fraction": 0.3961178958415985, "alphanum_fraction": 0.4787922501564026, "avg_line_length": 27.97916603088379, "blob_id": "d4c8ce45b1938262509b91abad6e822932b1e517", "content_id": "1810e223f1c145bdefe974d0b14c7e8a630cf7b1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1391, "license_type": "no_license", "max_line_length": 53, "num_lines": 48, "path": "/test/resource.py", "repo_name": "bonnetn/backend_adh", "src_encoding": "UTF-8", "text": "base_url = \"api\"\ndevice_cazal = {\n 'mac': 'FF:FF:FF:FF:FF:FF',\n 'ipAddress': '127.0.0.1',\n 'ipv6Address': 'fe80::0',\n 'connectionType': 'wired',\n 'username': 'cazal_k'\n}\n\ndevice_bonnet = {\n 'mac': 
'00:00:00:00:00:00',\n 'ipAddress': '127.0.0.2',\n 'ipv6Address': 'fe80::1',\n 'connectionType': 'wireless',\n 'username': 'bonnet_n'\n}\n\nINVALID_IPv6 = [\n \"\", # Empty string\n \"randomString\", # Some random data\n \"::::\", # Only delimiters\n \"2001:660:3203:i08::a79\", # wrong character\n 42, # Wrong type\n]\n\nINVALID_IP = [\n \"\", # Empty string\n \"randomString\", # Some random data\n \"192.168\", # Unfinished string\n \"....\", # Only delimiters\n \"200.256.200.200\", # Number > 255\n \"-1.200.200.200\", # Number < 0\n \"192.168.0.0/24\", # Address with mask\n \"192.168.0.0-10\", # Address range\n 42, # Wrong type\n]\n\nINVALID_MAC = [\n \"\", # Empty string\n \"AA:AA:AA:\", # Unfinished MAC address\n \"AA:AA:AA:AA:AA:AA:BB\", # MAC address too long\n \"randomString\", # Random data\n \":::::\", # Only delimiters\n \"12:34:56:78:9A:BG\", # Non hexa byte (BG)\n 42, # Wrong type\n]\n\nTEST_HEADERS = {\"Authorization\": \"Bearer TEST_TOKEN\"}\n" }, { "alpha_fraction": 0.590114951133728, "alphanum_fraction": 0.6003117561340332, "avg_line_length": 28.27186393737793, "blob_id": "b535bbc1dbf62a1c46983548025ebd663c2e302d", "content_id": "afa6549498592e4e7606390997453027e31bd4c1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 15397, "license_type": "no_license", "max_line_length": 78, "num_lines": 526, "path": "/adh/model/models.py", "repo_name": "bonnetn/backend_adh", "src_encoding": "UTF-8", "text": "# coding: utf-8\nfrom sqlalchemy import Column, Date, DateTime, Integer, \\\n Numeric, String, Text, text, ForeignKey\nfrom sqlalchemy.orm import relationship, validates\nfrom sqlalchemy.orm.exc import NoResultFound\nfrom adh.util import checks\nfrom adh.model.database import Base\nfrom adh.exceptions import InvalidIPv4, InvalidIPv6, InvalidEmail, InvalidMac\nfrom adh.exceptions import UserNotFound, RoomNotFound, SwitchNotFound\nfrom adh.exceptions import VlanNotFound, PortNotFound\nfrom adh.util.date import string_to_date\nimport datetime\nfrom sqlalchemy import inspect\n\n\ndef _get_model_dict(model):\n \"\"\"\n Converts a SQLAlchemy row to a dictionnary of Column:Value\n \"\"\"\n return dict((column.name, getattr(model, column.name))\n for column in model.__table__.columns)\n\n\nclass ModificationTracker():\n \"\"\"\n Define a class on which you can record modification of this instance\n \"\"\"\n __abstract__ = True\n\n def _end_modif_tracking(self):\n \"\"\"\n Call this function when you want to stop recording modifications\n This should not be called by the user.\n\n If start_modif was not called before, it will consider that the object\n was created from scratch.\n \"\"\"\n self._old_data = getattr(self, \"_old_data\", {})\n self._new_data = _get_model_dict(self)\n if inspect(self).deleted:\n self._new_data = {}\n\n def start_modif_tracking(self):\n \"\"\"\n Call this function when you want to start recording modifications\n \"\"\"\n self._old_data = _get_model_dict(self)\n self._new_data = None\n\n\nclass RubyHashModificationTracker(ModificationTracker):\n \"\"\"\n Define a class on which you can record modification and get the result as\n ruby/hash modification (like ADH5 does)\n \"\"\"\n\n def get_ruby_modif(self):\n self._end_modif_tracking()\n\n base_str = '{} !ruby/hash:ActiveSupport::HashWithIndifferentAccess\\n'\n txt = [base_str.format(self._ruby_hash_prefix)]\n for key in sorted(set().union(\n self._new_data.keys(),\n self._old_data.keys()\n )):\n old = self._old_data.get(key)\n new = self._new_data.get(key)\n\n 
old = old if old is not None else \"\"\n new = new if new is not None else \"\"\n\n if old != new:\n txt += [\"{}:\\n- {}\\n- {}\\n\".format(key, old, new)]\n\n return \"\".join(txt)\n\n\nclass Vlan(Base):\n __tablename__ = 'vlans'\n\n id = Column(Integer, primary_key=True)\n numero = Column(Integer)\n adresses = Column(String(255))\n adressesv6 = Column(String(255))\n created_at = Column(DateTime)\n updated_at = Column(DateTime)\n\n @staticmethod\n def find(session, num):\n \"\"\" [API] Get the specified Vlan from the database \"\"\"\n try:\n q = session.query(Vlan)\n q = q.filter(Vlan.numero == num)\n return q.one()\n except NoResultFound:\n raise VlanNotFound\n\n @validates('adresses')\n def valid_ipv4(self, key, addr):\n if not checks.isIPv4Network(addr):\n raise InvalidIPv4()\n return addr\n\n @validates('adressesv6')\n def valid_ipv6(self, key, addr):\n if not checks.isIPv6Network(addr):\n raise InvalidIPv6()\n return addr\n\n\nclass Chambre(Base):\n __tablename__ = 'chambres'\n\n id = Column(Integer, primary_key=True)\n numero = Column(Integer)\n description = Column(String(255))\n telephone = Column(String(255))\n vlan_old = Column(Integer)\n created_at = Column(DateTime)\n updated_at = Column(DateTime)\n dernier_adherent = Column(Integer)\n vlan_id = Column(Integer, ForeignKey(Vlan.id))\n vlan = relationship(Vlan)\n\n @staticmethod\n def from_dict(session, d):\n return Chambre(\n numero=d.get(\"roomNumber\"),\n description=d.get(\"description\"),\n telephone=d.get(\"phone\"),\n vlan=Vlan.find(session, d.get(\"vlan\")),\n )\n\n @staticmethod\n def find(session, roomNumber):\n if not roomNumber:\n return None\n q = session.query(Chambre)\n q = q.filter(Chambre.numero == roomNumber)\n try:\n return q.one()\n except NoResultFound:\n raise RoomNotFound()\n\n @validates('numero')\n def not_empty(self, key, s):\n if not s:\n raise ValueError(\"String must not be empty\")\n return s\n\n def __iter__(self):\n yield \"roomNumber\", self.numero\n if self.description:\n yield \"description\", self.description\n if self.telephone:\n yield \"phone\", self.telephone\n if self.vlan:\n yield \"vlan\", self.vlan.numero\n\n\nclass Adherent(Base, RubyHashModificationTracker):\n __tablename__ = 'adherents'\n _ruby_hash_prefix = '---'\n\n id = Column(Integer, primary_key=True)\n nom = Column(String(255))\n prenom = Column(String(255))\n mail = Column(String(255))\n login = Column(String(255))\n password = Column(String(255))\n chambre_id = Column(Integer, ForeignKey(Chambre.id))\n chambre = relationship(Chambre)\n created_at = Column(DateTime)\n updated_at = Column(DateTime)\n date_de_depart = Column(Date)\n commentaires = Column(String(255))\n mode_association = Column(\n DateTime,\n server_default=text(\"'2011-04-30 17:50:17'\")\n )\n access_token = Column(String(255))\n\n @staticmethod\n def find(session, username):\n if not username:\n return None\n q = session.query(Adherent)\n q = q.filter(Adherent.login == username)\n try:\n return q.one()\n except NoResultFound:\n raise UserNotFound()\n\n @staticmethod\n def from_dict(session, d):\n return Adherent(\n mail=d.get(\"email\"),\n prenom=d.get(\"firstName\"),\n nom=d.get(\"lastName\"),\n login=d.get(\"username\"),\n date_de_depart=string_to_date(d.get('departureDate')),\n commentaires=d.get('comment'),\n mode_association=string_to_date(d.get('associationMode')),\n chambre=Chambre.find(session, d.get(\"roomNumber\")),\n )\n\n @validates('nom', 'prenom', 'login', 'password')\n def not_empty(self, key, s):\n if not s:\n raise ValueError(\"String must 
not be empty\")\n return s\n\n @validates('mail')\n def valid_email(self, key, mail):\n if not mail or not checks.isEmail(mail):\n raise InvalidEmail()\n return mail\n\n def __iter__(self):\n yield \"email\", self.mail\n yield \"firstName\", self.prenom\n yield \"lastName\", self.nom\n yield \"username\", self.login\n if self.commentaires:\n yield \"comment\", self.commentaires\n\n if self.chambre:\n yield \"roomNumber\", self.chambre.numero\n\n if self.date_de_depart:\n yield \"departureDate\", self.date_de_depart\n\n if self.mode_association:\n yield \"associationMode\", self.mode_association\n\n\nclass Caisse(Base):\n __tablename__ = 'caisse'\n\n id = Column(Integer, primary_key=True)\n fond = Column(Numeric(10, 2))\n coffre = Column(Numeric(10, 2))\n date = Column(DateTime)\n created_at = Column(DateTime)\n updated_at = Column(DateTime)\n\n\nclass Compte(Base):\n __tablename__ = 'comptes'\n\n id = Column(Integer, primary_key=True)\n intitule = Column(String(255))\n description = Column(Text)\n created_at = Column(DateTime)\n updated_at = Column(DateTime)\n\n\nclass Ecriture(Base):\n __tablename__ = 'ecritures'\n\n id = Column(Integer, primary_key=True)\n intitule = Column(String(255))\n montant = Column(Numeric(10, 2))\n moyen = Column(String(255))\n date = Column(DateTime)\n compte_id = Column(Integer, index=True)\n created_at = Column(DateTime)\n updated_at = Column(DateTime)\n utilisateur_id = Column(Integer, index=True)\n adherent_id = Column(Integer, index=True)\n\n\nclass Inscription(Base):\n __tablename__ = 'inscriptions'\n\n id = Column(Integer, primary_key=True)\n nom = Column(String(255))\n prenom = Column(String(255))\n email = Column(String(255))\n login = Column(String(255))\n password = Column(String(255))\n chambre_id = Column(Integer, index=True)\n duree_cotisation = Column(Integer)\n created_at = Column(DateTime)\n updated_at = Column(DateTime)\n\n\nclass MacVendor(Base):\n __tablename__ = 'mac_vendors'\n\n id = Column(Integer, primary_key=True)\n prefix = Column(String(255))\n nom = Column(String(255))\n created_at = Column(DateTime, nullable=False)\n updated_at = Column(DateTime, nullable=False)\n\n\nclass Modification(Base):\n __tablename__ = 'modifications'\n\n id = Column(Integer, primary_key=True)\n adherent_id = Column(Integer, index=True)\n action = Column(Text)\n created_at = Column(DateTime)\n updated_at = Column(DateTime)\n utilisateur_id = Column(Integer, index=True)\n\n @staticmethod\n def add_and_commit(session, adherent, action, admin):\n now = datetime.datetime.now()\n m = Modification(\n adherent_id=adherent.id,\n action=action,\n created_at=now,\n updated_at=now,\n utilisateur_id=admin.id\n )\n session.add(m)\n session.commit()\n\n\nclass Ordinateur(Base, RubyHashModificationTracker):\n __tablename__ = 'ordinateurs'\n _ruby_hash_prefix = 'ordinateurs:'\n\n id = Column(Integer, primary_key=True)\n mac = Column(String(255))\n ip = Column(String(255))\n dns = Column(String(255))\n adherent_id = Column(Integer, ForeignKey(Adherent.id), nullable=False)\n adherent = relationship(Adherent)\n created_at = Column(DateTime)\n updated_at = Column(DateTime)\n last_seen = Column(DateTime)\n ipv6 = Column(String(255))\n\n @validates('mac')\n def mac_valid(self, key, mac):\n if not mac or not checks.isMac(mac):\n raise InvalidMac()\n return mac\n\n @validates('ip')\n def valid_ip(self, key, addr):\n if not addr or not checks.isIPv4(addr):\n raise InvalidIPv4()\n return addr\n\n @validates('ipv6')\n def valid_ipv6(self, key, addr):\n if not addr or not 
checks.isIPv6(addr):\n raise InvalidIPv6()\n return addr\n\n def __iter__(self):\n yield \"mac\", self.mac\n yield \"connectionType\", \"wired\"\n if self.ip:\n yield \"ipAddress\", self.ip\n if self.ipv6:\n yield \"ipv6Address\", self.ipv6\n if self.adherent:\n yield \"username\", self.adherent.login\n\n\nclass Portable(Base, RubyHashModificationTracker):\n __tablename__ = 'portables'\n _ruby_hash_prefix = 'portables:'\n\n id = Column(Integer, primary_key=True)\n mac = Column(String(255))\n adherent_id = Column(Integer, ForeignKey(Adherent.id), nullable=False)\n adherent = relationship(Adherent)\n last_seen = Column(DateTime)\n created_at = Column(DateTime)\n updated_at = Column(DateTime)\n\n @validates('mac')\n def mac_valid(self, key, mac):\n if not mac or not checks.isMac(mac):\n raise InvalidMac()\n return mac\n\n def __iter__(self):\n yield \"mac\", self.mac\n yield \"connectionType\", \"wireless\"\n if self.adherent:\n yield \"username\", self.adherent.login\n\n\nclass Switch(Base):\n __tablename__ = 'switches'\n\n id = Column(Integer, primary_key=True)\n description = Column(String(255))\n ip = Column(String(255))\n communaute = Column(String(255))\n created_at = Column(DateTime)\n updated_at = Column(DateTime)\n\n @staticmethod\n def find(session, switchID):\n \"\"\" [API] Get the specified switch from the database \"\"\"\n try:\n q = session.query(Switch)\n q = q.filter(Switch.id == switchID)\n return q.one()\n except NoResultFound:\n raise SwitchNotFound\n\n @staticmethod\n def from_dict(session, body):\n \"\"\" Transforms a dictionary to Switch object \"\"\"\n return Switch(\n description=body.get('description'),\n ip=body.get('ip'),\n communaute=body.get('community'),\n )\n\n @validates('ip')\n def valid_ip(self, key, addr):\n if not addr or not checks.isIPv4(addr):\n raise InvalidIPv4()\n return addr\n\n def __iter__(self):\n yield \"id\", self.id\n yield \"ip\", self.ip\n yield \"community\", self.communaute\n if self.description:\n yield \"description\", self.description\n\n\nclass Port(Base):\n __tablename__ = 'ports'\n\n id = Column(Integer, primary_key=True)\n rcom = Column(Integer)\n numero = Column(String(255))\n oid = Column(String(255))\n switch_id = Column(Integer, ForeignKey(Switch.id), nullable=False)\n switch = relationship(Switch)\n chambre_id = Column(Integer, ForeignKey(Chambre.id), nullable=False)\n chambre = relationship(Chambre)\n created_at = Column(DateTime)\n updated_at = Column(DateTime)\n\n @staticmethod\n def find(session, portID):\n \"\"\" [API] Get the specified Port from the database \"\"\"\n try:\n q = session.query(Port)\n q = q.filter(Port.id == portID)\n return q.one()\n except NoResultFound:\n raise PortNotFound\n\n @staticmethod\n def from_dict(session, d):\n \"\"\" Creates a Port object from a request \"\"\"\n return Port(\n chambre=Chambre.find(session, d.get(\"roomNumber\")),\n switch=Switch.find(session, d.get(\"switchID\")),\n numero=d.get(\"portNumber\"),\n )\n\n @validates('numero')\n def not_empty(self, key, s):\n if not s:\n raise ValueError(\"String must not be empty\")\n return s\n\n def __iter__(self):\n yield \"id\", self.id\n yield \"portNumber\", self.numero\n if self.chambre:\n yield \"roomNumber\", self.chambre.numero\n if self.switch_id:\n yield \"switchID\", self.switch_id\n\n\nclass Utilisateur(Base):\n __tablename__ = 'utilisateurs'\n\n id = Column(Integer, primary_key=True)\n nom = Column(String(255))\n access = Column(Integer)\n email = Column(String(255))\n login = Column(String(255))\n password_hash = 
Column(String(255))\n created_at = Column(DateTime)\n updated_at = Column(DateTime)\n access_token = Column(String(255))\n\n @staticmethod\n def find_or_create(session, username):\n \"\"\" Get the specified admin, if it does not exist, create it. \"\"\"\n try:\n q = session.query(Utilisateur)\n q = q.filter(Utilisateur.login == username)\n return q.one()\n except NoResultFound:\n now = datetime.datetime.now()\n new_admin = Utilisateur(\n nom=\"-\",\n access=42,\n email=\"-\",\n login=username,\n password_hash=\"-\",\n created_at=now,\n updated_at=now,\n access_token=\"-\"\n )\n session.add(new_admin)\n session.flush()\n return new_admin\n\n\nclass Adhesion(Base):\n __tablename__ = 'adhesion'\n\n id = Column(Integer, primary_key=True)\n adherent_id = Column(Integer, ForeignKey(Adherent.id), nullable=False)\n adherent = relationship(Adherent)\n depart = Column(DateTime, nullable=False)\n fin = Column(DateTime, nullable=False)\n" }, { "alpha_fraction": 0.684684693813324, "alphanum_fraction": 0.7327327132225037, "avg_line_length": 19.75, "blob_id": "e3177d8bbc72007c87e6158d326a1bdec45886ec", "content_id": "367907cb4cdfeffe6f76c99629fa97f20fb39770", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 333, "license_type": "no_license", "max_line_length": 88, "num_lines": 16, "path": "/adh6-api.ini", "repo_name": "bonnetn/backend_adh", "src_encoding": "UTF-8", "text": "[uwsgi]\nplugin=python3\nmodule = wsgi\nchdir = /opt/api-server/\nmaster = true\nprocesses = 4\nenable-threads = true\nuid=adh6\ngid=adh6\nssl-socket = :9443,/opt/api-server/certs/adh6-api.crt,/opt/api-server/certs/adh6-api.key\nvacuum = true\ndie-on-term = true\nharakiri = 30\npost-buffering = 4096\nmount = /=wsgi.py\nmanage-script-name = true\n\n" }, { "alpha_fraction": 0.7142857313156128, "alphanum_fraction": 0.7142857313156128, "avg_line_length": 13, "blob_id": "8b6f0bc0dc29c5211aca2080b14f816065927246", "content_id": "ba96a98ba1c65ec49b3d4956703ecbea06a5ba51", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 56, "license_type": "no_license", "max_line_length": 32, "num_lines": 4, "path": "/test/context.py", "repo_name": "bonnetn/backend_adh", "src_encoding": "UTF-8", "text": "from wsgi import app\n\n\napp.app.config[\"TESTING\"] = True\n" }, { "alpha_fraction": 0.5820148587226868, "alphanum_fraction": 0.6049402356147766, "avg_line_length": 24.595041275024414, "blob_id": "690dd20c57c71297b39a1bc1c287dabb4e3f3adb", "content_id": "e427e683a0601a3dac90515712d8f0c984a48a19", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6194, "license_type": "no_license", "max_line_length": 63, "num_lines": 242, "path": "/test/test_switch.py", "repo_name": "bonnetn/backend_adh", "src_encoding": "UTF-8", "text": "import pytest\nimport json\nfrom .resource import base_url\nfrom CONFIGURATION import TEST_DATABASE as db_settings\nfrom adh.model.models import Switch\nfrom adh.model.database import Database as db\nfrom .resource import INVALID_IP, TEST_HEADERS\n\n\[email protected]\ndef sample_switch():\n yield Switch(\n id=1,\n description='Switch',\n ip='192.168.102.2',\n communaute='communaute',\n )\n\n\ndef prep_db(session, sample_switch):\n \"\"\" Insert the test objects in the db \"\"\"\n session.add(sample_switch)\n session.commit() # TODO: remove?\n\n\[email protected]\ndef api_client(sample_switch):\n from .context import app\n with app.app.test_client() as c:\n 
db.init_db(db_settings, testing=True)\n prep_db(db.get_db().get_session(), sample_switch)\n yield c\n\n\ndef assert_switch_in_db(body):\n s = db.get_db().get_session()\n q = s.query(Switch)\n q = q.filter(Switch.ip == body[\"ip\"])\n sw = q.one()\n assert sw.ip == body[\"ip\"]\n assert sw.communaute == body[\"community\"]\n assert sw.description == body[\"description\"]\n\n\ndef test_switch_to_dict(sample_switch):\n dict_sw = {\n 'id': 1,\n 'ip': '192.168.102.2',\n 'community': 'communaute',\n 'description': 'Switch'\n }\n\n assert dict(sample_switch) == dict_sw\n\n\[email protected](\"test_ip\", INVALID_IP)\ndef test_switch_post_invalid_ip(api_client, test_ip):\n sample_switch = {\n \"description\": \"Test Switch\",\n \"ip\": test_ip,\n \"community\": \"myGreatCommunity\"\n }\n r = api_client.post(\n \"{}/switch/\".format(base_url),\n data=json.dumps(sample_switch),\n content_type='application/json',\n headers=TEST_HEADERS\n )\n assert r.status_code == 400\n\n\ndef test_switch_post_valid(api_client):\n\n sample_switch = {\n \"description\": \"Test Switch\",\n \"ip\": \"192.168.103.128\",\n \"community\": \"myGreatCommunity\"\n }\n\n # Insert data to the database\n r = api_client.post(\n \"{}/switch/\".format(base_url),\n data=json.dumps(sample_switch),\n content_type='application/json',\n headers=TEST_HEADERS\n )\n assert r.status_code == 201\n assert 'Location' in r.headers\n assert r.headers['Location'] == 'http://localhost/switch/2'\n assert_switch_in_db(sample_switch)\n\n\ndef test_switch_get_all_invalid_limit(api_client):\n r = api_client.get(\n \"{}/switch/?limit={}\".format(base_url, -1),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 400\n\n\ndef test_switch_get_all_limit(api_client):\n r = api_client.get(\n \"{}/switch/?limit={}\".format(base_url, 0),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n t = json.loads(r.data.decode('utf-8'))\n assert len(t) == 0\n\n\ndef test_switch_get_all(api_client):\n r = api_client.get(\n \"{}/switch/\".format(base_url),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n t = json.loads(r.data.decode('utf-8'))\n assert t\n assert len(t) == 1\n\n\ndef test_switch_get_existant_switch(api_client):\n r = api_client.get(\n \"{}/switch/{}\".format(base_url, 1),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n assert json.loads(r.data.decode('utf-8'))\n\n\ndef test_switch_get_non_existant_switch(api_client):\n r = api_client.get(\n \"{}/switch/{}\".format(base_url, 100000),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 404\n\n\ndef test_switch_filter_by_term_ip(api_client):\n terms = \"102.2\"\n r = api_client.get(\n \"{}/switch/?terms={}\".format(base_url, terms),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n result = json.loads(r.data.decode('utf-8'))\n assert result\n assert len(result) == 1\n\n\ndef test_switch_filter_by_term_desc(api_client):\n terms = \"Switch\"\n r = api_client.get(\n \"{}/switch/?terms={}\".format(base_url, terms),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n result = json.loads(r.data.decode('utf-8'))\n assert result\n assert len(result) == 1\n\n\ndef test_switch_filter_by_term_nonexistant(api_client):\n terms = \"HEYO\"\n r = api_client.get(\n \"{}/switch/?terms={}\".format(base_url, terms),\n headers=TEST_HEADERS,\n )\n assert r.status_code == 200\n result = json.loads(r.data.decode('utf-8'))\n assert not result\n\n\[email protected](\"test_ip\", INVALID_IP)\ndef test_switch_update_switch_invalid_ip(api_client, test_ip):\n sample_switch = {\n 
\"description\": \"Modified switch\",\n \"ip\": test_ip,\n \"community\": \"communityModified\"\n }\n\n r = api_client.put(\n \"{}/switch/{}\".format(base_url, 1),\n data=json.dumps(sample_switch),\n content_type='application/json',\n headers=TEST_HEADERS\n )\n assert r.status_code == 400\n\n\ndef test_switch_update_existant_switch(api_client):\n sample_switch = {\n \"description\": \"Modified switch\",\n \"ip\": \"192.168.103.132\",\n \"community\": \"communityModified\"\n }\n\n r = api_client.put(\n \"{}/switch/{}\".format(base_url, 1),\n data=json.dumps(sample_switch),\n content_type='application/json',\n headers=TEST_HEADERS\n )\n assert r.status_code == 204\n assert_switch_in_db(sample_switch)\n\n\ndef test_switch_update_non_existant_switch(api_client):\n sample_switch = {\n \"description\": \"Modified switch\",\n \"ip\": \"192.168.103.132\",\n \"community\": \"communityModified\"\n }\n\n r = api_client.put(\n \"{}/switch/{}\".format(base_url, 100000),\n data=json.dumps(sample_switch),\n content_type='application/json',\n headers=TEST_HEADERS\n )\n assert r.status_code == 404\n\n\ndef test_switch_delete_existant_switch(api_client):\n r = api_client.delete(\n \"{}/switch/{}\".format(base_url, 1),\n headers=TEST_HEADERS\n )\n assert r.status_code == 204\n s = db.get_db().get_session()\n q = s.query(Switch)\n q = q.filter(Switch.id == 1)\n\n assert not s.query(q.exists()).scalar()\n\n\ndef test_switch_delete_non_existant_switch(api_client):\n r = api_client.delete(\n \"{}/switch/{}\".format(base_url, 10000),\n headers=TEST_HEADERS\n )\n assert r.status_code == 404\n" }, { "alpha_fraction": 0.6160754561424255, "alphanum_fraction": 0.6304445266723633, "avg_line_length": 24.597702026367188, "blob_id": "b3d3add89b1b7cfd90bbfd7f9bbed61971b5316f", "content_id": "9cfc7a49b666306b98f7800c880961d169251af4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2227, "license_type": "no_license", "max_line_length": 59, "num_lines": 87, "path": "/adh/controller/room.py", "repo_name": "bonnetn/backend_adh", "src_encoding": "UTF-8", "text": "from connexion import NoContent\nfrom sqlalchemy import or_\nfrom adh.exceptions import RoomNotFound, VlanNotFound\nfrom adh.model.database import Database as db\nfrom adh.model.models import Chambre\nfrom adh.auth import auth_simple_user\n\n\ndef roomExists(session, roomNumber):\n \"\"\" Returns true if the room exists in the database \"\"\"\n try:\n Chambre.find(session, roomNumber)\n except RoomNotFound:\n return False\n return True\n\n\n@auth_simple_user\ndef filterRoom(admin, limit=100, offset=0, terms=None):\n \"\"\" [API] Filter the list of the rooms \"\"\"\n if limit < 0:\n return \"Limit must be a positive integer\", 400\n s = db.get_db().get_session()\n q = s.query(Chambre)\n if terms:\n q = q.filter(or_(\n Chambre.telephone.contains(terms),\n Chambre.description.contains(terms),\n ))\n count = q.count()\n q = q.order_by(Chambre.id.asc())\n q = q.offset(offset)\n q = q.limit(limit)\n result = q.all()\n result = map(dict, result)\n result = list(result)\n headers = {\n 'access-control-expose-headers': 'X-Total-Count',\n 'X-Total-Count': str(count)\n }\n return result, 200, headers\n\n\n@auth_simple_user\ndef putRoom(admin, roomNumber, body):\n \"\"\" [API] Update/create a room in the database \"\"\"\n s = db.get_db().get_session()\n\n try:\n new_room = Chambre.from_dict(s, body)\n except VlanNotFound:\n return \"Vlan not found\", 400\n room_exists = roomExists(s, roomNumber)\n\n if room_exists:\n 
new_room.id = Chambre.find(s, roomNumber).id\n\n s.merge(new_room)\n s.commit()\n\n if room_exists:\n return NoContent, 204\n else:\n return NoContent, 201\n\n\n@auth_simple_user\ndef getRoom(admin, roomNumber):\n \"\"\" [API] Get the room specified \"\"\"\n s = db.get_db().get_session()\n try:\n return dict(Chambre.find(s, roomNumber)), 200\n except RoomNotFound:\n return NoContent, 404\n\n\n@auth_simple_user\ndef deleteRoom(admin, roomNumber):\n \"\"\" [API] Delete room from the database \"\"\"\n s = db.get_db().get_session()\n try:\n s.delete(Chambre.find(s, roomNumber))\n except RoomNotFound:\n return NoContent, 404\n\n s.commit()\n return NoContent, 204\n" }, { "alpha_fraction": 0.6455026268959045, "alphanum_fraction": 0.6666666865348816, "avg_line_length": 22.625, "blob_id": "0489d9f07e688d07a51d59f7eb68fc7f3c9eced2", "content_id": "7705d797a360304d9ca5de9b38e7d0274cb364d9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 189, "license_type": "no_license", "max_line_length": 74, "num_lines": 8, "path": "/adh/util/date.py", "repo_name": "bonnetn/backend_adh", "src_encoding": "UTF-8", "text": "from dateutil import parser\n\n\ndef string_to_date(s):\n \"\"\" Converts a ISO 8601 date formatted string to a python datetime \"\"\"\n if not s:\n return None\n return parser.parse(s)\n" } ]
27
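The backend_adh record above insists on one SQLAlchemy session per controller call and on the dict(obj)/Obj.find model helpers. As a reading aid, here is a minimal sketch of that pattern, assuming only the Database, Adherent, and UserNotFound helpers shown in the record; the getMember controller name is hypothetical, not repo code:

```python
# Minimal sketch of the one-session-per-controller pattern described in the
# backend_adh README; getMember is a hypothetical controller name.
from connexion import NoContent
from adh.model.database import Database as db
from adh.model.models import Adherent
from adh.exceptions import UserNotFound


def getMember(username):
    """ [API] Fetch one member, using a single session created here. """
    s = db.get_db().get_session()  # the ONE session for this request
    try:
        # dict(obj) serializes the model into the API format via __iter__
        return dict(Adherent.find(s, username)), 200
    except UserNotFound:
        return NoContent, 404
```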
seeyoupass/lagou
https://github.com/seeyoupass/lagou
177a1a5a59fe050dd0ca7cea1436dc7a4cbb8cd0
7ed00e4af28dcf08268df6ff76bcfd56e44b1aed
f56f92fafe91488ef4fbf61ab9d09dce18ccfdf0
refs/heads/master
2020-04-11T11:47:14.249169
2019-01-25T07:45:09
2019-01-25T07:45:09
161,759,361
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6760450005531311, "alphanum_fraction": 0.6768488883972168, "avg_line_length": 24.91666603088379, "blob_id": "2374fc25958a2fbcde52c70670bf208ae3ee9c4f", "content_id": "e97c31793a32c3a854a05fde529d2adecd413628", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1244, "license_type": "no_license", "max_line_length": 52, "num_lines": 48, "path": "/git/zhihu/items.py", "repo_name": "seeyoupass/lagou", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n# Define here the models for your scraped items\n#\n# See documentation in:\n# https://doc.scrapy.org/en/latest/topics/items.html\n\nimport scrapy\n\n\nclass ZhihuItem(scrapy.Item):\n # define the fields for your item here like:\n # name = scrapy.Field()\n question_title=scrapy.Field()\n tags=scrapy.Field()\n question_detial=scrapy.Field()\n answer=scrapy.Field()\n answer_user=scrapy.Field()\n text=scrapy.Field()\n time=scrapy.Field()\n awesome=scrapy.Field()\n comment_total=scrapy.Field()\n url=scrapy.Field()\n comment_name=scrapy.Field()\n comment_content=scrapy.Field()\n\nclass ZhihuTopicItem(scrapy.Item):\n # define the fields for your item here like:\n # name = scrapy.Field()\n question_title=scrapy.Field()\n tags=scrapy.Field()\n question_detial=scrapy.Field()\n answer=scrapy.Field()\n answer_user=scrapy.Field()\n text=scrapy.Field()\n time=scrapy.Field()\n awesome=scrapy.Field()\n comment_total=scrapy.Field()\n url=scrapy.Field()\n\n\nclass ZhihuqsItem(scrapy.Item):\n # define the fields for your item here like:\n # name = scrapy.Field()\n answer_user=scrapy.Field()\n text=scrapy.Field()\n awesome=scrapy.Field()\n comment_total=scrapy.Field()\n" }, { "alpha_fraction": 0.6106051206588745, "alphanum_fraction": 0.6308439373970032, "avg_line_length": 39.162601470947266, "blob_id": "3dfce1631723c612fba7c8de17302cb388215c8b", "content_id": "d091d8ee3ddc59d67439a799356632373c7ce19c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5513, "license_type": "no_license", "max_line_length": 435, "num_lines": 123, "path": "/lagou/req/llg.py", "repo_name": "seeyoupass/lagou", "src_encoding": "UTF-8", "text": "\nimport requests\nimport json,csv\nimport pandas as pd\nfrom bs4 import BeautifulSoup\nimport time,pymongo,pymysql,random\nfrom multiprocessing import Pool\n\n\nheaders={'X-Anit-Forge-Code':'0',\n 'X-Anit-Forge-Token':None,\n 'X-Requested-With':'XMLHttpRequest',\n\t\t\t'Cookie':'',#需自行添加\n\t\t\t'Host':'www.lagou.com',\n 'Referer': 'https://www.lagou.com/jobs/list_%E7%88%AC%E8%99%AB?labelWords=&fromSearch=true&suginput=',\n 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.110 Safari/537.36',\n}\n#headers ={'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.110 Safari/537.36'}\ndef get_start_url(page,keyword,city): #定义关键词和城市获取url\n params = {'city': city,\n 'needAddtionalResult': 'false'\n\n }\n formdata ={'first': 'true',\n 'pn': page,\n 'kd': keyword}\n #url = 'https://www.lagou.com/jobs/positionAjax.json?city=%E4%B8%8A%E6%B5%B7&needAddtionalResult=false'\n #urls = url+urlencode(formdata)\n try:\n html = requests.post('https://www.lagou.com/jobs/positionAjax.json',headers=headers,params=params,data=formdata)\n if html.status_code==200:\n #print(html.text)\n return html\n\n except requests.ConnectionError:\n return None\n\ndef parse_json(html):#解析获取的json并存为字典\n jd = 
json.loads(html.text)\n #print(jd['content']['positionResult']['result'])\n companyFullName=[]\n detial_url=[]\n workYear=[]\n edu=[]\n positionName=[]\n positionAdvantage=[]\n salary=[]\n industryField=[]\n companySize=[]\n companyLabelList=[]\n positionLables=[]\n district=[]\n skillLables=[]\n item={}\n for id in jd['content']['positionResult']['result']:\n url_id = id['positionId']\n # print(url_id)\n url = 'https://www.lagou.com/jobs/' + str(url_id) + '.html'\n detial_url.append(url)\n #print(detial_url)\n companyFullName_=id['companyFullName']\n companyFullName.append(companyFullName_)\n workYear_=id['workYear']\n workYear.append(workYear_)\n edu.append(id['education'])\n positionName.append(id['positionName'])\n positionAdvantage.append(id['positionAdvantage'])\n salary.append(id['salary'])\n industryField.append(id['industryField'])\n companySize.append(id['companySize'])\n companyLabelList.append(id['companyLabelList'])\n positionLables.append(id['positionLables'])\n district.append(id['district'])\n skillLables.append(id['skillLables'])\n #print(positionName,positionAdvantage,salary,industryField,companySize,companyLabelList, positionLables, district, skillLables,)\n item={'公司名称':companyFullName,'经验':workYear,'教育':edu,'职位':positionName,'诱惑':positionAdvantage,'工资':salary,'公司经验范围':industryField,'公司规模':companySize,'公司要求':companyLabelList,'职位要求':positionLables,'地址':district,'技能':skillLables,'详细网址':detial_url}\n print(item)\n #print(item.values())\n return item\ndef main(page,keyword,city):\n html=get_start_url(page,keyword,city)\n result=parse_json(html)\n save_file(result,page)\n\n\ndef save_file(result,page):#将数据存为csv格式\n df=pd.DataFrame(result)\n df.to_csv(str(page)+'lagou.csv',index=False,sep=',')\n # with open('lagous.csv', 'a',encoding='utf-8') as csv_file:\n # writer = csv.writer(csv_file)\n # for key, value in result.items():\n # writer.writerow([key, value])\n\n\nif __name__ =='__main__':\n #pass\n #main(2,'爬虫','上海')\n for i in range(1,6):\n time.sleep(90)\n main(i,'爬虫','上海')\n #\n # print(get_start_url(i))\n# import csv,json, pandas as pd\n# p=my ={'name': '海知智能开发部招聘爬虫开发实习生', 'company': '海知智能开发部招聘', 'salary': '3k-6k /上海 /经验应届毕业生 /本科及以上 /实习', 'temptation': '人工智能', 'qualifications': '\\n岗位描述:\\n1、负责维护与改进现有爬虫框架;\\\\n2、负责爬取业务相关网站;\\\\n3、负责提取与处理数据。\\n\\n岗位要求:\\n1、统计、计算机或相关学士学位,大四或研究生;\\n2、熟悉编程语言Java/Python;\\n3、熟悉关系型/非关系型数据库(Mysql, Postgresql, Mongo);\\n4、熟悉网络,熟悉Linux,较强技术文档阅读能力;\\n5、熟悉基本数据结构和算法,逻辑思维强;\\6、有一定沟通理解和表达能力,完成和其他人协作,认真负责。\\n', 'url': 'https://www.lagou.com/jobs/3827697.html'}\n# with open('dict.csv', 'a' ,encoding='utf-8') as csv_file:\n# writer = csv.writer(csv_file)\n# for key, value in my.items():\n# writer.writerow([key, value])\n#\n# with open('dict.txt', 'a' ,encoding='utf-8') as f:\n# f.write(json.dumps(my,ensure_ascii=False,)+'/n')\n# f.close()\n#\n# with open('lagou.csv','w',encoding='utf-8') as file:\n# title = 'name\\tcompany\\tsalary\\ttemptation\\tqualifications\\turl'\n# file.write(title)\n# items=str(p['name']) + '\\t' + str(p['company']) + '\\t' + str(p['salary']) + '\\t' + \\\n# str(p['temptation']) + '\\t' + str(p['qualifications']) + '\\t' + str(p['url']) + '\\n'\n# file.write(items)\n# file.close()\n#\n# #pd.DataFrame(my.keys(),my.values())\n# #print(my.keys(),my.values(),my.items())\n# print()\n" }, { "alpha_fraction": 0.613545835018158, "alphanum_fraction": 0.6533864736557007, "avg_line_length": 55.42499923706055, "blob_id": "aaebcc89509f78bc1810c100855daed1e7050d3a", "content_id": "9540de3e2384ed21d098b64551e3cfe31c86097a", "detected_licenses": 
[], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2259, "license_type": "no_license", "max_line_length": 696, "num_lines": 40, "path": "/git/zhihu/spiders/zhihuqs.py", "repo_name": "seeyoupass/lagou", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport scrapy\nimport json\nfrom zhihu.items import ZhihuqsItem\n\nclass ZhihuqsSpider(scrapy.Spider):\n name = 'zhihuqs'\n allowed_domains = ['https://www.zhihu.com']\n start_urls = ['http://https://www.zhihu.com/']\n\n def __init__(self):\n self.headers = {\n # 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',\n # 'Accept-Encoding': 'gzip, deflate, br',\n # 'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',\n # 'Cache-Control': 'max-age=0',\n # 'Connection': 'keep-alive',\n 'Host': 'www.zhihu.com',\n 'Referer': 'https://www.zhihu.com/signup?next=%2F',\n # 'Upgrade-Insecure-Requests': '1',\n 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36'\n }\n self.request_url = 'https://www.zhihu.com/api/v4/questions/28626263/answers?include=data%5B*%5D.is_normal%2Cadmin_closed_comment%2Creward_info%2Cis_collapsed%2Cannotation_action%2Cannotation_detail%2Ccollapse_reason%2Cis_sticky%2Ccollapsed_by%2Csuggest_edit%2Ccomment_count%2Ccan_comment%2Ccontent%2Ceditable_content%2Cvoteup_count%2Creshipment_settings%2Ccomment_permission%2Ccreated_time%2Cupdated_time%2Creview_info%2Crelevant_info%2Cquestion%2Cexcerpt%2Crelationship.is_authorized%2Cis_author%2Cvoting%2Cis_thanked%2Cis_nothelp%2Cis_labeled%3Bdata%5B*%5D.mark_infos%5B*%5D.url%3Bdata%5B*%5D.author.follower_count%2Cbadge%5B*%5D.topics&offset=&limit=3&sort_by=default&platform=desktop'\n\n def start_requests(self):\n yield scrapy.Request(url=self.request_url,headers=self.headers,callback=self.parse)\n\n def parse(self, response):\n item=ZhihuqsItem()\n jd = json.loads(response.text)\n for it in jd['data']:\n item['answer_user']= it['author']['name']\n item['comment_total'] = it['comment_count']\n item['text'] = it['content']\n item['awesome'] = it['voteup_count']\n yield item\n\n next_url = jd['paging']['next']\n if next_url is not None:\n yield scrapy.Request(url=next_url,headers=self.headers,callback=self.parse,dont_filter=True)\n\n\n" }, { "alpha_fraction": 0.5967880487442017, "alphanum_fraction": 0.6128477454185486, "avg_line_length": 51.83333206176758, "blob_id": "0d1a9df042d901814fe0fa3d3373babec0bd7f9f", "content_id": "4daaffda8f5be0c0bacaea58d1f220bc351164f6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3487, "license_type": "no_license", "max_line_length": 272, "num_lines": 66, "path": "/git/zhihu/spiders/zhihuw.py", "repo_name": "seeyoupass/lagou", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport scrapy\nimport json\nfrom zhihu.items import ZhihuItem\n\n\nclass ZhihuwSpider(scrapy.Spider):\n name = 'zhihuw'\n allowed_domains = ['https://www.zhihu.com']\n start_urls = ['http://www.zhihu.com/']\n\n\n def __init__(self):\n\n self.headers={#'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',\n #'Accept-Encoding': 'gzip, deflate, br',\n #'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',\n #'Cache-Control': 'max-age=0',\n #'Connection': 'keep-alive',\n 'Host': 'www.zhihu.com',\n 'Referer': 'https://www.zhihu.com/signup?next=%2F',\n #'Upgrade-Insecure-Requests': '1',\n 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; 
x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36'\n }\n self.request_url='https://www.zhihu.com/api/v3/feed/topstory/hot-list-web?limit=50&desktop=true'\n\n\n def start_requests(self):\n yield scrapy.Request(url=self.request_url,headers=self.headers,callback=self.parse)\n\n def parse(self, response):\n jd=json.loads(response.text)\n for url in jd['data']:\n #print(url['target']['link']['url'])\n url_page=url['target']['link']['url']\n yield scrapy.Request(url=url_page,headers=self.headers,callback=self.parse_detail,dont_filter=True)\n\n def parse_detail(self,response):\n item=ZhihuItem()\n item['question_title']=response.css('.QuestionHeader-title ::text').extract_first()\n item['tags']=','.join(response.css('.Tag-content .Popover div ::text').extract())\n item['question_detial']=response.xpath('//span[@class=\"RichText ztext\"]/text()').extract_first()\n item['answer']= response.css('.List-headerText span::text').extract_first()\n item['answer_user']= response.css('.UserLink-link::text').extract_first()\n #item['text']=','.join( response.css('.RichText p ::text').extract())\n item['text'] = ','.join( response.xpath('//span[@class=\"RichText ztext CopyrightRichText-richText\"]/p/text()').extract())\n item['time']= response.css('.ContentItem-time ::text').extract_first()\n item['awesome']=response.css('.Voters button ::text').extract_first()\n item['comment_total']= response.xpath('//div[@class=\"ContentItem-actions RichContent-actions\"]/button/text()').extract_first()\n item['url']=response.url\n print(response.url)\n comment_detail=eval(response.xpath('//div[@class=\"ContentItem AnswerItem\"]/@data-zop').extract_first())['itemId']\n comment_url='https://www.zhihu.com/api/v4/answers/{}/root_comments?include=data%5B*%5D.author%2Ccollapsed%2Creply_to_author%2Cdisliked%2Ccontent%2Cvoting%2Cvote_count%2Cis_parent_author%2Cis_author&order=normal&limit=20&offset=0&status=open'.format(comment_detail)\n yield scrapy.Request(url=comment_url,headers=self.headers,callback=self.parse_comment,dont_filter=True)\n yield item\n\n def parse_comment(self,response):\n item = ZhihuItem()\n jd = json.loads(response.text)\n name=[]\n content=[]\n for comment in jd['data']:\n name.append(comment['author']['member']['name'])\n content.append(comment['content'])\n item['comment_content']=dict(zip(name,content))\n yield item\n" } ]
4
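Both spiders in the record above drive the same loop against Zhihu's paginated JSON API: request a page, yield one item per entry under 'data', then follow 'paging.next' until the feed is exhausted. Below is a minimal sketch of that pattern, not the repo's own code; the endpoint, spider name, and yielded field names (example.com, 'votes') are placeholder assumptions.

import json
import scrapy

class PagedApiSpider(scrapy.Spider):
    # sketch only: fetch a JSON page, emit items, follow paging.next
    name = 'paged_api'

    def start_requests(self):
        # hypothetical first page of a paginated JSON API
        yield scrapy.Request('https://example.com/api/v1/answers?limit=20&offset=0',
                             callback=self.parse)

    def parse(self, response):
        payload = json.loads(response.text)
        for entry in payload['data']:
            # plain dicts are valid scrapy items, so no Item class is needed here
            yield {'author': entry['author']['name'],
                   'votes': entry['voteup_count']}
        next_url = payload['paging']['next']
        if next_url is not None:
            # dont_filter=True stops scrapy's dupefilter from dropping API
            # pages whose URLs differ only in their offset parameter
            yield scrapy.Request(next_url, callback=self.parse, dont_filter=True)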
seyfullah/raccoon_dataset
https://github.com/seyfullah/raccoon_dataset
1da90588d08ab3d058c09c4cb22153c89cf86df5
b38f50b98ec4675f48f40aa44e95bda9deab5eb0
a50ddc2b85b26d12af9cee0da7c53be834671462
refs/heads/master
2020-10-01T15:31:08.367619
2019-12-12T15:27:54
2019-12-12T15:27:54
227,565,689
0
0
MIT
2019-12-12T09:13:57
2019-12-10T11:37:51
2019-06-18T14:27:48
null
[ { "alpha_fraction": 0.5435879826545715, "alphanum_fraction": 0.5681299567222595, "avg_line_length": 52.180145263671875, "blob_id": "89e851d3d6981a067b4b45c248a57f9970a6d047", "content_id": "996350d3ea7986d8d361caa014e4da0f9cccf655", "detected_licenses": [ "LicenseRef-scancode-unknown-license-reference", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 14465, "license_type": "permissive", "max_line_length": 95, "num_lines": 272, "path": "/test_generate_tfrecord.py", "repo_name": "seyfullah/raccoon_dataset", "src_encoding": "UTF-8", "text": "import os\nimport PIL\nimport generate_tfrecord\nimport numpy as np\nimport pandas as pd\nimport tensorflow as tf\n\n\nclass CSVToTFExampleTest(tf.test.TestCase):\n def _assertProtoEqual(self, proto_field, expectation):\n proto_list = [p for p in proto_field]\n self.assertListEqual(proto_list, expectation)\n\n def test_csv_to_tf_example_one_airplane_per_file(self):\n \"\"\"Generate tf records for one airplane from one file.\"\"\"\n image_file_name = 'tmp_airplane_image.jpg'\n image_data = np.random.rand(256, 256, 3)\n save_path = os.path.join(self.get_temp_dir(), image_file_name)\n image = PIL.Image.fromarray(image_data, 'RGB')\n image.save(save_path)\n\n column_names = ['filename', 'width', 'height', 'class', 'xmin', 'ymin', 'xmax', 'ymax']\n airplane_data = [('tmp_airplane_image.jpg', 256, 256, 'airplane', 64, 64, 192, 192)]\n airplane_df = pd.DataFrame(airplane_data, columns=column_names)\n\n grouped = generate_tfrecord.split(airplane_df, 'filename')\n for group in grouped:\n example = generate_tfrecord.create_tf_example(group, self.get_temp_dir())\n self._assertProtoEqual(\n example.features.feature['image/height'].int64_list.value, [256])\n self._assertProtoEqual(\n example.features.feature['image/width'].int64_list.value, [256])\n self._assertProtoEqual(\n example.features.feature['image/filename'].bytes_list.value,\n [image_file_name.encode('utf-8')])\n self._assertProtoEqual(\n example.features.feature['image/source_id'].bytes_list.value,\n [image_file_name.encode('utf-8')])\n self._assertProtoEqual(\n example.features.feature['image/format'].bytes_list.value, [b'jpg'])\n self._assertProtoEqual(\n example.features.feature['image/object/bbox/xmin'].float_list.value,\n [0.25])\n self._assertProtoEqual(\n example.features.feature['image/object/bbox/ymin'].float_list.value,\n [0.25])\n self._assertProtoEqual(\n example.features.feature['image/object/bbox/xmax'].float_list.value,\n [0.75])\n self._assertProtoEqual(\n example.features.feature['image/object/bbox/ymax'].float_list.value,\n [0.75])\n self._assertProtoEqual(\n example.features.feature['image/object/class/text'].bytes_list.value,\n [b'airplane'])\n self._assertProtoEqual(\n example.features.feature['image/object/class/label'].int64_list.value,\n [1])\n\n def test_csv_to_tf_example_multiple_airplanes_per_file(self):\n \"\"\"Generate tf records for multiple airplanes from one file.\"\"\"\n image_file_name = 'tmp_airplane_image.jpg'\n image_data = np.random.rand(256, 256, 3)\n save_path = os.path.join(self.get_temp_dir(), image_file_name)\n image = PIL.Image.fromarray(image_data, 'RGB')\n image.save(save_path)\n\n column_names = ['filename', 'width', 'height', 'class', 'xmin', 'ymin', 'xmax', 'ymax']\n airplane_data = [('tmp_airplane_image.jpg', 256, 256, 'airplane', 64, 64, 192, 192),\n ('tmp_airplane_image.jpg', 256, 256, 'airplane', 96, 96, 128, 128)]\n airplane_df = pd.DataFrame(airplane_data, columns=column_names)\n\n grouped = 
generate_tfrecord.split(airplane_df, 'filename')\n for group in grouped:\n example = generate_tfrecord.create_tf_example(group, self.get_temp_dir())\n self._assertProtoEqual(\n example.features.feature['image/height'].int64_list.value, [256])\n self._assertProtoEqual(\n example.features.feature['image/width'].int64_list.value, [256])\n self._assertProtoEqual(\n example.features.feature['image/filename'].bytes_list.value,\n [image_file_name.encode('utf-8')])\n self._assertProtoEqual(\n example.features.feature['image/source_id'].bytes_list.value,\n [image_file_name.encode('utf-8')])\n self._assertProtoEqual(\n example.features.feature['image/format'].bytes_list.value, [b'jpg'])\n self._assertProtoEqual(\n example.features.feature['image/object/bbox/xmin'].float_list.value,\n [0.25, 0.375])\n self._assertProtoEqual(\n example.features.feature['image/object/bbox/ymin'].float_list.value,\n [0.25, 0.375])\n self._assertProtoEqual(\n example.features.feature['image/object/bbox/xmax'].float_list.value,\n [0.75, 0.5])\n self._assertProtoEqual(\n example.features.feature['image/object/bbox/ymax'].float_list.value,\n [0.75, 0.5])\n self._assertProtoEqual(\n example.features.feature['image/object/class/text'].bytes_list.value,\n [b'airplane', b'airplane'])\n self._assertProtoEqual(\n example.features.feature['image/object/class/label'].int64_list.value,\n [1, 1])\n\n def test_csv_to_tf_example_one_airplanes_multiple_files(self):\n \"\"\"Generate tf records for one airplane for multiple files.\"\"\"\n image_file_one = 'tmp_airplane_image_1.jpg'\n image_file_two = 'tmp_airplane_image_2.jpg'\n image_data = np.random.rand(256, 256, 3)\n save_path_one = os.path.join(self.get_temp_dir(), image_file_one)\n save_path_two = os.path.join(self.get_temp_dir(), image_file_two)\n image = PIL.Image.fromarray(image_data, 'RGB')\n image.save(save_path_one)\n image.save(save_path_two)\n\n column_names = ['filename', 'width', 'height', 'class', 'xmin', 'ymin', 'xmax', 'ymax']\n airplane_data = [('tmp_airplane_image_1.jpg', 256, 256, 'airplane', 64, 64, 192, 192),\n ('tmp_airplane_image_2.jpg', 256, 256, 'airplane', 96, 96, 128, 128)]\n airplane_df = pd.DataFrame(airplane_data, columns=column_names)\n\n grouped = generate_tfrecord.split(airplane_df, 'filename')\n for group in grouped:\n if group.filename == image_file_one:\n example = generate_tfrecord.create_tf_example(group, self.get_temp_dir())\n self._assertProtoEqual(\n example.features.feature['image/height'].int64_list.value, [256])\n self._assertProtoEqual(\n example.features.feature['image/width'].int64_list.value, [256])\n self._assertProtoEqual(\n example.features.feature['image/filename'].bytes_list.value,\n [image_file_one.encode('utf-8')])\n self._assertProtoEqual(\n example.features.feature['image/source_id'].bytes_list.value,\n [image_file_one.encode('utf-8')])\n self._assertProtoEqual(\n example.features.feature['image/format'].bytes_list.value, [b'jpg'])\n self._assertProtoEqual(\n example.features.feature['image/object/bbox/xmin'].float_list.value,\n [0.25])\n self._assertProtoEqual(\n example.features.feature['image/object/bbox/ymin'].float_list.value,\n [0.25])\n self._assertProtoEqual(\n example.features.feature['image/object/bbox/xmax'].float_list.value,\n [0.75])\n self._assertProtoEqual(\n example.features.feature['image/object/bbox/ymax'].float_list.value,\n [0.75])\n self._assertProtoEqual(\n example.features.feature['image/object/class/text'].bytes_list.value,\n [b'airplane'])\n self._assertProtoEqual(\n 
example.features.feature['image/object/class/label'].int64_list.value,\n [1])\n elif group.filename == image_file_two:\n example = generate_tfrecord.create_tf_example(group, self.get_temp_dir())\n self._assertProtoEqual(\n example.features.feature['image/height'].int64_list.value, [256])\n self._assertProtoEqual(\n example.features.feature['image/width'].int64_list.value, [256])\n self._assertProtoEqual(\n example.features.feature['image/filename'].bytes_list.value,\n [image_file_two.encode('utf-8')])\n self._assertProtoEqual(\n example.features.feature['image/source_id'].bytes_list.value,\n [image_file_two.encode('utf-8')])\n self._assertProtoEqual(\n example.features.feature['image/format'].bytes_list.value, [b'jpg'])\n self._assertProtoEqual(\n example.features.feature['image/object/bbox/xmin'].float_list.value,\n [0.375])\n self._assertProtoEqual(\n example.features.feature['image/object/bbox/ymin'].float_list.value,\n [0.375])\n self._assertProtoEqual(\n example.features.feature['image/object/bbox/xmax'].float_list.value,\n [0.5])\n self._assertProtoEqual(\n example.features.feature['image/object/bbox/ymax'].float_list.value,\n [0.5])\n self._assertProtoEqual(\n example.features.feature['image/object/class/text'].bytes_list.value,\n [b'airplane'])\n self._assertProtoEqual(\n example.features.feature['image/object/class/label'].int64_list.value,\n [1])\n\n def test_csv_to_tf_example_multiple_airplanes_multiple_files(self):\n \"\"\"Generate tf records for multiple airplanes for multiple files.\"\"\"\n image_file_one = 'tmp_airplane_image_1.jpg'\n image_file_two = 'tmp_airplane_image_2.jpg'\n image_data = np.random.rand(256, 256, 3)\n save_path_one = os.path.join(self.get_temp_dir(), image_file_one)\n save_path_two = os.path.join(self.get_temp_dir(), image_file_two)\n image = PIL.Image.fromarray(image_data, 'RGB')\n image.save(save_path_one)\n image.save(save_path_two)\n\n column_names = ['filename', 'width', 'height', 'class', 'xmin', 'ymin', 'xmax', 'ymax']\n airplane_data = [('tmp_airplane_image_1.jpg', 256, 256, 'airplane', 64, 64, 192, 192),\n ('tmp_airplane_image_1.jpg', 256, 256, 'airplane', 32, 32, 96, 96),\n ('tmp_airplane_image_2.jpg', 256, 256, 'airplane', 96, 96, 128, 128)]\n airplane_df = pd.DataFrame(airplane_data, columns=column_names)\n\n grouped = generate_tfrecord.split(airplane_df, 'filename')\n for group in grouped:\n if group.filename == image_file_one:\n example = generate_tfrecord.create_tf_example(group, self.get_temp_dir())\n self._assertProtoEqual(\n example.features.feature['image/height'].int64_list.value, [256])\n self._assertProtoEqual(\n example.features.feature['image/width'].int64_list.value, [256])\n self._assertProtoEqual(\n example.features.feature['image/filename'].bytes_list.value,\n [image_file_one.encode('utf-8')])\n self._assertProtoEqual(\n example.features.feature['image/source_id'].bytes_list.value,\n [image_file_one.encode('utf-8')])\n self._assertProtoEqual(\n example.features.feature['image/format'].bytes_list.value, [b'jpg'])\n self._assertProtoEqual(\n example.features.feature['image/object/bbox/xmin'].float_list.value,\n [0.25, 0.125])\n self._assertProtoEqual(\n example.features.feature['image/object/bbox/ymin'].float_list.value,\n [0.25, 0.125])\n self._assertProtoEqual(\n example.features.feature['image/object/bbox/xmax'].float_list.value,\n [0.75, 0.375])\n self._assertProtoEqual(\n example.features.feature['image/object/bbox/ymax'].float_list.value,\n [0.75, 0.375])\n self._assertProtoEqual(\n 
example.features.feature['image/object/class/text'].bytes_list.value,\n [b'airplane', b'airplane'])\n self._assertProtoEqual(\n example.features.feature['image/object/class/label'].int64_list.value,\n [1, 1])\n elif group.filename == image_file_two:\n example = generate_tfrecord.create_tf_example(group, self.get_temp_dir())\n self._assertProtoEqual(\n example.features.feature['image/height'].int64_list.value, [256])\n self._assertProtoEqual(\n example.features.feature['image/width'].int64_list.value, [256])\n self._assertProtoEqual(\n example.features.feature['image/filename'].bytes_list.value,\n [image_file_two.encode('utf-8')])\n self._assertProtoEqual(\n example.features.feature['image/source_id'].bytes_list.value,\n [image_file_two.encode('utf-8')])\n self._assertProtoEqual(\n example.features.feature['image/format'].bytes_list.value, [b'jpg'])\n self._assertProtoEqual(\n example.features.feature['image/object/bbox/xmin'].float_list.value,\n [0.375])\n self._assertProtoEqual(\n example.features.feature['image/object/bbox/ymin'].float_list.value,\n [0.375])\n self._assertProtoEqual(\n example.features.feature['image/object/bbox/xmax'].float_list.value,\n [0.5])\n self._assertProtoEqual(\n example.features.feature['image/object/bbox/ymax'].float_list.value,\n [0.5])\n self._assertProtoEqual(\n example.features.feature['image/object/class/text'].bytes_list.value,\n [b'airplane'])\n self._assertProtoEqual(\n example.features.feature['image/object/class/label'].int64_list.value,\n [1])\n" }, { "alpha_fraction": 0.4377717077732086, "alphanum_fraction": 0.46836256980895996, "avg_line_length": 35.53529357910156, "blob_id": "cc913d455218969c21a41ae8ded8187a95b750a7", "content_id": "5fb2777bfc3c6d7cbdb6fbe6ec19a10b767fe6d2", "detected_licenses": [ "LicenseRef-scancode-unknown-license-reference", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6211, "license_type": "permissive", "max_line_length": 119, "num_lines": 170, "path": "/test_xml_to_csv.py", "repo_name": "seyfullah/raccoon_dataset", "src_encoding": "UTF-8", "text": "import shutil\nimport os\nimport tempfile\nimport unittest\nimport xml_to_csv\nfrom xml.etree import ElementTree as ET\n\n\nclass XMLToCSVTest(unittest.TestCase):\n def test_one_airplane_one_xml(self):\n xml_file_one = \"\"\"\n <annotation verified=\"yes\">\n <folder>images</folder>\n <filename>airplane1.png</filename>\n <path>airplane1.png</path>\n <source>\n <database>Unknown</database>\n </source>\n <size>\n <width>256</width>\n <height>256</height>\n <depth>3</depth>\n </size>\n <segmented>0</segmented>\n <object>\n <name>airplane</name>\n <pose>Unspecified</pose>\n <truncated>0</truncated>\n <difficult>0</difficult>\n <bndbox>\n <xmin>96</xmin>\n <ymin>96</ymin>\n <xmax>128</xmax>\n <ymax>128</ymax>\n </bndbox>\n </object>\n </annotation>\n \"\"\"\n\n xml = ET.fromstring(xml_file_one)\n with tempfile.TemporaryDirectory() as tmpdirname:\n tree = ET.ElementTree(xml)\n tree.write(tmpdirname + '/test_airplane_one.xml')\n airplane_df = xml_to_csv.xml_to_csv(tmpdirname)\n self.assertEqual(airplane_df.columns.values.tolist(),\n ['filename', 'width', 'height', 'class', 'xmin', 'ymin', 'xmax', 'ymax'])\n self.assertEqual(airplane_df.values.tolist()[0], ['airplane1.png', 256, 256, 'airplane', 96, 96, 128, 128])\n\n def test_multiple_airplane_one_xml(self):\n xml_file_one = \"\"\"\n <annotation verified=\"yes\">\n <folder>images</folder>\n <filename>airplane1.png</filename>\n <path>airplane1.png</path>\n <source>\n 
<database>Unknown</database>\n </source>\n <size>\n <width>256</width>\n <height>256</height>\n <depth>3</depth>\n </size>\n <segmented>0</segmented>\n <object>\n <name>airplane</name>\n <pose>Unspecified</pose>\n <truncated>0</truncated>\n <difficult>0</difficult>\n <bndbox>\n <xmin>96</xmin>\n <ymin>96</ymin>\n <xmax>128</xmax>\n <ymax>128</ymax>\n </bndbox>\n </object>\n <object>\n <name>airplane</name>\n <pose>Unspecified</pose>\n <truncated>0</truncated>\n <difficult>0</difficult>\n <bndbox>\n <xmin>32</xmin>\n <ymin>32</ymin>\n <xmax>64</xmax>\n <ymax>64</ymax>\n </bndbox>\n </object>\n </annotation>\n \"\"\"\n\n xml = ET.fromstring(xml_file_one)\n with tempfile.TemporaryDirectory() as tmpdirname:\n tree = ET.ElementTree(xml)\n tree.write(tmpdirname + '/test_airplane_one.xml')\n airplane_df = xml_to_csv.xml_to_csv(tmpdirname)\n self.assertEqual(airplane_df.columns.values.tolist(),\n ['filename', 'width', 'height', 'class', 'xmin', 'ymin', 'xmax', 'ymax'])\n self.assertEqual(airplane_df.values.tolist()[0], ['airplane1.png', 256, 256, 'airplane', 96, 96, 128, 128])\n self.assertEqual(airplane_df.values.tolist()[1], ['airplane1.png', 256, 256, 'airplane', 32, 32, 64, 64])\n\n def test_one_airplane_multiple_xml(self):\n xml_file_one = \"\"\"\n <annotation verified=\"yes\">\n <folder>images</folder>\n <filename>airplane1.png</filename>\n <path>airplane1.png</path>\n <source>\n <database>Unknown</database>\n </source>\n <size>\n <width>256</width>\n <height>256</height>\n <depth>3</depth>\n </size>\n <segmented>0</segmented>\n <object>\n <name>airplane</name>\n <pose>Unspecified</pose>\n <truncated>0</truncated>\n <difficult>0</difficult>\n <bndbox>\n <xmin>96</xmin>\n <ymin>96</ymin>\n <xmax>128</xmax>\n <ymax>128</ymax>\n </bndbox>\n </object>\n </annotation>\n \"\"\"\n xml_file_two = \"\"\"\n <annotation verified=\"yes\">\n <folder>images</folder>\n <filename>airplane2.png</filename>\n <path>airplane2.png</path>\n <source>\n <database>Unknown</database>\n </source>\n <size>\n <width>256</width>\n <height>256</height>\n <depth>3</depth>\n </size>\n <segmented>0</segmented>\n <object>\n <name>airplane</name>\n <pose>Unspecified</pose>\n <truncated>0</truncated>\n <difficult>0</difficult>\n <bndbox>\n <xmin>128</xmin>\n <ymin>128</ymin>\n <xmax>194</xmax>\n <ymax>194</ymax>\n </bndbox>\n </object>\n </annotation>\n \"\"\"\n xml_list = [xml_file_one, xml_file_two]\n tmpdirname = tempfile.mkdtemp()\n for index, x in enumerate(xml_list):\n xml = ET.fromstring(x)\n tree = ET.ElementTree(xml)\n tree.write(tmpdirname + '/test_airplane_{}.xml'.format(index))\n\n airplane_df = xml_to_csv.xml_to_csv(tmpdirname)\n self.assertEqual(airplane_df.columns.values.tolist(),\n ['filename', 'width', 'height', 'class', 'xmin', 'ymin', 'xmax', 'ymax'])\n self.assertEqual(airplane_df.values.tolist()[0], ['airplane1.png', 256, 256, 'airplane', 96, 96, 128, 128])\n self.assertEqual(airplane_df.values.tolist()[1], ['airplane2.png', 256, 256, 'airplane', 128, 128, 194, 194])\n shutil.rmtree(tmpdirname)\n" }, { "alpha_fraction": 0.7734375, "alphanum_fraction": 0.7890625, "avg_line_length": 38.46154022216797, "blob_id": "61a2e6a68ffd1e30fbeb1c2e40545d81ea269858", "content_id": "ad180dc7ad1067ad62054189ab30efd7eef2c9a2", "detected_licenses": [ "LicenseRef-scancode-unknown-license-reference", "MIT" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 512, "license_type": "permissive", "max_line_length": 107, "num_lines": 13, "path": "/Readme.txt", "repo_name": "seyfullah/raccoon_dataset", 
"src_encoding": "UTF-8", "text": "Python 3.6 kur, pip dahil \npip install pandas\npip install Pillow\npip install tensorflow==1.4\npip install object_detection\npip install numpy==1.14.5\n\npython generate_tfrecord.py csv_input=data/airplane_labels.csv output_path=data/out.record image_dir=images\n# Create train data:\npython generate_tfrecord.py --csv_input=data/train_labels.csv --output_path=train.record --image_dir=images\n\n# Create test data:\npython generate_tfrecord.py --csv_input=data/test_labels.csv --output_path=test.record --image_dir=images" } ]
3
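Every bbox assertion in the tfrecord tests above reduces to one rule: create_tf_example is expected to divide pixel box edges by the image width and height to produce the normalized image/object/bbox/* features. A minimal sketch of that arithmetic follows; normalize_box is our illustrative name, not a function in the repo.

def normalize_box(xmin, ymin, xmax, ymax, width, height):
    # pixel coordinates -> [0, 1] fractions of the image, the convention
    # the tests assert for the bbox float_list values
    return (xmin / width, ymin / height, xmax / width, ymax / height)

# the two fixture boxes from the 256x256 test images above:
assert normalize_box(64, 64, 192, 192, 256, 256) == (0.25, 0.25, 0.75, 0.75)
assert normalize_box(96, 96, 128, 128, 256, 256) == (0.375, 0.375, 0.5, 0.5)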
digitalsleuth/stringsifter
https://github.com/digitalsleuth/stringsifter
cbbb959b6e8f4861b1b90e326a00db94cf6f344f
f3f7d14743fabac53d66873b84593514173635ec
0a71b6a62ea71c4dfa38b9449fb9f08a6fc115ee
refs/heads/master
2023-06-08T06:39:19.300806
2023-06-05T02:18:31
2023-06-05T02:18:31
292,619,706
0
1
Apache-2.0
2020-09-03T16:13:44
2023-06-04T16:06:36
2023-06-05T02:04:26
Python
[ { "alpha_fraction": 0.7277227640151978, "alphanum_fraction": 0.7376237511634827, "avg_line_length": 17.363636016845703, "blob_id": "c77cfec135d2c43984fcfc5d041919e4ca8870a9", "content_id": "4a796cf2000ff0247486b039c47a4206d35ae2dd", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Dockerfile", "length_bytes": 202, "license_type": "permissive", "max_line_length": 36, "num_lines": 11, "path": "/docker/Dockerfile", "repo_name": "digitalsleuth/stringsifter", "src_encoding": "UTF-8", "text": "FROM python:3.8\n\nRUN pip install --upgrade pip pipenv\n\nWORKDIR /src/stringsifter\nCOPY . /src/stringsifter\n\nRUN pipenv install --system --deploy\nRUN pip install -e /src/stringsifter\n\nCMD [ \"/bin/bash\" ]\n" }, { "alpha_fraction": 0.5682926774024963, "alphanum_fraction": 0.5817072987556458, "avg_line_length": 36.272727966308594, "blob_id": "9872316706459ab795e2a47add145a73d1973ad1", "content_id": "70c2314abae5302647268678f8e41fee5a75f3b5", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1640, "license_type": "permissive", "max_line_length": 130, "num_lines": 44, "path": "/stringsifter/flarestrings.py", "repo_name": "digitalsleuth/stringsifter", "src_encoding": "UTF-8", "text": "# Copyright (C) 2019 FireEye, Inc. All Rights Reserved.\n\nimport re\nimport sys\nimport argparse\n\nif __package__ is None or __package__ == \"\":\n from version import __version__\nelse:\n from .version import __version__\n\nASCII_BYTE = b\" !\\\"#\\$%&\\'\\(\\)\\*\\+,-\\./0123456789:;<=>\\?@ABCDEFGHIJKLMNOPQRSTUVWXYZ\\[\\]\\^_`abcdefghijklmnopqrstuvwxyz\\{\\|\\}\\\\\\~\\t\"\n\n\ndef main():\n parser = argparse.ArgumentParser()\n # to read binary data from stdin use sys.stdin.buffer.\n # sys.stdin is in 'r' mode, not 'rb'\n parser.add_argument('files', nargs='*', type=argparse.FileType('rb'),\n default=[sys.stdin.buffer], help='files to process, or pipe to stdin')\n parser.add_argument('--version', action='version', version=__version__)\n parser.add_argument('-n', '--min-len', type=int, default=4,\n help='Print sequences of characters that are at least ' +\n 'min-len characters long, instead of the default 4.')\n args = parser.parse_args()\n\n # regular expressions from flare-floss:\n # https://github.com/fireeye/flare-floss/blob/master/floss/strings.py#L7-L9\n re_narrow = re.compile(b'([%s]{%d,})' % (ASCII_BYTE, args.min_len))\n re_wide = re.compile(b'((?:[%s]\\x00){%d,})' % (ASCII_BYTE, args.min_len))\n\n for f in args.files:\n b = f.read()\n for match in re_narrow.finditer(b):\n print(match.group().decode('ascii'))\n for match in re_wide.finditer(b):\n try:\n print(match.group().decode('utf-16'))\n except UnicodeDecodeError:\n pass\n\n\nif __name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.6496519446372986, "alphanum_fraction": 0.6589326858520508, "avg_line_length": 18.590909957885742, "blob_id": "2bcb8108b243cbc55f433018905b94096a547568", "content_id": "76ae314ba17b66088a32e49d859f979b533e765b", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 431, "license_type": "permissive", "max_line_length": 55, "num_lines": 22, "path": "/stringsifter/lib/util.py", "repo_name": "digitalsleuth/stringsifter", "src_encoding": "UTF-8", "text": "# Copyright (C) 2019 FireEye, Inc. 
All Rights Reserved.\n\nimport io\nimport os\nimport sys\nimport contextlib\n\n\ndef package_base():\n \"\"\"\n return package base folder (one level up from here)\n \"\"\"\n pth = os.path.join(os.path.dirname(__file__), '..')\n return os.path.abspath(pth)\n\n\[email protected]\ndef redirect_stderr():\n _stderr = sys.stderr\n sys.stderr = io.StringIO()\n yield\n sys.stderr = _stderr\n" }, { "alpha_fraction": 0.2927461266517639, "alphanum_fraction": 0.4179620146751404, "avg_line_length": 23.125, "blob_id": "a6b27956e746d343978097d3060b8ba83fe40edf", "content_id": "f42975963b63a2d787d055b6eccef20febf8b185", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1158, "license_type": "permissive", "max_line_length": 85, "num_lines": 48, "path": "/stringsifter/lib/stats.py", "repo_name": "digitalsleuth/stringsifter", "src_encoding": "UTF-8", "text": "# Copyright (C) 2019 FireEye, Inc. All Rights Reserved.\n\n\"\"\"\nenglish letter probabilities\n\ntable from http://en.algoritmy.net/article/40379/Letter-frequency-English\n\"\"\"\n\nenglish_letter_probs_percent = [\n ['a', 8.167],\n ['b', 1.492],\n ['c', 2.782],\n ['d', 4.253],\n ['e', 12.702],\n ['f', 2.228],\n ['g', 2.015],\n ['h', 6.094],\n ['i', 6.966],\n ['j', 0.153],\n ['k', 0.772],\n ['l', 4.025],\n ['m', 2.406],\n ['n', 6.749],\n ['o', 7.507],\n ['p', 1.929],\n ['q', 0.095],\n ['r', 5.987],\n ['s', 6.327],\n ['t', 9.056],\n ['u', 2.758],\n ['v', 0.978],\n ['w', 2.360],\n ['x', 0.150],\n ['y', 1.974],\n ['z', 0.074]]\n\nenglish_letter_probs = {lt: (per * 0.01) for lt, per in english_letter_probs_percent}\n\n\n\"\"\"\nScrabble Scores\ntable from https://en.wikipedia.org/wiki/Scrabble_letter_distributions\n\"\"\"\nscrabble_dict = {\"a\": 1, \"b\": 3, \"c\": 3, \"d\": 2, \"e\": 1, \"f\": 4,\n \"g\": 2, \"h\": 4, \"i\": 1, \"j\": 8, \"k\": 5, \"l\": 1,\n \"m\": 3, \"n\": 1, \"o\": 1, \"p\": 3, \"q\": 10, \"r\": 1,\n \"s\": 1, \"t\": 1, \"u\": 1, \"v\": 4, \"w\": 4, \"x\": 8,\n \"y\": 4, \"z\": 10}\n" }, { "alpha_fraction": 0.5940658450126648, "alphanum_fraction": 0.5983045101165771, "avg_line_length": 31.284210205078125, "blob_id": "1a8eb45d999c338e289ea2fc3cfd4a78b8209e15", "content_id": "c0ae36c6d477f3122fd3bd0cc875a736c380f765", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3067, "license_type": "permissive", "max_line_length": 69, "num_lines": 95, "path": "/tests/test_stringsifter.py", "repo_name": "digitalsleuth/stringsifter", "src_encoding": "UTF-8", "text": "# Copyright (C) 2019 FireEye, Inc. 
All Rights Reserved.\n\nimport os\nimport numpy\nfrom io import StringIO\nimport stringsifter.rank_strings as rank_strings\n\ntest_strings = 'testing text\\n' \\\n 'nagain\\n' \\\n 'wheredoesitgo\\n' \\\n 'testing text\\n' \\\n 'nagain\\n' \\\n 'wheredoesitgo\\n' \\\n 'testing text\\n' \\\n 'nagain\\n' \\\n 'wheredoesitgo\\n' \\\n 'testing text\\n'\n\n\ndef _get_rank_strings_stdoutput(capsys, kwargs):\n rank_strings.main(**kwargs)\n stdout = capsys.readouterr().out\n return stdout.split('\\n')[:-1]\n\n\ndef _get_kwargs(input_strings=test_strings, cutoff=None,\n cutoff_score=numpy.nan, scores=False, batch=False):\n return {'input_strings': StringIO(input_strings),\n 'cutoff': cutoff,\n 'cutoff_score': cutoff_score,\n 'scores': scores,\n 'batch': batch}\n\n\ndef test_string_length(featurizer):\n test_set = [['', 0],\n ['foo', 3],\n ['everybody', 9]]\n for s, true_len in test_set:\n feat_len = featurizer.string_length(s)\n assert feat_len == true_len\n\n\ndef test_default(capsys):\n \"\"\"\n test default processing flow: # strings in == # strings out\n \"\"\"\n output_lines = _get_rank_strings_stdoutput(capsys, _get_kwargs())\n assert len(output_lines) == 10\n\n\ndef test_scores(capsys):\n scores_value = True\n output_lines = _get_rank_strings_stdoutput(\n capsys, _get_kwargs(scores=scores_value))\n split_output_lines = [output_line.split(\",\") for output_line\n in output_lines]\n previous_score = numpy.inf\n for output_score, output_string in split_output_lines:\n assert(type(output_string) is str)\n float_output_score = float(output_score)\n assert(type(float_output_score) is float)\n assert(previous_score >= float_output_score)\n previous_score = float_output_score\n\n\ndef test_cutoff(capsys):\n cutoff_value = 5\n output_lines = _get_rank_strings_stdoutput(\n capsys, _get_kwargs(cutoff=cutoff_value))\n assert len(output_lines) == cutoff_value\n\n\ndef test_cutoff_score(capsys):\n scores_value = True\n cutoff_score_value = 0.0\n output_lines = _get_rank_strings_stdoutput(\n capsys, _get_kwargs(scores=scores_value,\n cutoff_score=cutoff_score_value))\n split_output_lines = [output_line.split(\",\") for output_line\n in output_lines]\n for output_score, output_string in split_output_lines:\n assert float(output_score) >= cutoff_score_value\n\n\ndef test_batch():\n batch_value = 'tests/fixtures/'\n batch_files = [batch_value + batch_file for batch_file in\n os.listdir(batch_value)]\n output_lines = rank_strings.main(\n **_get_kwargs(batch=batch_value))\n for batch_file in batch_files:\n ranking_file = batch_file + '.ranked_strings'\n assert os.path.isfile(ranking_file) is True\n os.remove(ranking_file)\n" }, { "alpha_fraction": 0.5374799370765686, "alphanum_fraction": 0.559758722782135, "avg_line_length": 39.79943084716797, "blob_id": "71dfe36d8bde3226c0e382773c05f7f81f3d6228", "content_id": "f4b8624a658ee053516ff5ded6970578d25e5ce9", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 28682, "license_type": "permissive", "max_line_length": 293, "num_lines": 703, "path": "/stringsifter/preprocess.py", "repo_name": "digitalsleuth/stringsifter", "src_encoding": "UTF-8", "text": "# Copyright (C) 2019 FireEye, Inc. 
All Rights Reserved.\n\nimport re\nimport os\nimport json\nimport math\nimport numpy\nimport base64\nimport joblib\nimport string\nimport binascii\nimport fasttext\nimport unicodedata\nimport collections\nimport sklearn.pipeline\nimport sklearn.feature_extraction.text\nfrom sklearn.base import BaseEstimator, TransformerMixin\n\n\nif __package__ is None or __package__ == \"\":\n from lib import util\n from lib import stats\nelse:\n from .lib import util\n from .lib import stats\n\n# preload from lib\ndirname = os.path.dirname(os.path.abspath(__file__))\nwith open(os.path.join(dirname, 'lib/constants.json'), 'rb') as fid:\n constants = {k: set(v) for k, v in json.load(fid).items()}\n\nwith util.redirect_stderr():\n lid_model = fasttext.load_model(os.path.join(dirname, 'lib/lid.176.ftz'))\nmarkov_model = joblib.load(os.path.join(dirname, 'lib/markov.pkl'))\nlog_transition_probas = markov_model['transition_matrix']\nchar_idx_mapper = markov_model['key_to_idx_map']\n\n\nclass Mapper(BaseEstimator, TransformerMixin):\n def __init__(self, func):\n self.func = func\n\n def fit(self, X, y=None):\n return self\n\n def transform(self, X):\n return numpy.array(list(map(self.func, X))).reshape(-1, 1)\n\n def get_feature_names(self):\n return '0'\n\n\nclass Featurizer():\n def __init__(self):\n self.b64chars = set(string.ascii_letters + string.digits + '+/_-=')\n dnsroot_cache = list(constants['dnsroot tlds']) + \\\n ['bit', 'dev', 'onion']\n self.tldstr = '|'.join(dnsroot_cache)\n\n self.mac_only_regex = \\\n re.compile(r\"\"\"\n ^\n (?:[A-Fa-f0-9]{2}:){5}\n [A-Fa-f0-9]{2}\n $\n \"\"\", re.VERBOSE)\n\n fqdn_base = r'(([a-z0-9_-]{1,63}\\.){1,10}(%s))' % self.tldstr\n fqdn_str = fqdn_base + r'(?:\\W|$)'\n self.fqdn_strict_only_regex = re.compile(r'^' + fqdn_base + r'$', re.I)\n self.fqdn_regex = re.compile(fqdn_str, re.I)\n self.email_valid = re.compile(r'([a-z0-9_\\.\\-+]{1,256}@%s)' % fqdn_base, re.I)\n\n _u8 = r'(?:[1-9]?\\d|1\\d\\d|2[0-4]\\d|25[0-5])'\n _ipv4pre = r'(?:[^\\w.]|^)'\n _ipv4suf = r'(?=(?:[^\\w.]|\\.(?:\\W|$)|$))'\n ip_base = r'((?:%s\\.){3}%s)' % (_u8, _u8)\n self.ip_regex = re.compile(_ipv4pre + ip_base + _ipv4suf)\n\n svc_base = r'(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?):[0-9]{1,5}'\n self.svc_regex = re.compile(svc_base)\n self.md5_only_regex = re.compile(r'^[A-Fa-f0-9]{32}$')\n self.sha1_only_regex = re.compile(r'^[A-Fa-f0-9]{40}$')\n self.sha256_only_regex = re.compile(r'^[A-Fa-f0-9]{64}$')\n self.url_regex = re.compile(r'\\w+://[^ \\'\"\\t\\n\\r\\f\\v]+')\n self.pkcs_regex = re.compile(r'-----BEGIN ([a-zA-Z0-9 ]+)-----')\n self.format_regex = re.compile(r'%[-|\\+|#|0]?([\\*|0-9])?(\\.[\\*|0-9])?[h|l|j|z|t|L]?[diuoxXfFeEgGaAcspn%]')\n self.linefeed_regex = re.compile(r'\\\\\\\\n$')\n self.path_regex = re.compile(r'[A-Z|a-z]\\:\\\\\\\\[A-Za-z0-9]')\n self.pdb_regex = re.compile(r'\\w+\\.pdb\\b')\n self.guid_regex = re.compile(r'[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89ab][0-9a-fA-F]{3}-[0-9a-fA-F]{12}')\n self.event_regex = re.compile(r'On(?!User|Board|Media|Global)(?:[A-Z][a-z]+)+')\n self.keylogger_regex = re.compile(r'\\[[A-Za-z0-9\\_\\-\\+ ]{2,13}\\]')\n self.oid_regex = re.compile(r'((0\\.([0-5]|9))|(1\\.[0-3])|(2\\.(([0-2][0-8])|(4[0-2])|(4[8-9])|(5[0-2])|(999))))(\\.[0-9])+')\n self.ext_regex = re.compile(r'\\w+\\.[a-z]{3,4}\\b')\n self.prod_id_regex = re.compile(r'[0-9]{5}-[0-9A-Z]{3}-[0-9]{7}-[0-9]{5}')\n self.priv_regex = re.compile(r'Se[A-Z][A-z]+Privilege')\n self.sddl_regex = re.compile(r'[DSO]:.+;;;.+$')\n 
self.sid_regex = re.compile(r'S-(?:[0-5]|9|(11)|(12)|(16))-')\n self.whitespace_regex = re.compile(r'\\s+')\n self.letters_regex = re.compile(r'[^A-Za-z]')\n self.english_ignores = constants['windows api'].union(constants['pma important functions']).union\\\n (constants['dates']).union(constants['languages'])\n self.not_latin_unicode_names = ['ARABIC', 'SYRIAC', 'CYRILLIC', 'CJK', 'GEORGIAN']\n self.uppercase_var_name = re.compile(r'(?:\\(| |^)[A-Z]+(?:\\_[A-Z]+)+(?:\\)| |$)')\n self.period_delimited_var_name = re.compile(r'(?:\\(| |^)[a-z]{2,}(?:\\.[a-z]{2,})+(?:\\)| |$)')\n self.oss_substr_regex = re.compile(\n r'^(?:NT(?:3\\.1|3\\.5|3\\.51))|' +\n r'(?:Ultimate(?: N| Edition|\\_Edition))|' +\n r'(?:Business(?: N| Edition|\\_Edition))|' +\n r'(?:Professional(?: Edition| x64 Edition))|' +\n r'(?:Microsoft Windows (?:ME|95|98|2000|XP))|' +\n r'(?:Storage(?: Server 2003 R2| Server 2003))|' +\n r'(?:Server(?: 2008| 2003| 2003 R2|2008|2008R2))|' +\n r'(?:Windows\\+(?:2000|Home Server|Vista|Server\\+2003|7|8|XP|8\\.1))|' +\n r'(?:WIN(?:32\\_NT|\\_2008R2|\\_7|\\_2008|\\_VISTA|\\_2003|\\_XPe|\\_XP|\\_2000))|' +\n r'(?:(?:Small\\_Business\\_|Small Business |Advanced )Server)|(?:Windows Storage Server 2003)|' +\n r'(?:Windows (?:7 \\(6\\.1\\)|2000|Me|98|95|NT|Vista|7|8|10|XP|8\\.1|Server|Home Server))|' +\n r'(?:Windows Server (?:2012 R2|2003|2003 R2|2008|2008 R2|R2|2000|2012|2003 R2 \\(5\\.2\\)|2008 \\(6\\.0\\)|2008 R2 \\(6\\.1\\)))|' +\n r'(?:Standard(?:\\_Edition|\\_Edition\\_core\\_installation| Edition| Edition\\(Core\\)| x64 Edition| Edition \\(core installation\\)))|' +\n r'(?:Win(?:8|7|Server2003R2|Server2003|2K|XP64|XP| XP| 2000|HomeServer|NT|Server2012|Server2008R2|Server2008| Vista| Srv 2008| 7| 8| Srv 2003| Srv| ))|' +\n r'(?:Datacenter(?: Edition\\(Core\\)| Server| Edition for Itanium\\-based Systems| x64 Edition| Edition| Edition \\(core installation\\)|\\_Edition\\_core\\_installation|\\_Edition))|' +\n r'(?:Home(?: Premium N| Premium| Basic N| Basic| Edition| Basic Edition| Premium Edition|\\_Premium\\_Edition|\\_Basic\\_Edition|\\_Server|\\-Premium\\-Edition|\\-Basic\\-Edition|\\_Edition))|' +\n r'(?:Enterprise(?:\\_Edition|\\_Edition\\_for\\_ItaniumBased\\_System|\\_Edition\\_core\\_installation| N| Edition| Edition\\(Core\\)| x64 Edition| Edition \\(core installation\\)| Edition for Itanium\\-based Systems))|' +\n r'(?:(?:Small\\_Business\\_Server\\_Premium\\_|Small Business Server Premium |Small\\_Business\\_Server\\_Premium\\_Edition|Web\\_Server\\_|Cluster Server |Starter |Starter\\_|Cluster\\_Server\\_|32\\-bit |64\\-bit |Embedded |Tablet PC |Media Center |Web |Compute Cluster |Web Server )Edition)$')\n self.oss_exact_regex = re.compile(r'^(?:2008|2003|2000|Business|Ultimate|Vista|Seven|Professional)$')\n self.user_agents_regex = re.compile(r'[\\w\\-]+\\/[\\w\\-]+\\.[\\w\\-]+(?:\\.[\\w\\-])* ?(?:\\[[a-z]{2}\\] )?\\((?:.+[:;\\-].+|[+ ]?http://.+)\\)')\n self.hive_regex = re.compile(r'[^0-9a-zA-Z](?:hkcu|hklm|hkey\\_current\\_user|hkey\\_local\\_machine)[^0-9a-zA-Z]')\n self.namespace_regex = re.compile(r'\\\\\\\\\\.\\\\.*')\n self.msword_regex = re.compile(r'Word\\.Document')\n self.mozilla_api_regex = re.compile(r'PR\\_(?:[A-Z][a-z]{2,})+')\n self.privilege_constant_regex = re.compile(r'SE\\_(?:[A-Z]+\\_)+NAME')\n self.upx_regex = re.compile(r'\\b(?:[a-z]?upx|[A-Z]?UPX)(?:\\d|\\b)')\n self.crypto_common_regex = re.compile(r'\\b(?:rsa|aes|rc4|salt|md5)\\b')\n self.features = [\n 'string_length',\n 'has_english_text',\n 'entropy_rate',\n 'english_letter_freq_div',\n 
'average_scrabble_score',\n 'whitespace_percentage',\n 'alpha_percentage',\n 'digit_percentage',\n 'punctuation_percentage',\n 'vowel_consenant_ratio',\n 'capital_letter_ratio',\n 'title_words_ratio',\n 'average_word_length',\n 'has_ip',\n 'has_ip_srv',\n 'has_url',\n 'has_email',\n 'has_fqdn',\n 'has_namespace',\n 'has_msword_version',\n 'has_packer',\n 'has_crypto_related',\n 'is_blacklisted',\n 'has_privilege_constant',\n 'has_mozilla_api',\n 'is_strict_fqdn',\n 'has_hive_name',\n 'is_mac',\n 'has_extension',\n 'is_md5',\n 'is_sha1',\n 'is_sha256',\n 'is_irrelevant_windows_api',\n 'has_guid',\n 'is_antivirus',\n 'is_whitelisted',\n 'is_common_dll',\n 'is_boost_lib',\n 'is_delphi_lib',\n 'has_event',\n 'is_registry',\n 'has_malware_identifier',\n 'has_sid',\n 'has_keylogger',\n 'has_oid',\n 'has_product_id',\n 'is_oss',\n 'is_user_agent',\n 'has_sddl',\n 'has_protocol',\n 'is_protocol_method',\n 'is_base64',\n 'is_hex_not_numeric_not_alpha',\n 'has_format_specifier',\n 'ends_with_line_feed',\n 'has_path',\n 'has_pdb',\n 'has_privilege',\n 'is_known_xml',\n 'is_cpp_runtime',\n 'is_library',\n 'is_date',\n 'is_pe_artifact',\n 'has_public_key',\n 'markov_junk',\n 'is_x86',\n 'is_common_path',\n 'is_code_page',\n 'is_language',\n 'is_region_tag',\n 'has_not_latin',\n 'is_known_folder',\n 'is_malware_api',\n 'is_environment_variable',\n 'has_variable_name',\n 'has_padding_string'\n ]\n\n def _substring_match_bool(self, string_i, corpus):\n return int(any([(s in string_i) for s in corpus]))\n\n def _exact_match_bool(self, string_i, corpus):\n return int(string_i in corpus)\n\n def string_length(self, string_i):\n return len(string_i)\n\n def has_english_text(self, string_i, thresh_upper=0.9):\n string_i_replace_newlines = ' '.join(string_i.split('\\n'))\n fasttext_prediction = lid_model.predict(string_i_replace_newlines)\n english_prediction = '__label__en' in fasttext_prediction[0]\n confident_prediction = fasttext_prediction[1] > thresh_upper\n num_punctuation = [string_i.count(punc) for punc in string.punctuation]\n contains_no_punctuation = sum(num_punctuation) == 0\n contains_no_path = not self.has_path(string_i)\n contains_no_ext = not self.has_extension(string_i)\n contains_no_fmtSpec = not self.has_format_specifier(string_i)\n\n is_not_ignored = string_i not in self.english_ignores\n\n if english_prediction and confident_prediction and contains_no_path \\\n and contains_no_ext and contains_no_fmtSpec and is_not_ignored:\n return 1\n else:\n return 0\n\n def entropy_rate(self, string_i, base=2,\n thresh_upper=3.65, thresh_lower=1.45):\n entropy_rate = 0\n characters = list(string_i)\n\n if len(characters) <= 1:\n return 1\n\n _, letters = numpy.unique(characters, return_counts=True)\n probabilities = letters / len(characters)\n\n if numpy.count_nonzero(probabilities) <= 1:\n return 1\n\n for i in probabilities:\n entropy_rate -= i * math.log(i, base)\n\n below_thresh_lower = entropy_rate <= thresh_lower\n above_thresh_upper = entropy_rate >= thresh_upper\n if below_thresh_lower or above_thresh_upper:\n return 1\n else:\n return 0\n\n def english_letter_freq_div(self, string_i, thresh_upper=3.0):\n \"\"\"\n estimated KL divergence from english letter distribution\n (case insensitive). 
Non-alpha bytes are ignored\n low KL divergence <=> letter freqs similar to English\n \"\"\"\n counts = collections.Counter([c for c in string_i.lower() if\n c in string.ascii_lowercase])\n n = sum(counts.values())\n kl = 0.0\n for lett, ct in counts.items():\n p = ct / n\n q = stats.english_letter_probs[lett]\n kl += p * math.log2(p / q)\n return 1 if int(kl <= thresh_upper) else -1\n\n def average_scrabble_score(self, string_i, thresh_lower=1.,\n thresh_upper=3.51):\n lowered_letters = [char for char in string_i.lower() if char.isalpha()]\n if len(lowered_letters) > 0:\n raw_scrabble_score = sum(\n [stats.scrabble_dict.get(char, 0) for char in lowered_letters])\n has_low_score = (raw_scrabble_score / len(lowered_letters) <=\n thresh_lower)\n has_high_score = (raw_scrabble_score / len(lowered_letters) >=\n thresh_upper)\n has_extension = self.has_extension(string_i)\n has_path = self.has_path(string_i)\n has_format_specifier = self.has_format_specifier(string_i)\n has_low_score_substr = self._substring_match_bool(\n string_i.lower(),\n constants[\"low_scrabble_score_strings\"])\n has_relevant_noise = (has_extension or has_path or\n has_format_specifier or has_low_score_substr)\n if not has_relevant_noise and (has_low_score or\n has_high_score):\n return -1\n else:\n return 0\n else:\n return 0\n\n def whitespace_percentage(self, string_i):\n if len(string_i) > 0:\n whitespace_removed = re.sub(self.whitespace_regex, '', string_i)\n return (len(string_i) - len(whitespace_removed)) / len(string_i)\n else:\n return 0\n\n def alpha_percentage(self, string_i):\n whitespace_removed = re.sub(self.whitespace_regex, '', string_i)\n if len(whitespace_removed) > 0:\n num_alpha = len([char_i for char_i in whitespace_removed\n if char_i.isalpha()])\n return num_alpha / len(whitespace_removed)\n else:\n return 0\n\n def digit_percentage(self, string_i):\n whitespace_removed = re.sub(self.whitespace_regex, '', string_i)\n if len(whitespace_removed) > 0:\n num_digits = len([char_i for char_i in whitespace_removed\n if char_i.isdigit()])\n return num_digits / len(whitespace_removed)\n else:\n return 0\n\n def punctuation_percentage(self, string_i):\n whitespace_removed = re.sub(self.whitespace_regex, '', string_i)\n if len(whitespace_removed) > 0:\n num_punctuation = sum(whitespace_removed.count(punc) for\n punc in string.punctuation)\n return num_punctuation / len(whitespace_removed)\n else:\n return 0\n\n def vowel_consenant_ratio(self, string_i):\n only_letters = re.sub(self.letters_regex, '', string_i).lower()\n if len(only_letters) > 0:\n vowels = set(constants['vowel list'])\n num_vowels = sum(only_letters.count(vowel) for vowel in vowels)\n return num_vowels / len(only_letters)\n else:\n return 0\n\n def capital_letter_ratio(self, string_i):\n only_letters = re.sub(self.letters_regex, '', string_i)\n if len(only_letters) > 0:\n num_capital_letters = sum(1 for letter in only_letters if\n letter.isupper())\n return num_capital_letters / len(only_letters)\n else:\n return 0\n\n def title_words_ratio(self, string_i):\n words = string_i.split()\n if len(words) > 0:\n title_words = [word for word in words if word.istitle()]\n return len(title_words) / len(words)\n else:\n return 0\n\n def average_word_length(self, string_i):\n words = string_i.split()\n word_lengths = [len(word) for word in words]\n if len(word_lengths) > 0:\n return sum(word_lengths) / len(word_lengths)\n else:\n return 0\n\n def has_ip_srv(self, string_i):\n has_ip_address = 1 if self.ip_regex.search(string_i) else 0\n exceptions = 
self._substring_match_bool(\n string_i.lower(), constants['ip exceptions'])\n return int(has_ip_address and not exceptions)\n\n def is_base64(self, string_i):\n # known FPs\n pre_list = ['Create', 'Array', 'GetSystem', 'Windows', 'Direct']\n if any([string_i.startswith(pre) for pre in pre_list]):\n return 0\n\n # base64 character set\n if set(string_i) - self.b64chars:\n return 0\n\n # length is multiple of 4\n if len(string_i) % 4 != 0:\n return 0\n\n try:\n # note: base64 decoder may return without\n # error without decoding the full string\n # -> check decoded length before declaring success\n decoded = base64.b64decode(string_i)\n declen = len(decoded)\n if declen < 0.75 * len(string_i.rstrip('=')) - 2:\n return 0\n except (UnicodeDecodeError, binascii.Error, ValueError):\n return 0\n\n # require one item from each character class,\n # with alphabetic > F (to avoid detecting hex strings)\n groups = [\n string.ascii_uppercase[6:],\n string.ascii_lowercase[6:],\n string.digits\n ]\n if not all([any([c for c in string_i if c in grp]) for grp in groups]):\n return 0\n\n # padding test\n if string_i.endswith('=') and '=' not in string_i.rstrip('='):\n return 1\n\n if len(string_i) <= 20:\n # be picky with short strings without padding;\n # otherwise we get lots of false positives\n if '+' in string_i:\n return 1\n return 0\n\n if len(string_i.rstrip(string_i[-1])) < 4:\n # filter out 'AAAAAAAAAAAA' strings and friends\n return 0\n\n if string.ascii_uppercase in string_i:\n # base64 alphabet\n return 0\n\n return 1\n\n def is_hex_not_numeric_not_alpha(self, string_i):\n is_hex = all(c in string.hexdigits for c in string_i)\n is_not_numeric_not_alpha = not (string_i.isalpha() or\n string_i.isdigit())\n return int(is_hex and is_not_numeric_not_alpha)\n\n def is_strict_fqdn(self, string_i):\n return 1 if self.fqdn_strict_only_regex.match(string_i) else 0\n\n def has_email(self, string_i):\n return 1 if self.email_valid.match(string_i) else 0\n\n def is_md5(self, string_i):\n return 1 if self.md5_only_regex.match(string_i) else 0\n\n def is_sha1(self, string_i):\n return 1 if self.sha1_only_regex.match(string_i) else 0\n\n def is_sha256(self, string_i):\n return 1 if self.sha256_only_regex.match(string_i) else 0\n\n def is_mac(self, string_i):\n return 1 if self.mac_only_regex.match(string_i) else 0\n\n def has_keylogger(self, string_i):\n return 1 if self.keylogger_regex.match(string_i) else 0\n\n def has_oid(self, string_i):\n return 1 if self.oid_regex.match(string_i) else 0\n\n def has_privilege(self, string_i):\n return 1 if self.priv_regex.match(string_i) else 0\n\n def has_sddl(self, string_i):\n return 1 if self.sddl_regex.match(string_i) else 0\n\n def has_mozilla_api(self, string_i):\n return 1 if self.mozilla_api_regex.match(string_i) else 0\n\n def is_oss(self, string_i):\n is_oss_exact = 1 if self.oss_exact_regex.match(string_i) else 0\n is_oss_substr = 1 if self.oss_substr_regex.search(string_i) else 0\n return is_oss_exact or is_oss_substr\n\n def has_packer(self, string_i):\n has_upx_packer = 1 if self.upx_regex.search(string_i) else 0\n has_other_packer = 1 if self._substring_match_bool(\n string_i.lower(), constants['packers']) else 0\n return has_upx_packer or has_other_packer\n\n def has_crypto_related(self, string_i):\n has_crypto_common = \\\n 1 if self.crypto_common_regex.search(string_i) else 0\n has_crypto_uncommon = 1 if self._substring_match_bool(\n string_i.lower(), constants['crypto uncommon']) else 0\n return has_crypto_common or has_crypto_uncommon\n\n def 
is_blacklisted(self, string_i):\n is_exact_blacklist = 1 if self._exact_match_bool(\n string_i, constants['blacklist fullmatch']) else 0\n is_substring_blacklist = 1 if self._substring_match_bool(\n string_i, constants['blacklist substring']) else 0\n is_substring_lower_blacklist = 1 if self._substring_match_bool(\n string_i.lower(), constants['blacklist substring lower']) else 0\n\n is_windows_api = self._exact_match_bool(\n string_i, constants['windows api'])\n is_pma_api = self._exact_match_bool(\n string_i, constants['pma important functions'])\n is_not_api_blacklist = not (is_windows_api or is_pma_api) and \\\n is_substring_lower_blacklist\n\n return is_exact_blacklist or is_substring_blacklist or \\\n is_not_api_blacklist\n\n def has_namespace(self, string_i):\n return 1 if self.namespace_regex.search(string_i) else 0\n\n def has_msword_version(self, string_i):\n return 1 if self.msword_regex.search(string_i) else 0\n\n def has_privilege_constant(self, string_i):\n return 1 if self.privilege_constant_regex.search(string_i) else 0\n\n def has_fqdn(self, string_i):\n return 1 if self.fqdn_regex.search(string_i) else 0\n\n def has_product_id(self, string_i):\n return 1 if self.prod_id_regex.search(string_i) else 0\n\n def has_ip(self, string_i):\n return 1 if self.svc_regex.search(string_i) else 0\n\n def has_sid(self, string_i):\n return 1 if self.sid_regex.search(string_i) else 0\n\n def has_url(self, string_i):\n return 1 if self.url_regex.search(string_i) else 0\n\n def ends_with_line_feed(self, string_i):\n return 1 if self.linefeed_regex.search(string_i) else 0\n\n def has_path(self, string_i):\n return 1 if self.path_regex.search(string_i) else 0\n\n def has_event(self, string_i):\n return 1 if self.event_regex.search(string_i) else 0\n\n def has_guid(self, string_i):\n return 1 if self.guid_regex.search(string_i) else 0\n\n def has_public_key(self, string_i):\n return 1 if self.pkcs_regex.search(string_i) else 0\n\n def has_pdb(self, string_i):\n return 1 if self.pdb_regex.search(string_i) else 0\n\n def is_user_agent(self, string_i):\n return 1 if self.user_agents_regex.search(string_i) else 0\n\n def has_hive_name(self, string_i):\n return 1 if self.hive_regex.search(string_i) else 0\n\n def has_variable_name(self, string_i):\n has_uppercase_var_name = self.uppercase_var_name.search(string_i)\n has_period_delimited_var_name = \\\n self.period_delimited_var_name.search(string_i)\n has_no_extension = not self.has_extension(string_i)\n return 1 if (has_uppercase_var_name or\n (has_period_delimited_var_name and\n has_no_extension)) else 0\n\n def has_format_specifier(self, string_i):\n if len(string_i) < 5:\n return 0\n return 1 if self.format_regex.search(string_i) else 0\n\n def has_extension(self, string_i):\n is_not_common_dll = not self.is_common_dll(string_i)\n return 1 if (is_not_common_dll and\n self.ext_regex.search(string_i)) else 0\n\n def has_padding_string(self, string_i):\n return self._substring_match_bool(string_i,\n ['PADDING'])\n\n def has_malware_identifier(self, string_i):\n return self._substring_match_bool(string_i.lower(),\n constants['malware identifiers'])\n\n def is_registry(self, string_i):\n return self._substring_match_bool(string_i,\n constants['regs'])\n\n def is_antivirus(self, string_i):\n return self._substring_match_bool(string_i.lower(),\n constants['avs'])\n\n def is_whitelisted(self, string_i):\n return self._substring_match_bool(string_i,\n constants['white'])\n\n def has_protocol(self, string_i):\n return 
self._substring_match_bool(string_i.upper(),\n constants['protocols'])\n\n def is_protocol_method(self, string_i):\n return self._substring_match_bool(string_i,\n constants['protocol methods'])\n\n def is_common_path(self, string_i):\n return self._substring_match_bool(string_i.lower(),\n constants['paths'])\n\n def is_common_dll(self, string_i):\n has_common_dll = self._exact_match_bool(\n string_i.split('.')[0].lower(), constants['common dlls'])\n has_malware_dll = self._exact_match_bool(\n string_i.split('.')[0].lower(), constants['malware dlls'])\n return has_common_dll and not has_malware_dll\n\n def is_boost_lib(self, string_i):\n return self._exact_match_bool(string_i, constants['lib boost'])\n\n def is_delphi_lib(self, string_i):\n return self._exact_match_bool(string_i, constants['lib delphi'])\n\n def is_irrelevant_windows_api(self, string_i):\n return self._exact_match_bool(string_i, constants['windows api'])\n\n def is_cpp_runtime(self, string_i):\n return self._exact_match_bool(string_i, constants['cpp'])\n\n def is_library(self, string_i):\n return self._exact_match_bool(string_i, constants['lib'])\n\n def is_date(self, string_i):\n return self._exact_match_bool(string_i, constants['dates'])\n\n def is_known_xml(self, string_i):\n return self._exact_match_bool(string_i, constants['known xml'])\n\n def is_pe_artifact(self, string_i):\n return self._exact_match_bool(string_i, constants['pe artifacts'])\n\n def is_language(self, string_i):\n return self._exact_match_bool(string_i, constants['languages'])\n\n def is_code_page(self, string_i):\n return self._exact_match_bool(string_i, constants['code pages'])\n\n def is_region_tag(self, string_i):\n return self._exact_match_bool(string_i, constants['region tags'])\n\n def is_known_folder(self, string_i):\n return self._exact_match_bool(\n string_i, constants['known folders'])\n\n def is_malware_api(self, string_i):\n return self._exact_match_bool(\n string_i, constants['pma important functions'])\n\n def is_environment_variable(self, string_i):\n if len(string_i) > 0:\n return int(string_i[0] == '%' and string_i[-1] == '%')\n else:\n return 0\n\n def is_x86(self, string_i):\n if len(string_i) <= 5:\n if len(set(list(string_i))) == 1:\n return 1\n if len(string_i) >= 2 and string_i[1] == '$':\n return 1\n return 0\n\n def has_not_latin(self, string_i):\n try:\n unicode_names = [unicodedata.name(char) for char in string_i]\n for unicode_name in unicode_names:\n if self._substring_match_bool(unicode_name,\n self.not_latin_unicode_names):\n return 1\n return 0\n except ValueError:\n return 0\n\n def markov_junk(self, string_i, thresh_lower=0.004):\n log_prob = 0.0\n transition_count = 0\n for char_i, char_j in self._two_gram(string_i.lower()):\n char_i_idx = char_idx_mapper.get(char_i, char_idx_mapper['unk'])\n char_j_idx = char_idx_mapper.get(char_j, char_idx_mapper['unk'])\n log_prob += log_transition_probas[char_i_idx][char_j_idx]\n transition_count += 1\n if transition_count >= 1:\n below_markov_threshold = \\\n math.exp(log_prob / transition_count) <= thresh_lower\n else:\n below_markov_threshold = math.exp(log_prob) <= thresh_lower\n has_no_format_specifier = not self.has_format_specifier(string_i)\n return below_markov_threshold and has_no_format_specifier\n\n def _two_gram(self, string_i):\n for start in range(0, len(string_i) - 2 + 1):\n yield ''.join(string_i[start:start + 2])\n" }, { "alpha_fraction": 0.6336633563041687, "alphanum_fraction": 0.6534653306007385, "avg_line_length": 19.200000762939453, "blob_id": 
"6c8209bf5628086abb6a34e5b1bf7c9a5a10024e", "content_id": "bc217604b3a2fc7be3ee962c7e9cc144f6f37649", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "TOML", "length_bytes": 606, "license_type": "permissive", "max_line_length": 67, "num_lines": 30, "path": "/Pipfile", "repo_name": "digitalsleuth/stringsifter", "src_encoding": "UTF-8", "text": "[[source]]\nurl = \"https://pypi.org/simple\"\nverify_ssl = true\nname = \"pypi\"\n\n[packages]\nlightgbm = \"~=3.1\"\nnumpy = \">=1.24\"\nscikit-learn = \"~=1.2\"\njoblib = \"~=1.0\"\nfasttext = \"*\"\npybind11 = \"*\"\ncython = \"*\"\n\n[dev-packages]\npytest = \"*\"\nsetuptools = \"*\"\ntwine = \"*\"\nwheel = \"*\"\n\n[requires]\npython_version = \"3\"\n\n[scripts]\nrank_strings = \"python -m stringsifter.rank_strings\"\nflarestrings = \"python -m stringsifter.flarestrings\"\ntests = \"python -m pytest\"\nbuild = \"python setup.py sdist bdist_wheel\"\nupload_test = \"python -m twine upload --repository testpypi dist/*\"\nupload = \"python -m twine upload dist/*\"\n" }, { "alpha_fraction": 0.7251184582710266, "alphanum_fraction": 0.7440758347511292, "avg_line_length": 22.44444465637207, "blob_id": "2b90d2c21462ace656f81d89caabd7799a20a434", "content_id": "4e5bcad6e0108796b814799f21190d77f9e23f96", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 211, "license_type": "permissive", "max_line_length": 55, "num_lines": 9, "path": "/tests/conftest.py", "repo_name": "digitalsleuth/stringsifter", "src_encoding": "UTF-8", "text": "# Copyright (C) 2019 FireEye, Inc. All Rights Reserved.\n\nimport pytest\nimport stringsifter.preprocess as preprocess\n\[email protected](scope='module')\ndef featurizer():\n f = preprocess.Featurizer()\n yield f\n" }, { "alpha_fraction": 0.5546969175338745, "alphanum_fraction": 0.5567576885223389, "avg_line_length": 39.15861892700195, "blob_id": "1ba80c807b928ea5d7f95b9d3d219b12b68290a2", "content_id": "f36afae3f3c1d0fc32229f7bcbe645dd5e263395", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5823, "license_type": "permissive", "max_line_length": 89, "num_lines": 145, "path": "/stringsifter/rank_strings.py", "repo_name": "digitalsleuth/stringsifter", "src_encoding": "UTF-8", "text": "# Copyright (C) 2019 FireEye, Inc. 
All Rights Reserved.\n\nimport os\nimport sys\nimport numpy\nimport joblib\nimport argparse\n\n\nif __package__ is None or __package__ == \"\":\n from lib import util\n from version import __version__\nelse:\n from .lib import util\n from .version import __version__\n\n\ndef is_valid_dir(parser, arg):\n arg = os.path.abspath(arg)\n if not os.path.exists(arg):\n parser.error(\"The directory %s does not exist!\" % arg)\n else:\n return arg\n\n\ndef main(input_strings, cutoff, cutoff_score, scores, batch):\n modeldir = os.path.join(util.package_base(), \"model\")\n featurizer = joblib.load(os.path.join(modeldir, \"featurizer.pkl\"))\n ranker = joblib.load(os.path.join(modeldir, \"ranker.pkl\"))\n\n if not batch:\n strings = numpy.array([line.strip() for line in\n input_strings.readlines()], dtype=object)\n\n if len(strings) < 1:\n raise ValueError(\"No strings found within input.\")\n\n X_test = featurizer.transform(strings)\n y_scores = ranker.predict(X_test)\n\n if not numpy.isnan(cutoff_score):\n above_cutoff_indices = numpy.where(y_scores >= cutoff_score)\n y_scores = y_scores[above_cutoff_indices]\n strings = strings[above_cutoff_indices]\n\n argsorted_y_scores = numpy.argsort(y_scores)[::-1]\n sorted_strings = strings[argsorted_y_scores]\n cutoff_sorted_strings = sorted_strings.tolist()[:cutoff]\n\n if scores:\n sorted_y_scores = y_scores[argsorted_y_scores]\n print(\"\\n\".join([\"%.2f,%s\" % pair for pair in\n zip(sorted_y_scores, cutoff_sorted_strings)]))\n else:\n print(\"\\n\".join(cutoff_sorted_strings))\n else:\n strings = []\n qids = []\n batch_files = os.listdir(batch)\n\n for batch_input_file in batch_files:\n with open(os.path.join(batch, batch_input_file)) as batch_input_fp:\n string_i = [line.strip() for line in\n batch_input_fp.readlines()]\n strings.extend(string_i)\n qids.append(len(string_i))\n\n if len(strings) < 1:\n raise ValueError(\"No strings found in batch directory.\")\n\n X_test = featurizer.transform(strings)\n y_scores = ranker.predict(X_test)\n\n strings_grouped = numpy.split(strings,\n numpy.cumsum(qids))[:-1]\n y_scores_grouped = numpy.split(y_scores, numpy.cumsum(qids))[:-1]\n\n batch_file_suffix = \".ranked_strings\"\n for batch_file, strings_i, y_scores_i in zip(batch_files,\n strings_grouped,\n y_scores_grouped):\n with open(os.path.join(batch, batch_file + batch_file_suffix),\n \"w\") as batch_output_fp:\n\n if not numpy.isnan(cutoff_score):\n above_cutoff_indices_i = numpy.where(\n y_scores_i >= cutoff_score)\n y_scores_i = y_scores_i[above_cutoff_indices_i]\n strings_i = strings_i[above_cutoff_indices_i]\n\n argsorted_y_scores_i = numpy.argsort(y_scores_i)[::-1]\n sorted_strings_i = strings_i[argsorted_y_scores_i]\n cutoff_sorted_strings_i = sorted_strings_i.tolist()[:cutoff]\n cutoff_sorted_strings_newlines_i = map(lambda s: s + \"\\n\",\n cutoff_sorted_strings_i)\n if scores:\n sorted_y_scores_i = y_scores_i[argsorted_y_scores_i]\n scores_strings_i = zip(sorted_y_scores_i,\n cutoff_sorted_strings_newlines_i)\n scores_strings_combined_i = [\"%.2f,%s\" % (score_i, string_i)\n for score_i, string_i in\n scores_strings_i]\n batch_output_fp.writelines(scores_strings_combined_i)\n else:\n batch_output_fp.writelines(\n cutoff_sorted_strings_newlines_i)\n\n\n# entry point for script\ndef argmain():\n parser = argparse.ArgumentParser(\n description=\"StringSifter ranks strings based on their \\\n relevance for malware analysis.\",\n formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n parser.add_argument(\n \"input_strings\", nargs=\"?\", 
type=argparse.FileType(\"r\"),\n default=sys.stdin, help=\"Read input strings from stdin\")\n parser.add_argument('--version', action='version', version=__version__)\n parser.add_argument(\n '--scores', '-s', action='store_true',\n help=\"display rank scores within output \\\n (default: scores not displayed)\")\n parser.add_argument(\n '--batch', '-b', type=lambda adir: is_valid_dir(parser, adir),\n help=\"enable batch mode, where dir contains files \\\n containing Strings outputs to be ranked by \\\n StringSifter. This creates new files in dir \\\n with StringSifter results denoted with the \\\n .ranked_strings extention\")\n\n group = parser.add_mutually_exclusive_group()\n group.add_argument(\n '--limit', '-l', type=int, default=None,\n help=\"limit output to the top `limit` ranked strings (default: no limit)\")\n group.add_argument(\n '--min-score', '-m', type=float, default=numpy.nan,\n help=\"limit output to strings with score >= `min-score` (default: no min score)\")\n args = parser.parse_args()\n\n main(args.input_strings, args.limit, args.min_score,\n args.scores, args.batch)\n\n\nif __name__ == '__main__':\n argmain()\n" }, { "alpha_fraction": 0.5727896094322205, "alphanum_fraction": 0.5769817233085632, "avg_line_length": 31.799999237060547, "blob_id": "8c1e42b07118487c706b61f9cf76438b459271cf", "content_id": "46d552772ea5e3f0595e2bbdcfa0262514bb647a", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2624, "license_type": "permissive", "max_line_length": 77, "num_lines": 80, "path": "/setup.py", "repo_name": "digitalsleuth/stringsifter", "src_encoding": "UTF-8", "text": "# Copyright (C) 2019 FireEye, Inc. All Rights Reserved.\n\nfrom setuptools import setup\nimport os\nimport re\n\n__all__ = ['metadata', 'setup']\n\n# Get the base directory\nhere = os.path.dirname(__file__)\nif not here:\n here = os.path.curdir\n\n# Text describing the module\nlong_description = 'stringsifter is a machine learning-based tool ' + \\\n 'that automatically ranks the output of the ' + \\\n '`strings` program for binary triage analysis.'\n\n# Get the version\nversfile = os.path.join(here, 'stringsifter', 'version.py')\n_version = {}\nwith open(versfile, 'r') as fid:\n exec(fid.read(), _version)\n\n# Do some Pipfile parsing to avoid two copies of the requirements,\n# but this is fragile\nreqsfile = os.path.join(here, 'Pipfile')\nrequirements = []\nwith open(reqsfile, 'r') as fid:\n in_packages_section = False\n for line in fid.readlines():\n if line.startswith('['):\n in_packages_section = line.rstrip() == '[packages]'\n continue\n if in_packages_section:\n m = re.match(r'([\\w-]+) *= *\"(.*)\"', line)\n if m:\n if m.group(2) == '*':\n requirements.append(m.group(1))\n else:\n requirements.append(m.group(1) + m.group(2))\n\n# Get the list of scripts\nscripts = []\n\n_packages = ['stringsifter', 'stringsifter/lib']\n\n_package_data = {'stringsifter': ['model/*.pkl',\n 'lib/*.pkl',\n 'lib/*.ftz',\n 'lib/*.json']}\n\n# Set the parameters for the setup script\nmetadata = {\n # Setup instructions\n 'provides': ['stringsifter'],\n 'packages': _packages,\n 'package_data': _package_data,\n 'scripts': scripts,\n 'entry_points': {\n 'console_scripts': ['rank_strings=stringsifter.rank_strings:argmain',\n 'flarestrings=stringsifter.flarestrings:main']\n },\n 'install_requires': requirements,\n 'python_requires': '<3.11',\n # Metadata\n 'name': 'stringsifter',\n 'version': _version['__version__'],\n 'description': 'stringsifter is a machine 
learning-based tool that ' + \\\n 'automatically ranks the output of the `strings` ' + \\\n 'program for binary triage analysis.',\n 'long_description': long_description,\n 'url': 'https://github.com/mandiant/stringsifter',\n 'download_url': 'https://github.com/mandiant/stringsifter',\n 'keywords': ['stringsifter', 'rank', 'strings', 'binary', 'triage'],\n }\n\n# Execute the setup script\nif __name__ == '__main__':\n setup(**metadata)\n" }, { "alpha_fraction": 0.25925925374031067, "alphanum_fraction": 0.5925925970077515, "avg_line_length": 26, "blob_id": "c7a8b91c33a546d3565e0338767afaea4119b289", "content_id": "47a72c05aa4e7b9302136c85e5889b8861516ae0", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 27, "license_type": "permissive", "max_line_length": 26, "num_lines": 1, "path": "/stringsifter/version.py", "repo_name": "digitalsleuth/stringsifter", "src_encoding": "UTF-8", "text": "__version__ = '2.20201202'\n" } ]
11
vpekar/yamltests
https://github.com/vpekar/yamltests
2ff9ff93715fce8471aceaca54c77f231ce5eee3
375f35e985d39057f5d5c91598e30f19d50fd2e3
676a6eac736eee3d1737d4c06ade85de1eb0eee2
refs/heads/master
2020-04-27T14:19:42.108625
2013-01-31T23:15:21
2013-01-31T23:15:21
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5645604133605957, "alphanum_fraction": 0.5659340620040894, "avg_line_length": 19.08333396911621, "blob_id": "e3850c505b9a4dd3234f5ffee5a263c9a792b714", "content_id": "478034a48e87b3759312cf3f3bd805e4c1619f40", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 728, "license_type": "no_license", "max_line_length": 77, "num_lines": 36, "path": "/example/extractor.py", "repo_name": "vpekar/yamltests", "src_encoding": "UTF-8", "text": "\"\"\"Extracts occurrences of twitter handles in text\n\"\"\"\n\nimport re\n\nP = re.compile(r'(?:^|\\s)@(\\w+)')\n\n\ndef extract(text):\n \"\"\"Returns the first match as a string\n \"\"\"\n return P.findall(text)[0]\n\n\nclass Extractor:\n \n def __init__(self):\n pass\n \n def extract(self, text):\n \"\"\"Returns a list of matched strings\n \"\"\"\n return P.findall(text)\n\n\nclass ExtractorWithInitKwargs:\n \n def __init__(self, lookup):\n \"\"\"lookup is an object with names to be excluded\n \"\"\"\n self.lookup = lookup\n \n def extract(self, text):\n \"\"\"Returns a list of matched strings\n \"\"\"\n return [x for x in P.findall(text) if not self.lookup.is_excluded(x)]\n \n" }, { "alpha_fraction": 0.550000011920929, "alphanum_fraction": 0.550000011920929, "avg_line_length": 34.93333435058594, "blob_id": "264bb81c6362c7ef52d9b978a4e0dc336c16e960", "content_id": "4dfdcde46a4722de0a2e4d2a3f55c18edfa0b787", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 540, "license_type": "no_license", "max_line_length": 85, "num_lines": 15, "path": "/example/tests/init_kwargs.py", "repo_name": "vpekar/yamltests", "src_encoding": "UTF-8", "text": "class Lookup:\n \"\"\"A class containing usernames not to be extracted\n \"\"\"\n def __init__(self):\n self.excluded_names = ['username']\n \n def is_excluded(self, name):\n return True if name in self.excluded_names else False\n\n# a dict, where each key is the name of a class being tested and the value is kwargs \n# to be passed to the __init__ method of the class\ninit_kwargs = {'ExtractorWithInitKwargs': {\n 'lookup': Lookup()\n },\n }\n\n" }, { "alpha_fraction": 0.6722891330718994, "alphanum_fraction": 0.6722891330718994, "avg_line_length": 26.53333282470703, "blob_id": "dbb4ea8dcf94a14608689208c1f1642c6f954668", "content_id": "479357a02e7165bf731e5f6ba0d8cd545e6f2471", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 830, "license_type": "no_license", "max_line_length": 69, "num_lines": 30, "path": "/tests.py", "repo_name": "vpekar/yamltests", "src_encoding": "UTF-8", "text": "import os\nimport unittest\nfrom nose.plugins import PluginTester\nfrom yamltests import YamlTests\n\nclass TestRunsPythonTests(PluginTester, unittest.TestCase):\n \"\"\"Make sure python tests run as expected, with yamltests enabled\n \"\"\"\n\n activate = \"--with-yamltests\"\n plugins = [YamlTests()]\n suitepath = os.path.join(os.getcwd(), 'example/tests/tests.py')\n \n def test_run_python_tests(self):\n assert \"FAILED\" not in self.output\n\nclass TestRunsYAMLTests(PluginTester, unittest.TestCase):\n \"\"\"Make sure YAML tests run as expected\n \"\"\"\n\n activate = \"--with-yamltests\"\n plugins = [YamlTests()]\n suitepath = os.path.join(os.getcwd(), 'example/tests/tests.yml')\n \n def test_run_yaml_tests(self):\n assert \"FAILED\" not in self.output\n\n\nif __name__ == '__main__':\n unittest.main()\n " }, { "alpha_fraction": 0.5937846899032593, 
"alphanum_fraction": 0.6037735939025879, "avg_line_length": 27.1875, "blob_id": "b799eb21f57ac91bfb35c108ff06559dbbba6a20", "content_id": "ae1be4fad6cee538dd8b2cb821613bfebf6d0312", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 901, "license_type": "no_license", "max_line_length": 54, "num_lines": 32, "path": "/setup.py", "repo_name": "vpekar/yamltests", "src_encoding": "UTF-8", "text": "from setuptools import setup, find_packages\n\nclassifiers = [\n 'Development Status :: 3 - Alpha',\n 'License :: OSI Approved :: BSD License',\n 'Intended Audience :: Developers',\n 'Environment :: Console',\n 'Operating System :: OS Independent',\n 'Topic :: Software Development :: Testing',\n 'Programming Language :: Python :: 2.7'\n]\n\nsetup(\n name=\"yamltests\",\n version=\"0.0.1\",\n description=\"\"\"Running yaml tests from nose\"\"\",\n long_description=\"\"\" \"\"\",\n author=\"Viktor Pekar\",\n author_email=\"[email protected]\",\n url=\"https://github.com/vpekar/yamltests\",\n license=\"New BSD License\",\n classifiers=classifiers,\n packages=find_packages(),\n install_requires=['nose', 'pyyaml', 'setuptools'],\n py_modules=['yamltests'],\n zip_safe = False,\n entry_points = {\n 'nose.plugins.0.10': [\n 'yamltests = yamltests:YamlTests'\n ]\n },\n)" }, { "alpha_fraction": 0.55471271276474, "alphanum_fraction": 0.5560038685798645, "avg_line_length": 32.49189376831055, "blob_id": "6561c49946f50cfc8262987297a5b24a5aa23e09", "content_id": "719b02403ad43dcd684d07de4d36056ca5cb48dc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6196, "license_type": "no_license", "max_line_length": 80, "num_lines": 185, "path": "/yamltests.py", "repo_name": "vpekar/yamltests", "src_encoding": "UTF-8", "text": "\"\"\"A nose plugin for tests written in YAML (.yml). 
\n\nCreated on 21 Jan 2013\n\n@author: vpekar\n\"\"\"\n\n\nimport sys, os, logging, unittest\nfrom nose.plugins.base import Plugin\nfrom yaml import load\ntry:\n from yaml import CLoader as Loader\nexcept ImportError:\n from yaml import Loader\n\n\nLOGGER = logging.getLogger('nose.plugins.yamltests')\n\n\ndef do_import(name):\n \"\"\"Import a module specified as a string\n \"\"\"\n LOGGER.debug('Importing %s' % name)\n components = name.split('.')\n module = __import__(components[0])\n for comp in components[1:]:\n module = getattr(module, comp)\n return module\n \n\nclass Case(unittest.TestCase):\n \"\"\"A test case to be created at run time\n \"\"\"\n \n input = ''\n expected = None\n actual = None\n desc = ''\n func_name = None\n class_name = None\n file_name = None\n init_kwargs = {}\n \n def setUp(self):\n if self.class_name:\n self.instance = globals()[self.class_name](**self.init_kwargs)\n \n def runTest(self):\n \n if self.class_name:\n func = self.instance.__class__.__dict__[self.func_name]\n self.actual = func(self.instance, self.input)\n else:\n func = globals()[self.func_name]\n self.actual = func(self.input)\n \n msg_vars = (self.file_name, self.desc, self.actual, self.expected)\n \n expected_is_list = isinstance(self.expected, list)\n actual_is_list = isinstance(self.actual, list)\n expected_is_str = isinstance(self.expected, str) or \\\n isinstance(self.expected, unicode)\n actual_is_str = isinstance(self.actual, str) or \\\n isinstance(self.actual, unicode)\n if expected_is_list and actual_is_list:\n msg = \"%s: %s\\n\\tActual: %s\\n\\t!=\\n\\tExpected %s\" % msg_vars\n self.assertListEqual(self.actual, self.expected, msg)\n elif expected_is_str:\n if actual_is_list:\n msg = \"%s: %s\\n\\tActual %s does not have Expected \\\"%s\\\"\"\n self.assertIn(self.expected, self.actual, msg % msg_vars)\n elif actual_is_str:\n msg = \"%s: %s\\n\\tActual: \\\"%s\\\" != Expected \\\"%s\\\"\" % msg_vars\n self.assertEqual(self.actual, self.expected, msg)\n else:\n raise TypeError(\"Actual value should be a list, a string, or\"\n \"unicode. 
Got %s (%s)\" % (type(self.actual), self.file_name))\n else:\n raise TypeError(\"Either both actual and expected values should \"\n \"be lists, or expected value should be a string or unicode.\"\n \" Got actual type %s, expected type %s (%s)\" % \n (type(self.actual), type(self.expected), self.file_name))\n\n\nclass YamlTestParser:\n \"\"\"Parses YAML files and creates test cases\n \"\"\"\n \n def __init__(self):\n self.logger = logging.getLogger(__name__)\n \n def create_testcase(self, case_data, func_name, file_name,\n class_name=None, init_kwargs={}):\n \"\"\"Construct a test case\n \"\"\"\n case = Case()\n case.func_name = func_name\n case.input = case_data['text']\n case.desc = case_data['description']\n case.expected = case_data['expected']\n case.class_name = class_name\n case.file_name = file_name\n case.init_kwargs = init_kwargs\n return case\n\n def parse_yaml(self, fname):\n return load(open(fname), Loader=Loader)\n\n def get_class(self, fname):\n \"\"\"Yield test data for one class/function\n \"\"\"\n for module_name, module_tests in self.parse_yaml(fname).items():\n for class_name, class_tests in module_tests.items():\n yield module_name, class_name, class_tests\n\n def get_cases(self, fname):\n \"\"\"Yield TestCases found in the file.\n \"\"\"\n \n sys.path.append(os.path.dirname(fname))\n try:\n from init_kwargs import init_kwargs\n LOGGER.debug(\"Imported from %s: %s\" % (fname, repr(init_kwargs)))\n except ImportError:\n init_kwargs = {}\n LOGGER.debug(\"Failed to import init kwargs from %s\" % fname)\n \n for module_name, class_name, class_tests in self.get_class(fname):\n # import the class/function\n full_name = \"%s.%s\" % (module_name, class_name)\n globals()[class_name] = do_import(full_name)\n \n kwargs = {'file_name': fname}\n \n if isinstance(class_tests, list):\n # testing a function\n kwargs['func_name'] = class_name\n for case_data in class_tests:\n yield self.create_testcase(case_data, **kwargs)\n \n elif isinstance(class_tests, dict):\n # testing a class\n kwargs['init_kwargs'] = init_kwargs.get(class_name, {})\n kwargs['class_name'] = class_name\n for method_name, method_tests in class_tests.items():\n kwargs['func_name'] = method_name\n for case_data in method_tests:\n yield self.create_testcase(case_data, **kwargs)\n\n\nclass YamlTests(Plugin):\n \"\"\"Run unittests specified in YAML files (.yml)\n \"\"\"\n \n name = 'yamltests'\n yaml_test_parser = YamlTestParser()\n\n def options(self, parser, env=os.environ):\n super(YamlTests, self).options(parser, env=env)\n\n def configure(self, options, conf):\n #print >> sys.stderr, (\"Conf: %s\" % conf)\n super(YamlTests, self).configure(options, conf)\n if not self.enabled:\n return\n\n def finalize(self, result):\n pass\n\n def wantFile(self, fname):\n if fname.endswith('.yml'):\n return True\n return None\n\n def loadTestsFromFile(self, fname):\n \"\"\"Return iterable containing TestCases\n \"\"\"\n name = os.path.abspath(fname)\n flag = False\n for case in self.yaml_test_parser.get_cases(name):\n flag = True\n yield case\n if not flag:\n yield False\n" }, { "alpha_fraction": 0.6192830801010132, "alphanum_fraction": 0.621343195438385, "avg_line_length": 27.89285659790039, "blob_id": "db50436b82b96501d22cc917a793c7060b84692e", "content_id": "3fb7811c0539b9839da769636bdd5c2e608ca6e7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 2427, "license_type": "no_license", "max_line_length": 225, "num_lines": 84, "path": "/README.rst", "repo_name": 
"vpekar/yamltests", "src_encoding": "UTF-8", "text": "yamltests\n=======\n\nOverview\n---------\n\nyamltests is `nose <http://somethingaboutorange.com/mrl/projects/nose>`_ plugin for running tests written in `YAML <http://en.wikipedia.org/wiki/YAML>`_. \n\n\nInstallation\n-----------\n\nFrom the setup directory::\n\n python setup.py install\n \nUsing pip::\n\n pip install https://github.com/vpekar/yamltests/archive/master.zip\n\nTesting the plugin\n-----------------\n\nFrom the setup directory::\n\n python tests.py\n\nUsage\n------\n\nAdd the ``--with-yaml-tests`` option. Can be combined with other options, e.g., ``--with-coverage``::\n\n nosetests --with-yaml-tests\n \nTo test the *example* package::\n\n $ nosetests example --with-yamltests\n .....\n ----------------------------------------------------------------------\n Ran 6 tests in 0.014s\n\n OK\n \n\nFormat of YAML files\n--------------------\n\nShould be as follows::\n\n path.to.myModule:\n myFunctionInMyModule:\n description: \"Extract twitter handle\"\n text: \"... @username ...\"\n expected: \"username\"\n myClassInMyModule:\n myMethod:\n description: \"Extract twitter handle\"\n text: \"... @username ...\"\n expected: [\"username\"]\n\nwhere ``text`` is the input to the function/method being tested and ``expected`` is its expected output.\n\nFilenames should begin with \"test\" and have extension \".yml\".\n\nFor a working example, see ``example/tests/tests.yml``.\n\nTypes of actual and expected outputs\n-------------------------------------\n\nEach function/method being tested can output either a string or a \nlist. The expected value can be either a string or a list.\n\n- If the expected value is a string and actual value is a string, assertEqual is used.\n\n- If the expected value is a string and actual value is a list, assertIn is used.\n\n- If the expected value is a list and actual value is a list, assertListEqual is used.\n\nNotes\n------\n\n- If the yaml file contains a dot-separated module name (``SomePackage.SomeModule``) then the ``__init__.py`` file inside SomePackage should explicitly import all modules (see the ``__init__.py`` file in the example folder)\n\n- Optionally, the class being tested can be initialised with arguments (e.g., a mocked database) specified in ``init_kwargs.py``, placed in the same folder as the test*.yml files. See ``init_kwargs.py`` in the example folder.\n" }, { "alpha_fraction": 0.6384839415550232, "alphanum_fraction": 0.6384839415550232, "avg_line_length": 28.826086044311523, "blob_id": "c57b078769722423319d01c9362c353d2b742275", "content_id": "c6020d308298d4b2a3150aa4f2b57016bc3dad92", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 686, "license_type": "no_license", "max_line_length": 58, "num_lines": 23, "path": "/example/tests/tests.py", "repo_name": "vpekar/yamltests", "src_encoding": "UTF-8", "text": "import unittest\nfrom example.extractor import extract, Extractor\n\nclass TestExtractFunction(unittest.TestCase):\n \n def test_extract(self):\n msg = \"Extract mention at the begining of a tweet\"\n text = \"@username reply\"\n expected = \"username\"\n actual = extract(text)\n self.assertEqual(actual, expected)\n \nclass TestExtractorClass(unittest.TestCase):\n \n def setUp(self):\n self.ex = Extractor()\n \n def test_extract(self):\n msg = \"Extract mention at the begining of a tweet\"\n text = \"@username reply\"\n expected = [\"username\"]\n actual = self.ex.extract(text)\n self.assertEqual(actual, expected)\n" } ]
7
jmatejekmorris/jmatejekmorris.github.io
https://github.com/jmatejekmorris/jmatejekmorris.github.io
27572afe6902dc017c6db4381f8584f2bfb94298
05ab3aa624da9392b4492fae4f37e3bb752cd2d8
8f7b0863d24457011117c6807dbc8caeeffca52d
refs/heads/master
2023-07-06T11:12:24.237107
2023-06-26T15:13:20
2023-06-26T15:13:20
199,733,727
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5844748616218567, "alphanum_fraction": 0.586758017539978, "avg_line_length": 15.259259223937988, "blob_id": "e9e3e46fd0acbd5ed92f6610bc07a84835ad4f1c", "content_id": "89bc73243b4c778f70cf416f6aa87473da078a9d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 438, "license_type": "permissive", "max_line_length": 43, "num_lines": 27, "path": "/remove-exif.py", "repo_name": "jmatejekmorris/jmatejekmorris.github.io", "src_encoding": "UTF-8", "text": "import os \n\n\n\nimport numpy as np \n\n\n\nfrom PIL import Image \nfrom skimage import io\n\n\nimage_exts = ['jpeg', 'jpg', 'png']\n\nfor root, dirs, files in os.walk('.'):\n # find all image files \n for file in files:\n ext = file.split('.')[-1]\n\n if not ext in image_exts: continue \n\n filename = os.path.join(root, file)\n\n print (filename)\n im = np.array(Image.open(filename))\n\n io.imsave(filename, im)" }, { "alpha_fraction": 0.6860215067863464, "alphanum_fraction": 0.6946236491203308, "avg_line_length": 20.18181800842285, "blob_id": "dd3b742a8159cf103da258cd0d03eb79dd871a18", "content_id": "15d5cee046e73f33095c06f2e58eee0d55807ef2", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 465, "license_type": "permissive", "max_line_length": 87, "num_lines": 22, "path": "/images/bars/downsample.py", "repo_name": "jmatejekmorris/jmatejekmorris.github.io", "src_encoding": "UTF-8", "text": "import os\nimport glob\n\n\nfrom skimage import io, transform\n\n\n\nif not os.path.exists('full-resolution-bars'):\n os.mkdir('full-resolution-bars')\n\nfilenames = glob.glob('*png')\n\nfor filename in filenames:\n image = io.imread(filename)\n\n new_filename = 'full-resolution-bars/{}'.format(filename)\n os.rename(filename, new_filename)\n\n image_resized = transform.resize(image, (image.shape[0] // 4, image.shape[1] // 4))\n\n io.imsave(filename, image_resized)" }, { "alpha_fraction": 0.6423941850662231, "alphanum_fraction": 0.660693883895874, "avg_line_length": 60, "blob_id": "bf288cf3f693fa124dae5ba388ad550aecbf1995", "content_id": "a12a8e885943e86be169bc54f0853b6d797aa6d4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "HTML", "length_bytes": 5246, "license_type": "permissive", "max_line_length": 400, "num_lines": 86, "path": "/dont-say-poop.html", "repo_name": "jmatejekmorris/jmatejekmorris.github.io", "src_encoding": "UTF-8", "text": "<!DOCTYPE HTML>\n<html>\n <head>\n <!-- Global site tag (gtag.js) - Google Analytics -->\n <script async src=\"https://www.googletagmanager.com/gtag/js?id=G-GEW2M0CR7K\"></script>\n <script>\n window.dataLayer = window.dataLayer || [];\n function gtag(){dataLayer.push(arguments);}\n gtag('js', new Date());\n\n gtag('config', 'G-GEW2M0CR7K');\n </script>\n\n <title>Don't Say Poop!</title>\n <link rel=\"stylesheet\" href=\"/css/default.css\"/>\n <link rel=\"stylesheet\" href=\"/css/poop.css\"/>\n <link rel=\"stylesheet\" href=\"/css/colored-bars.css\"/>\n <script src=\"/scripts/jquery-3.5.1.min.js\"></script>\n <script type=\"text/javascript\" src=\"scripts/default.js\"></script>\n\n <!--FACEBOOK -->\n <meta property=\"og:image\" content=\"http://jmatejekmorris.com/images/cropped-headshot-scaled.jpg\">\n <meta property=\"og:image:type\" content=\"image/jpeg\">\n <meta property=\"og:image:width\" content=\"200\">\n <meta property=\"og:image:height\" content=\"200\">\n <meta property=\"og:type\" content=\"website\" />\n </head>\n 
<body>\n <div class=\"horizontal-color-bar poop-first-bar brown-horizontal-dont-say-poop\">\n <nav>\n <ul class=\"menu-default\">\n <li class=\"poop-menu-option\"><a href=\"/index.html\"><span class=\"menu-default-span\">Home</span></a></li>\n <li class=\"poop-menu-option\"><a href=\"/about.html\"><span class=\"menu-default-span\">About Me</span></a></li>\n <li onclick=\"ExpandTopMenuBooks()\" class=\"poop-menu-selected books-menu-dropdown\"><a><span class=\"menu-default-span\">Books</span></a>\n <ul id=\"books-menu-default\" class=\"poop-books-menu\">\n <li><a href=\"/my-ex-imaginary-friend.html\"><span class=\"menu-default-span\">My Ex-Imaginary Friend</span></a></li>\n <li><a href=\"/dont-say-poop.html\"><span class=\"menu-default-span\">Don't Say Poop!</span></a></li>\n <li><a href=\"/forsooth.html\"><span class=\"menu-default-span\">Forsooth</span></a></li>\n </ul>\n </li>\n <li class=\"poop-menu-option\"><a href=\"/frequently-asked-questions.html\"><span class=\"menu-default-span\">F.A.Q.s</span></a></li>\n <li class=\"poop-menu-option\"><a href=\"/seldom-asked-questions.html\"><span class=\"menu-default-span\">S.A.Q.s</span></a></li>\n <li class=\"poop-menu-option\"><a href=\"/contact.html\"><span class=\"menu-default-span\">Contact</span></a></li>\n </ul>\n </nav>\n </div>\n <div class=\"horizontal-color-bar poop-second-bar blue-horizontal-dont-say-poop\">&nbsp</div>\n <div class=\"horizontal-color-bar poop-third-bar green-horizontal-dont-say-poop\">&nbsp</div>\n\n <div id=\"content\">\n <div class=\"poop-header\">Don't Say Poop!</div>\n <div class=\"poop-cover\">\n <img class=\"poop-book-image\" src=\"/images/dont-say-poop-cover.jpg\" alt=\"The cover of the book features the title Don't Say Poop in a speech bubble coming from the embarrassed narrator's mouth. The small child, standing against a sky blue background, blushes as he is forced to say that dreadful word aloud. Three anthropomorphized poops dance on either side of him. They have excellent moves.\">\n </div>\n <div class=\"poop-blurb\">\n <i>Everyone Poops</i> meets <i>The Book with No Pictures</i> in this irresistibly naughty read-aloud. When you get the urge to say you-know-what, don't. DON'T SAY POOP!\n <br><br>\n Why say a vile word like <i>poop</i> when you could say humdrum bum crumbs, float-or-sinker, major stinker, sometimes mushy from your tushy, or smelly belly funky jelly. See how much nicer that is?\n <br><br>\n This silly book of tongue twisters will have kids doubled over as they learn some alternatives to their favorite potty words. Perfect for reading aloud, and for reading again and again!\n <br><br>\n Illustrated by the amazing <a class=\"book-link\" href=\"https://www.instagram.com/fredblunt/?hl=en\" target=\"_blank\">Fred Blunt.</a>\n <br><br>\n <div class=\"reviews-section-header\">Reviews</div><br>\n \n \"As text by Matejek-Morris rollicks along, scratchy art by Blunt adds to the chaotically comic effect, reminding readers that bathroom humor and its ilk can be good clean fun.\" - <i>Publishers Weekly</i>\n <br><br>\n Out now from Houghton Mifflin Harcourt. 
Order now at:\n\n <div class=\"buy-at-options\">\n <div class=\"buy-at-option\"><a href=\"https://bookshop.org/books/don-t-say-poop/9780358423331\" target=\"_blank\">Bookshop.org</a></div>\n <div class=\"buy-at-option\"><a href=\"https://www.indiebound.org/book/9780358423331\" target=\"_blank\">Indiebound</a></div>\n <div class=\"buy-at-option\"><a href=\"https://www.barnesandnoble.com/w/dont-say-poop-jimmy-matejek-morris/1137169221?ean=9780358423331\" target=\"_blank\">Barnes and Noble</a></div>\n <!-- <div class=\"buy-at-option\"><a href=\"https://www.target.com/p/don-t-say-poop-by-jimmy-matejek-morris-hardcover/-/A-80501878\" target=\"_blank\">Target</a></div> -->\n <div class=\"buy-at-option\"><a href=\"https://www.amazon.com/Dont-Say-Poop-Jimmy-Matejek-Morris/dp/0358423333/ref=tmm_hrd_swatch_0?_encoding=UTF8&qid=1597974272&sr=8-2\" target=\"_blank\">Amazon</a></div>\n </div>\n\n Or check it out at a library near you!\n </div>\n\n <div class=\"footer-div\">&nbsp;</div>\n\n </div>\n\n </body>\n</html>\n" }, { "alpha_fraction": 0.8064516186714172, "alphanum_fraction": 0.8064516186714172, "avg_line_length": 30, "blob_id": "625012118825791ed4df883d1243ae943bc8a9be", "content_id": "94c02da385406262b1f4e5529b7b968cf559494d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 62, "license_type": "permissive", "max_line_length": 34, "num_lines": 2, "path": "/README.md", "repo_name": "jmatejekmorris/jmatejekmorris.github.io", "src_encoding": "UTF-8", "text": "# jmatejekmorris.github.io\nPersonal author website for Jimmy Matejek-Morris\n" }, { "alpha_fraction": 0.6815920472145081, "alphanum_fraction": 0.711442768573761, "avg_line_length": 24.25, "blob_id": "1b536171d5d5a9a4d5ca62de945511c17975ed42", "content_id": "82c51253a992d84345fcf6f19b682e34ceb0c213", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 201, "license_type": "permissive", "max_line_length": 50, "num_lines": 8, "path": "/scripts/default.js", "repo_name": "jmatejekmorris/jmatejekmorris.github.io", "src_encoding": "UTF-8", "text": "window.onresize = function() {\n\tvar content = document.getElementById('content');\n\tcontent.style.minHeight = '100vh';\n}\n\nfunction ExpandTopMenuBooks() {\n $('#books-menu-default').slideToggle(150);\n}" }, { "alpha_fraction": 0.6987179517745972, "alphanum_fraction": 0.7115384340286255, "avg_line_length": 19.34782600402832, "blob_id": "942cf1e2cd071a331b238658a0762d3c9dd41cb8", "content_id": "1d06ff3945fa2ae7bf3b9de639e05452006a8381", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 468, "license_type": "permissive", "max_line_length": 63, "num_lines": 23, "path": "/images/about/downsample.py", "repo_name": "jmatejekmorris/jmatejekmorris.github.io", "src_encoding": "UTF-8", "text": "import os\nimport glob\nimport numpy as np\n\n\nfrom skimage import io, transform\n\n\n\nif not os.path.exists('full-resolution-images'):\n os.mkdir('full-resolution-images')\n\nfilenames = sorted(glob.glob('*jpeg'))\n\nfor filename in filenames:\n image = io.imread(filename)\n\n new_filename = 'full-resolution-images/{}'.format(filename)\n os.rename(filename, new_filename)\n\n image_resized = transform.resize(image, (800, 800))\n\n io.imsave(filename, image_resized)\n" } ]
6
Supercopter/wave-watchers-robot
https://github.com/Supercopter/wave-watchers-robot
edc62eb7b939f838ee2b2d12198198444ba3d3d1
471269ccf030eea407edf78a4859a5f13f7c96bc
6d4f6f13413c88596715e7bc889fe0bffeff5a93
refs/heads/master
2016-09-10T12:04:47.030611
2015-04-11T18:03:45
2015-04-11T18:03:45
33,782,896
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5918972492218018, "alphanum_fraction": 0.5918972492218018, "avg_line_length": 38.36000061035156, "blob_id": "3170493a56b1853fe328eed3e77bce05a188aa71", "content_id": "bb7688050401c23d016256d74ed37b542493e553", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2024, "license_type": "no_license", "max_line_length": 365, "num_lines": 50, "path": "/run_on_firstload.py", "repo_name": "Supercopter/wave-watchers-robot", "src_encoding": "UTF-8", "text": "from google.appengine.ext import db\r\nimport wavewatcher_class\r\n\r\nMEMBERS = ['[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]']\r\nOWNERS = ['[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]']\r\nNICE_ROBOTS = ['[email protected]', '[email protected]', '[email protected]']\r\nTRUSTED = ['[email protected]', '[email protected]']\r\n\r\nBLACKLIST = ['[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]',]\r\nGREYLIST = ['[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]',]\r\n\r\nfor wavewatcher in MEMBERS:\r\n rec = wavewatcher_class.WaveWatchers(userid=wavewatcher, level=wavewatcher_class.ACCESS_LEVELS['MEMBER'])\r\n rec.put()\r\n \r\nfor wavewatcher in OWNERS:\r\n rec = wavewatcher_class.WaveWatchers(userid=wavewatcher, level=wavewatcher_class.ACCESS_LEVELS['OWNER'])\r\n rec.put()\r\n\r\nfor robot in NICE_ROBOTS:\r\n rec = wavewatcher_class.WaveWatchers(userid=robot, level=wavewatcher_class.ACCESS_LEVELS['ROBOT'])\r\n rec.put()\r\n\r\nfor trusted in TRUSTED:\r\n rec = wavewatcher_class.WaveWatchers(userid=trusted, level=wavewatcher_class.ACCESS_LEVELS['TRUSTED'])\r\n rec.put()\r\n\r\nfor troll in BLACKLIST:\r\n rec = wavewatcher_class.Villain(userid=troll, level=wavewatcher_class.VILLAIN_LEVELS['BLACKLIST'])\r\n rec.put()\r\n \r\nfor grey in GREYLIST:\r\n rec = wavewatcher_class.Villain(userid=grey, level=wavewatcher_class.VILLAIN_LEVELS['BLACKLIST'])\r\n rec.put()\r\n\r\nif __name__ == \"__main__\":\r\n print()\r\n print()\r\n print(MEMBERS)\r\n print()\r\n print(OWNERS)\r\n print()\r\n print(NICE_ROBOTS)\r\n print()\r\n print(TRUSTED)\r\n print()\r\n print(BLACKLIST)\r\n print()\r\n print(GREYLIST)\r\n print()\r\n \r\n" }, { "alpha_fraction": 0.6657585501670837, "alphanum_fraction": 0.679188072681427, "avg_line_length": 47.1698112487793, "blob_id": "568a9331f96dc0a483a187b76a94418604ce535a", "content_id": "4ab32cfa39f70b5c8b27afb1a7e853311cb2f403", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 
26062, "license_type": "no_license", "max_line_length": 202, "num_lines": 530, "path": "/wavewatchers_default.py", "repo_name": "Supercopter/wave-watchers-robot", "src_encoding": "UTF-8", "text": "from waveapi import events\r\nfrom waveapi import ops\r\nfrom waveapi import robot\r\nfrom waveapi import element\r\nfrom waveapi import wavelet\r\nimport logging\r\n#import cgi\r\n#import testing\r\n#import hashlib\r\nfrom google.appengine.api import mail\r\n#from google.appengine.ext import webapp\r\n#from google.appengine.ext.webapp.util import run_wsgi_app\r\nfrom google.appengine.ext import db\r\nfrom waveapi import appengine_robot_runner\r\nimport wavewatcher_class\r\nINDEX_WAVE_ID = 'googlewave.com!w+JOQvIuevS' #Here are the waveids for all of the waves that the robot updates.\r\nWAVELET_ID = 'googlewave.com!conv+root'\r\nSECONDARY_INDEX_ID = 'googlewave.com!w+2DFkTj9KC'\r\nSHORT_PRIMARY_INDEX_ID = 'googlewave.com!w+EXoDbYjDH'\r\nSHORT_SECONDARY_INDEX_ID = 'googlewave.com!w+EXoDbYjDJ'\r\n#The next lines create variables storing any useful searches/lists of names.\r\nWAVEWATCHERS_ALL = []\r\nq = db.GqlQuery('SELECT * FROM WaveWatchers WHERE level = 2')\r\nfor i in q:\r\n WAVEWATCHERS_ALL.append(i.userid)\r\nWAVEWATCHERS_OWNERS = []\r\nq = db.GqlQuery('SELECT * FROM WaveWatchers WHERE level = 1')\r\nfor i in q:\r\n WAVEWATCHERS_OWNERS.append(i.userid)\r\n WAVEWATCHERS_ALL.append(i.userid)\r\nGREYLIST = []\r\nVILLAINS = []\r\nq = db.GqlQuery('SELECT * FROM Villain WHERE level = 2')\r\nfor i in q:\r\n GREYLIST.append(i.userid)\r\n VILLAINS.append(i.userid)\r\nBLACKLIST = []\r\nq = db.GqlQuery('SELECT * FROM Villain WHERE level = 1')\r\nfor i in q:\r\n BLACKLIST.append(i.userid)\r\n VILLAINS.append(i.userid)\r\n\r\n\r\n\r\ndef addWaveWatcher(event, wavelet, wavewatcher, level = 2):\r\n logging.warning(\"%s is trying to add a new wavewatcher (%s) as level %s\" % (modified_by, wavewatcher, level)) #Adds a record of the attempt to the logs\r\n if event.modified_by in WAVEWATCHERS_OWNERS:\r\n import wavewatcher_class\r\n rec = wavewatcher_class.WaveWatchers(userid=wavewatcher, level=level)\r\n rec.put()\r\n easyLevels = {}\r\n for key in wavewatcher_class.ACCESS_LEVELS.keys():\r\n easyLevels[wavewatcher_class.ACCESS_LEVELS[key]] = key\r\n if level == 1:\r\n i = \"as an\"\r\n elif level == 3:\r\n i = \"as\"\r\n else:\r\n i = \"as a\"\r\n wavelet.reply(\"%s successfully added %s %s %s\" % (event.modified_by, wavewatcher, i, easylevels[level]))\r\n logging.warning(\"%s was added successfully\" % wavewatcher)\r\n else:\r\n wavelet.reply(\"You do not have permission to perform that operation - Ask a group owner to do it for you.\")\r\n logging.info(\"addWaveWatcher completed\")\r\n\r\ndef addBadUser(event, wavelet, troll, level = 2):\r\n if event.modified_by in WAVEWATCHERS_OWNERS:\r\n import wavewatcher_class\r\n rec = wavewatcher_class.Villain(userid=troll, level=level)\r\n rec.put()\r\n if level == 1:\r\n wavelet.reply(\"%s was successfully BLACKLISTED by %s\" % (troll, event.modified_by))\r\n body = \"%s successfully added %s to the wavewatchers blacklist.\" % (event.modified_by, troll)\r\n mail.send_mail(sender=\"Blacklist Alerts <[email protected]>\",\r\n to=[\"Pooja Srinivas <[email protected]>\", \"Nathanael Abbotts <[email protected]>\", \"[email protected]\"],\r\n subject=\"%s BLACKLISTED\" % troll,\r\n body=body)\r\n else:\r\n wavelet.reply(\"%s was successfully GREYLISTED %s\" % (troll, event.modified_by))\r\n body = \"%s successfully added %s to the wavewatchers greylist.\" % 
(event.modified_by, troll)\r\n mail.send_mail(sender=\"Greylist Alerts <[email protected]>\",\r\n to=[\"Pooja Srinivas <[email protected]>\", \"Nathanael Abbotts <[email protected]>\", \"[email protected]\"],\r\n subject=\"%s GREYLISTED\" % troll,\r\n body=body)\r\n else:\r\n wavelet.reply(\"You do not have permission to perform that operation - Ask a group owner to do it for you.\")\r\n\r\n \r\n\r\ndef displayCommands(wavelet):\r\n \"\"\"displayCommands(wavelet):\r\n Adds the full list of commands in a reply to wavelet\"\"\"\r\n logging.info(\"displayCommands Called\")\r\n #variable cmds lists all the commands & their descriptions in different list elements\r\n cmds = [\"\\n\\nList of commands:\\n\", \"addAll\", \" - adds all the wavewatchers.*\\n\", \"updateIndex\", \" - re-posts a link to the index wave.\\n\",\r\n \"isSafe\", \" - displays info about the publicity & participants of a wave.\\n\", \"makePublic\", \" - adds the wave-watchers group & \\\r\npublic to the wave.\\n\", \"displayCommands\", \" - displays this help message.\\n\", \"publishWave\", \" - combines addAll and updateIndex. Used when\\\r\n creating a WW wave.\\n\", \"\\n*Can only be used by a wave-watcher.\"]\r\n #The following lines set up helpful variables giving lengths of the different commands and decriptions\r\n start_1 = len(cmds[0])\r\n end_1 = start_1 + len(cmds[1])\r\n start_2 = end_1 + len(cmds[2])\r\n end_2 = start_2 + len(cmds[3])\r\n start_3 = end_2 + len(cmds[4])\r\n end_3 = start_3 + len(cmds[5])\r\n start_4 = end_3 + len(cmds[6])\r\n end_4 = start_4 + len(cmds[7])\r\n start_5 = end_4 + len(cmds[8])\r\n end_5 = start_5 + len(cmds[9])\r\n start_6 = end_5 + len(cmds[10])\r\n end_6 = start_6 + len(cmds[11])\r\n all = ''\r\n #The for loop here combines all commands stored under cmds into one string all\r\n for i in cmds:\r\n all += i\r\n #replies to wavelet with all\r\n blip = wavelet.reply(all)\r\n #Annotates as bold the commands using len()s stored above\r\n blip.range(start_1, end_1).annotate(\"style/fontWeight\", \"bold\")\r\n blip.range(start_2, end_2).annotate(\"style/fontWeight\", \"bold\")\r\n blip.range(start_3, end_3).annotate(\"style/fontWeight\", \"bold\")\r\n blip.range(start_4, end_4).annotate(\"style/fontWeight\", \"bold\")\r\n blip.range(start_5, end_5).annotate(\"style/fontWeight\", \"bold\")\r\n blip.range(start_6, end_6).annotate(\"style/fontWeight\", \"bold\")\r\n blip.range(0, len(cmds[0]) - 1).annotate(\"style/fontWeight\", \"bold\")\r\n #Increases font size of cmds[0] \r\n blip.range(0, len(cmds[0]) - 1).annotate('style/fontSize', '1.75em')\r\n logging.info(\"displayCommands completed\")\r\n\r\ndef addWavewatchers(event, wavelet, addAll = True):\r\n \"\"\"addWavewatchers(event, wavelet)\"\"\"\r\n logging.info(\"addWavewatchers Called\")\r\n #Addall function\r\n logging.info(\"addWavewatchers called. 
Modified by: \" + event.modified_by) #Sends the name of the person calling the commands to the logs\r\n opQueue = wavelet.get_operation_queue() #Gets the operation queue (see ops module)\r\n wave_id = wavelet.wave_id #Gets the wave_id of the wave, for use by the ops module\r\n wavelet_id = wavelet.wavelet_id # Gets wavelet_id of wave, for use by ops module\r\n opQueue.wavelet_add_participant(wavelet.wave_id, wavelet.wavelet_id, \"[email protected]\") #Adds me as a participant on the wave (I want to be notified of everything now) :]\r\n if event.modified_by not in WAVEWATCHERS_ALL: #If the active user is not a wave-watcher/on the safe list...\r\n opQueue.wavelet_add_participant(wavelet.wave_id, wavelet.wavelet_id, \"[email protected]\") #Adds the group as a participant on the wave.\r\n wavelet.reply(\"Wavewatchers Team Notified\") #Tell them that the wave-watchers have been notified\r\n return True #End the addAll function, return to whatever called it\r\n allAdded = None #create variable 'alladded' to be used later.\r\n ownersAdded = None\r\n results = WAVEWATCHERS_OWNERS #Queries the datastore & returns wavewatchers that are owners.\r\n for participant in results:\r\n if participant not in wavelet.participants:\r\n opQueue.wavelet_add_participant(wave_id, wavelet_id, participant)\r\n ownersAdded = True\r\n if (not addAll) and (event.modified_by not in results): \r\n results = WAVEWATCHERS_ALL #Queries the datastore & returns wavewatchers that are members.\r\n for participant in results:\r\n if participant not in wavelet.participants:\r\n opQueue.wavelet_add_participant(wave_id, wavelet_id, participant)\r\n allAdded = True\r\n opQueue.wavelet_add_participant(wavelet.wave_id, wavelet.wavelet_id, \"[email protected]\") #Adds the group as a participant on the wave.\r\n elif addAll:\r\n results = WAVEWATCHERS_ALL\r\n for participant in results:\r\n if participant not in wavelet.participants:\r\n opQueue.wavelet_add_participant(wave_id, wavelet_id, participant)\r\n allAdded = True\r\n opQueue.wavelet_add_participant(wavelet.wave_id, wavelet.wavelet_id, \"[email protected]\") #Adds the group as a participant on the wave. \r\n if allAdded:\r\n wavelet.reply(\"WaveWatchers Team All Added\")\r\n elif ownersAdded:\r\n wavelet.reply(\"Owners Added\")\r\n else:\r\n wavelet.reply(\"WaveWatchers Team Already Partipants\")\r\n logging.info(\"addWavewatchers Completed\")\r\n return False #Placeholder in case I missed something below. Will be removed in future.\r\n \r\ndef tagWavelet(event, wavelet):\r\n \"\"\"Tags the wave tagWavelet(event, wavelet)\"\"\"\r\n logging.info(\"TagWavelet Called\")\r\n opQueue = wavelet.get_operation_queue() #gets operation queue\r\n wave_id = wavelet.wave_id\r\n wavelet_id = wavelet.wavelet_id\r\n current_tags = wavelet.tags\r\n if \"wavewatchers\" not in current_tags:\r\n opQueue.wavelet_modify_tag(wave_id, wavelet_id, \"wavewatchers\") #tags with first tag\r\n if \"wave-watchers\" not in current_tags:\r\n opQueue.wavelet_modify_tag(wave_id, wavelet_id, \"wave-watchers\") #tags with second tag\r\n logging.info(\"TagWavelet Completed\")\r\n \r\ndef checkBadParticipants(event, wavelet):\r\n \"\"\"checkBadParticipants(event, wavelet) checks for people in the black/grey list. 
Returns 4 strings\"\"\"\r\n    logging.info(\"checkBadParticipants Called\")\r\n    blacklisted = []\r\n    greylisted = []\r\n    blacklist = BLACKLIST\r\n    greylist = GREYLIST\r\n    for participant in wavelet.participants:\r\n        if participant in blacklist:\r\n            blacklisted.append(participant)\r\n        elif participant in greylist:\r\n            greylisted.append(participant)\r\n    intro_str = \"\"\r\n    bad_p_str = \"\"\r\n    intro_str2 = \"\"\r\n    bad_p_str2 = \"\"\r\n    if blacklisted:\r\n        intro_str = \"\\nKnown BLACKLISTED users that are participants:\"\r\n        for villain in blacklisted:\r\n            bad_p_str += villain + ', '\r\n    if greylisted:\r\n        intro_str2 = \"\\nKnown GREYLISTED users that are participants:\"\r\n        for villain in greylisted:\r\n            bad_p_str2 += villain + ', '\r\n    logging.info(\"checkBadParticipants Completed\")\r\n    return intro_str, bad_p_str, intro_str2, bad_p_str2\r\n    \r\ndef checkRobots(event, wavelet):\r\n    \"\"\"checkRobots(event, wavelet) checks for robots. Returns 2 args\"\"\"\r\n    robotParticipants = []\r\n    for participant in wavelet.participants:\r\n        if participant.split(\"@\")[1] == \"appspot.com\":\r\n            robotParticipants.append(participant)\r\n    intro_str = \"\\nRobots that are participants: \"\r\n    robots_str = ''\r\n    for participant in robotParticipants:\r\n        if len(robotParticipants) <= 1:\r\n            robots_str += participant\r\n        else:\r\n            robots_str += participant + \", \"\r\n    return intro_str, robots_str\r\n    \r\ndef updateIndex(event, wavelet, state = False):\r\n    global myRobot\r\n    content = [\"\\n\\n\\n\\nTitle: \",]\r\n    if wavelet.title:\r\n        content.append(wavelet.title)\r\n    else:\r\n        content.append(\"(untitled wave)\")\r\n    content += [\"\\nIndexed By: \", event.modified_by, \"\\nWave ID: \", wavelet.wave_id, \"\\nWave was created by: \", wavelet.creator, None, None, None, None, None, None]\r\n    content[8], content[9], content[10], content[11] = checkBadParticipants(event, wavelet)\r\n    content[12], content[13] = checkRobots(event, wavelet)\r\n    lastline = 0\r\n    if \"[email protected]\" in wavelet.participants:\r\n        content.append(\"\\nThe wave is public.\")\r\n        lastline = 1\r\n    elif \"[email protected]\" in wavelet.participants:\r\n        content.append(\"\\nThe wave is not public, but viewable by a wave-watcher.\")\r\n        lastline = 2\r\n    else:\r\n        content.append(\"\\nThe wave is not public, and not viewable by a wave-watcher.\")\r\n        lastline = 3\r\n\r\n    titleLength = len(wavelet.title)\r\n    start_1 = 0\r\n    #Oooh. 
Pretty alignment...\r\n end_1 = start_1 + len(content[0])\r\n start_2 = end_1 + len(content[1])\r\n end_2 = start_2 + len(content[2])\r\n start_3 = end_2 + len(content[3])\r\n end_3 = start_3 + len(content[4])\r\n start_4 = end_3 + len(content[5])\r\n end_4 = start_4 + len(content[6])\r\n start_5 = end_4 + len(content[7])\r\n end_5 = start_5 + len(content[8])\r\n start_6 = end_5 + len(content[9])\r\n end_6 = start_6 + len(content[10])\r\n start_7 = end_6 + len(content[11])\r\n end_7 = start_7 + len(content[12])\r\n start_8 = end_7 + len(content[13])\r\n end_8 = start_8 + len(content[14])\r\n text = ''\r\n for i in content:\r\n text += i\r\n if event.modified_by in WAVEWATCHERS_ALL:\r\n indexWave = myRobot.fetch_wavelet(INDEX_WAVE_ID, WAVELET_ID)\r\n blip = indexWave.reply(text)\r\n blip.range(start_1, end_1).annotate(\"style/fontWeight\", \"bold\")\r\n blip.range(start_2, end_2).annotate(\"style/fontWeight\", \"bold\")\r\n blip.range(start_3, end_3).annotate(\"style/fontWeight\", \"bold\")\r\n blip.range(start_4, end_4).annotate(\"style/fontWeight\", \"bold\")\r\n blip.range(start_5, end_5).annotate(\"style/fontWeight\", \"bold\")\r\n blip.range(start_6, end_6).annotate(\"style/fontWeight\", \"bold\")\r\n blip.range(start_7, end_7).annotate(\"style/fontWeight\", \"bold\")\r\n blip.range(start_8, end_8).annotate(\"style/fontStyle\", \"italic\")\r\n if lastline == 1:\r\n blip.range(start_8, end_8).annotate(\"style/backgroundColor\", 'rgb(229, 51, 51)')\r\n blip.range(start_8, end_8).annotate(\"style/color\", 'rgb(255, 255, 255)')\r\n elif lastline == 2:\r\n blip.range(start_8, end_8).annotate(\"style/backgroundColor\", 'rgb(96, 217, 120)')\r\n blip.range(start_8, end_8).annotate(\"style/color\", 'rgb(0, 0, 0)')\r\n elif lastline == 3:\r\n blip.range(start_8, end_8).annotate(\"style/backgroundColor\", 'rgb(255, 229, 0)')\r\n blip.range(start_8, end_8).annotate(\"style/color\", 'rgb(0, 0, 0)')\r\n blip.range(end_1, start_2).annotate(\"link/wave\", wavelet.wave_id)\r\n blip.range(end_1, start_2).annotate(\"wave-watchers/id\", wavelet.wave_id)\r\n blip.range(end_3, start_4).annotate(\"style/fontFamily\", 'monospace')\r\n myRobot.submit(indexWave)\r\n shortIndexWave = myRobot.fetch_wavelet(SHORT_PRIMARY_INDEX_ID, WAVELET_ID)\r\n lenTitle = len(wavelet.title)\r\n rootBlip = shortIndexWave.root_blip\r\n line1 = element.Line(line_type='li')\r\n rootBlip.append(line1)\r\n lenBeforeEdit = len(shortIndexWave.root_blip.text) - 1\r\n firstAppend = wavelet.title + \" \"\r\n rootBlip.append(firstAppend)\r\n rootBlip.range(lenBeforeEdit, lenBeforeEdit + len(firstAppend)).annotate(\"link/wave\", wavelet.wave_id)\r\n line2 = element.Line(line_type='li', indent = 1)\r\n myRobot.submit(shortIndexWave) \r\n else:\r\n secondaryIndexWave = myRobot.fetch_wavelet(SECONDARY_INDEX_ID, WAVELET_ID)\r\n blip = secondaryIndexWave.reply(text)\r\n blip.range(start_1, end_1).annotate(\"style/fontWeight\", \"bold\")\r\n blip.range(start_2, end_2).annotate(\"style/fontWeight\", \"bold\")\r\n blip.range(start_3, end_3).annotate(\"style/fontWeight\", \"bold\")\r\n blip.range(start_4, end_4).annotate(\"style/fontWeight\", \"bold\")\r\n blip.range(start_5, end_5).annotate(\"style/fontWeight\", \"bold\")\r\n blip.range(start_6, end_6).annotate(\"style/fontWeight\", \"bold\")\r\n blip.range(start_7, end_7).annotate(\"style/fontWeight\", \"bold\")\r\n blip.range(start_8, end_8).annotate(\"style/fontStyle\", \"italic\")\r\n if lastline == 1:\r\n blip.range(start_8, end_8).annotate(\"style/backgroundColor\", 'rgb(229, 51, 51)')\r\n 
blip.range(start_8, end_8).annotate(\"style/color\", 'rgb(255, 255, 255)')\r\n elif lastline == 2:\r\n blip.range(start_8, end_8).annotate(\"style/backgroundColor\", 'rgb(96, 217, 120)')\r\n blip.range(start_8, end_8).annotate(\"style/color\", 'rgb(0, 0, 0)')\r\n elif lastline == 3:\r\n blip.range(start_8, end_8).annotate(\"style/backgroundColor\", 'rgb(255, 229, 0)')\r\n blip.range(start_8, end_8).annotate(\"style/color\", 'rgb(0, 0, 0)')\r\n blip.range(end_1, start_2).annotate(\"link/wave\", wavelet.wave_id)\r\n blip.range(end_3, start_4).annotate(\"wave-watchers/id\", wavelet.wave_id)\r\n blip.range(end_3, start_4).annotate(\"style/fontFamily\", 'monospace')\r\n myRobot.submit(secondaryIndexWave)\r\n shortIndexWave = myRobot.fetch_wavelet(SHORT_SECONDARY_INDEX_ID, WAVELET_ID)\r\n lenTitle = len(wavelet.title)\r\n rootBlip = shortIndexWave.root_blip\r\n line1 = element.Line(line_type='li')\r\n rootBlip.append(line1)\r\n lenBeforeEdit = len(shortIndexWave.root_blip.text) - 1\r\n firstAppend = wavelet.title + \" \"\r\n rootBlip.append(firstAppend)\r\n rootBlip.range(lenBeforeEdit, lenBeforeEdit + len(firstAppend)).annotate(\"link/wave\", wavelet.wave_id)\r\n myRobot.submit(shortIndexWave)\r\n logging.debug(text)\r\n logging.info(\"updateIndex func Completed\")\r\n \r\ndef BlockTroll(event, wavelet):\r\n logging.debug(\"BlockTroll Called\")\r\n \r\ndef OnWaveletSelfAdded(event, wavelet):\r\n logging.info(\"OnWaveletSelfAdded called\")\r\n tagWavelet(event, wavelet)\r\n results = WAVEWATCHERS_ALL\r\n if event.modified_by not in results:\r\n addWavewatchers(event, wavelet)\r\n logging.info(\"Program continues.\")\r\n updateIndex(event, wavelet)\r\n else:\r\n wavelet.reply(\"\\nType 'publishWave' in a reply to add the wave-watchers individually & submit this wave to the index.\")\r\n displayCommands(wavelet)\r\n \r\ndef OnBlipSubmitted(event, wavelet):\r\n logging.info(\"OnBlipSubmitted Called\")\r\n if event.blip.text:\r\n logging.info(\"Blip text = \" + event.blip.text)\r\n else:\r\n return\r\n if len(event.blip.text) <= 4:\r\n return\r\n test = False\r\n commands = [\"makePublic\", \"addAll\", \"addOwners\", \"isSafe\", \"updateIndex\", \"displayCommands\", \"publishWave\", \"chuckNorris\", \"addMember\", \"addOwner\", \"addRobot\", \"addTrusted\", \"greylist(\", \"blacklist(\"]\r\n for command in commands:\r\n if command in event.blip.text:\r\n test = True\r\n if not test:\r\n return\r\n if \"#!NO\" in event.blip.text[:6]:\r\n return\r\n if \"#!NO\" in event.blip.text[-6]:\r\n return\r\n if (event.modified_by in VILLAINS):\r\n blip = wavelet.reply(\"Unfortunately, you are not authorised to use the wavewatchers robot, as you have been blacklisted or greylisted.\")\r\n blip.append(\" See this wave for information on how to get yourself off the blacklist.\", bundled_annotations=[(\"link/wave\", 'googlewave.com!w+aSSVnLMbA')])\r\n return\r\n logging.info(\"WaveID = \" + wavelet.wave_id)\r\n logging.info(\"WaveletID = \" + wavelet.wavelet_id)\r\n logging.info(\"Blip ID = \" + event.blip_id)\r\n logging.info(event.blip.annotations.serialize())\r\n opQueue = wavelet.get_operation_queue()\r\n if event.blip.text:\r\n if \"makePublic\" in event.blip.text:\r\n logging.info(\"makePublic Found\")\r\n opQueue.wavelet_add_participant(wavelet.wave_id, wavelet.wavelet_id, \"[email protected]\")\r\n opQueue.wavelet_add_participant(wavelet.wave_id, wavelet.wavelet_id, \"[email protected]\")\r\n wavelet.reply(\"\\nMake sure that public is set as read-only!\")\r\n logging.info(\"makePublic Completed\")\r\n if 
\"addAll\" in event.blip.text:\r\n logging.info(\"addAll Found\")\r\n addWavewatchers(event, wavelet, addAll = True)\r\n logging.info(\"addAll Completed\")\r\n if \"addOwners\" in event.blip.text:\r\n addWavewatchers(event, wavelet, addAll = False)\r\n if \"isSafe\" in event.blip.text:\r\n logging.info(\"isSafe Found\")\r\n isUnsafe = None\r\n nonWW = []\r\n results = WAVEWATCHERS_ALL\r\n for participant in wavelet.participants:\r\n if participant not in results:\r\n isUnsafe = True\r\n nonWW.append(participant)\r\n if not isUnsafe:\r\n wavelet.reply(\"\\nOnly Wave Watchers can view this wave.\")\r\n elif \"[email protected]\" in wavelet.participants:\r\n wavelet.reply(\"\\nAll Wave users can view this wave.\")\r\n else:\r\n content = \"\\nSome Participants are not wave-watchers. Those are:\\n\"\r\n for participant in nonWW:\r\n content += participant + \" ,\\n\"\r\n wavelet.reply(content)\r\n logging.info(\"isSafe Completed\")\r\n if \"updateIndex\" in event.blip.text:\r\n logging.info(\"updateIndex Found\")\r\n updateIndex(event, wavelet)\r\n logging.info(\"updateIndex Completed\")\r\n if \"displayCommands\" in event.blip.text:\r\n logging.info(\"displayCommands Found\")\r\n displayCommands(wavelet)\r\n logging.info(\"displayCommands Completed\")\r\n if \"publishWave\" in event.blip.text:\r\n logging.info(\"publishWave Found\")\r\n state = addWavewatchers(event, wavelet)\r\n updateIndex(event, wavelet, state)\r\n logging.info(\"publishWave Completed\")\r\n if \"chuckNorris\" in event.blip.text:\r\n if event.modified_by not in WAVEWATCHERS_ALL:\r\n logging.info(\"OnBlipSubmitted Completed\")\r\n return\r\n global myRobot\r\n if \"chuckNorris(\" in event.blip.text:\r\n text = event.blip.text.split(\"chuckNorris(\")[1]\r\n text = text.split(\")\")\r\n chuckNorris = myRobot.new_wave(wavelet.domain, participants = [\"[email protected]\", event.modified_by, text[0]], submit = True)\r\n chuckNorrisIndex = myRobot.fetch_wavelet('googlewave.com!w+mTNnWQtAx', WAVELET_ID)\r\n blip = chuckNorrisIndex.reply(\"\\n\" + text[0] + \" \")\r\n blip.range(0, len(\"\\n\" + text[0])).annotate(\"link/wave\", chuckNorris.wave_id)\r\n chuckNorrisOpQ = chuckNorris.get_operation_queue()\r\n chuckNorrisOpQ.wavelet_set_title(chuckNorris.wave_id, chuckNorris.wavelet_id, \"Chuck Norris just kicked \" + text[0] + \" troll ASS!\")\r\n reply = wavelet.reply(\"\\nOoooh! \" + text[0] + \" just got Chuck Norris'ed!\") \r\n else:\r\n chuckNorris = myRobot.new_wave(wavelet.domain, participants = [\"[email protected]\", event.modified_by], message = '', submit = True)\r\n chuckNorrisIndex = myRobot.fetch_wavelet('googlewave.com!w+mTNnWQtAx', WAVELET_ID)\r\n blip = chuckNorrisIndex.reply(\"\\nA Troll Got Chuck Norris'ed \" )\r\n blip.range(0, len(\"\\nA Troll Got Chuck Norris'ed\")).annotate(\"link/wave\", chuckNorris.wave_id)\r\n chuckNorrisOpQ = chuckNorris.get_operation_queue()\r\n chuckNorrisOpQ.wavelet_set_title(chuckNorris.wave_id, chuckNorris.wavelet_id, \"Chuck Norris just kicked a troll's ASS!\")\r\n reply = wavelet.reply(\"\\nOoooh! 
A troll just got Chuck Norris'ed!\")\r\n reply.range(0, 7).annotate(\"link/wave\", chuckNorris.wave_id)\r\n #wavelet.root_blip.append(element.Image(url = 'http://lh4.ggpht.com/_21nXtfYRLLQ/S8TWNljJD3I/AAAAAAAABqk/KbMTcXE27GA/chuckwave.png',caption = 'Your conquerer'))\r\n chuckNorris.root_blip.append(element.Image(url = 'http://lh4.ggpht.com/_21nXtfYRLLQ/S8TWNljJD3I/AAAAAAAABqk/KbMTcXE27GA/chuckwave.png',caption = 'Your conquerer'))\r\n myRobot.submit(chuckNorrisIndex)\r\n myRobot.submit(chuckNorris)\r\n if \"addMember(\" in event.blip.text:\r\n addWaveWatcher(event, wavelet, event.blip.text.split(\"addMember(\")[1].split(\")\")[0], 2)\r\n if \"addOwner(\" in event.blip.text:\r\n addWaveWatcher(event, wavelet, event.blip.text.split(\"addOwner(\")[1].split(\")\")[0], 1)\r\n if \"addTrusted(\" in event.blip.text:\r\n addWaveWatcher(event, wavelet, event.blip.text.split(\"addTrusted(\")[1].split(\")\")[0], 3)\r\n if \"addRobot(\" in event.blip.text:\r\n addWaveWatcher(event, wavelet, event.blip.text.split(\"addRobot(\")[1].split(\")\")[0], 4)\r\n if \"blacklist(\" in event.blip.text:\r\n addBadUser(event, wavelet, event.blip.text.split(\"blacklist(\")[1].split(\")\")[0], 1)\r\n if \"greylist(\" in event.blip.text:\r\n addBadUser(event, wavelet, event.blip.text.split(\"greylist(\")[1].split(\")\")[0], 2)\r\n logging.info(\"OnBlipSubmitted Completed\")\r\n \r\ndef OnWaveletCreated(event, wavelet):\r\n logging.critical(\"OnWaveletCreated Called\") #Even after a hard-reset, I still don't get this called. Level set as CRITICAL for easy spotting.\r\n\r\ndef OnWaveletTitleChanged(event, wavelet):\r\n logging.critical(\"OnWaveletTitleChanged Called\") #Even after a hard-reset, I still don't get this called. Level set as CRITICAL for easy spotting.\r\n updateIndex(event, wavelet)\r\n \r\ndef OnGadgetStateChanged(event, wavelet):\r\n logging.debug(\"OnGadgetStateChanged Called\")\r\n if wavelet.wave_id != 'googlewave.com!w+Gyh_bn35B':\r\n return\r\n alerts = ['GREEN','YELLOW','RED']\r\n logging.debug(event.index)\r\n logging.debug(event.blip.elements)\r\n for pos in range(len(event.blip.elements)):\r\n logging.info(event.blip.elements[pos])\r\n if \"waveapi.element.Gadget\" in str(event.blip.elements[pos]):\r\n pos2 = pos\r\n break\r\n gad = event.blip.elements[pos]\r\n if gad is not None and gad.get('url')=='http://everybodywave.appspot.com/gadget/alerter/gad.xml': \r\n alert_text = alerts[int(gad.get('level'))]\r\n new_title = \"[%s] Public Waves Abuse Alert Level\" % alert_text\r\n opQ = wavelet.get_operation_queue()\r\n opQ.wavelet_set_title(wavelet.wave_id, wavelet.wavelet_id, new_title)\r\n\r\ndef OnWaveletParticipantsChanged(event, wavelet):\r\n logging.debug(\"OnWaveletParticipantsChanged Called\")\r\n opQ = wavelet.get_operation_queue() #Gets operation queue for tagging the wavelet.\r\n unsafe = False #Creates a variable for determining if the wave is safe or not.\r\n results = WAVEWATCHERS_ALL\r\n for participant in wavelet.participants:\r\n if participant not in results:\r\n unsafe = True \r\n if unsafe:\r\n if \"not-safe\" not in wavelet.tags:\r\n opQ.wavelet_modify_tag(wavelet.wave_id, wavelet.wavelet_id, \"not-safe\") #Tags the wavelet\r\n if \"is-safe\" in wavelet.tags:\r\n opQ.wavelet_modify_tag(wavelet.wave_id, wavelet.wavelet_id, \"is-safe\", modify_how = \"remove\") #Removes a tag.\r\n else:\r\n if \"is-safe\" not in wavelet.tags:\r\n opQ.wavelet_modify_tag(wavelet.wave_id, wavelet.wavelet_id, \"is-safe\")\r\n if \"not-safe\" in wavelet.tags:\r\n 
opQ.wavelet_modify_tag(wavelet.wave_id, wavelet.wavelet_id, \"not-safe\", modify_how = \"remove\")\r\n logging.info(\"OnWaveletParticipantsChanged Completed\")\r\n\r\nif __name__ == '__main__':\r\n myRobot = robot.Robot(\"WaveWatcherBot\", \r\n image_url='http://wave-watchers.appspot.com/Wave-Watchers.png',\r\n profile_url='http://groups.google.com/group/wave-watchers')\r\n import verify\r\n myRobot.register_handler(events.WaveletSelfAdded, OnWaveletSelfAdded)\r\n myRobot.register_handler(events.WaveletTitleChanged, OnWaveletTitleChanged)\r\n myRobot.register_handler(events.GadgetStateChanged, OnGadgetStateChanged)\r\n myRobot.register_handler(events.WaveletCreated, OnWaveletCreated)\r\n myRobot.register_handler(events.BlipSubmitted, OnBlipSubmitted)\r\n myRobot.register_handler(events.WaveletParticipantsChanged, OnWaveletParticipantsChanged)\r\n myRobot.setup_oauth(verify.consumerKey, verify.consumerSecret, server_rpc_base='http://gmodules.com/api/rpc')\r\n appengine_robot_runner.run(myRobot)\r\n\r\n" }, { "alpha_fraction": 0.6633166074752808, "alphanum_fraction": 0.6683416962623596, "avg_line_length": 97.75, "blob_id": "ed8ddb603315b7d41c8071e1bd19cc52c5bb919f", "content_id": "fa3bc7f5c253f5c380b94e197f12c8b46df3f142", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 398, "license_type": "no_license", "max_line_length": 313, "num_lines": 4, "path": "/register.py", "repo_name": "Supercopter/wave-watchers-robot", "src_encoding": "UTF-8", "text": "import logging\r\nif __name__ == '__main__':\r\n print('Content-type: text/html\\n\\n')\r\n print \"<h4>Enter your Wave username and choose a <u>new</u> password for wave watchers:</h4><form action='https://wave-watchers.appspot.com/registerTrans' method='POST'>Username: <input type='text' name='un' /><br />Password: <input type='password' name='pwd' /><br/><input type='submit' value='Go' /></form>\" " }, { "alpha_fraction": 0.7978436946868896, "alphanum_fraction": 0.7978436946868896, "avg_line_length": 52, "blob_id": "cf346fc6097b90caf17d3edc502acf716e2d8865", "content_id": "9291739add7a6acf14c21428e978959373bcd2de", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 371, "license_type": "no_license", "max_line_length": 263, "num_lines": 7, "path": "/README.md", "repo_name": "Supercopter/wave-watchers-robot", "src_encoding": "UTF-8", "text": "# wave-watchers-robot\n\nThe wave-watchers robot, for use with google wave, allows anyone to contact the wave-watchers group in a secure and easy way. 
The robot also has the functionality to enable quick and easy communication and collaboration between members of the wave-watchers group.\n\nMore info: http://groups.google.com/group/wave-watchers/\n\nhttp://wavewatchers.org/\n" }, { "alpha_fraction": 0.800000011920929, "alphanum_fraction": 0.800000011920929, "avg_line_length": 30, "blob_id": "3bf94f80bfd99c31999eaac4265cbefbc129ee7e", "content_id": "40fa450ee33f456c74ddaee08cd86b71c4193f00", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 30, "license_type": "no_license", "max_line_length": 30, "num_lines": 1, "path": "/do_nothing.py", "repo_name": "Supercopter/wave-watchers-robot", "src_encoding": "UTF-8", "text": "print \"Homepage not yet setup\"" }, { "alpha_fraction": 0.6810551285743713, "alphanum_fraction": 0.6870503425598145, "avg_line_length": 41.94736862182617, "blob_id": "7f1215fb8e3a532e213c658753a9001ca69940bf", "content_id": "ded2c8f2f1e6ce9ddf1bc30a77e298ccdee5fabd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 834, "license_type": "no_license", "max_line_length": 107, "num_lines": 19, "path": "/register_trans.py", "repo_name": "Supercopter/wave-watchers-robot", "src_encoding": "UTF-8", "text": "import logging\r\nimport cgi\r\nimport hashlib\r\nfrom google.appengine.api import mail\r\n\r\nMAIL_TO = \"[email protected]\"\r\n\r\nif __name__ == '__main__':\r\n query= cgi.FieldStorage()\r\n us= query.getvalue('un')\r\n pw= query.getvalue('pwd')\r\n logging.info(\"user name is %s pwd is %s\" % (us,pw))\r\n print('Content-type: text/html\\n\\n')\r\n print('Username and Password submitted. You may need to wait 1 or 2 days for the service to be updated.')\r\n #print('Your hashed password is %s' % hashlib.sha1(pw).hexdigest())\r\n # TODO - Save the username and hashed pwd in the datastore instead\r\n # Option 2: mail the username and password to the moderator (python email module)\r\n mail_body = \"The username %s has chosen the password %s\" % (us,hashlib.sha1(pw).hexdigest())\r\n mail.send_mail(sender=\"[email protected]\",to=MAIL_TO,subject=\"Addition to WW\",body=mail_body) " }, { "alpha_fraction": 0.6209964156150818, "alphanum_fraction": 0.6245551705360413, "avg_line_length": 23.590909957885742, "blob_id": "10302d069992116277a730a080ae275d336b2199", "content_id": "98f1b2a444789e9b3002e73b47fd3399eb11a94a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 562, "license_type": "no_license", "max_line_length": 44, "num_lines": 22, "path": "/auth_setup.py", "repo_name": "Supercopter/wave-watchers-robot", "src_encoding": "UTF-8", "text": "#Setup code for wave-watchers breakwatery\r\nimport hashlib\r\nimport cgi\r\nimport simplejson\r\nauthDict = {\"nat.abbotts\":'5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8',\r\n\"Albonobo\":'5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8'}\r\n\r\nif __name__ == '__main__':\r\n query= cgi.FieldStorage()\r\n us= query.getvalue('username')\r\n pw= query.getvalue('password')\r\n #us_accept = 'guest'\r\n auth = \"0\"\r\n if us in authDict:\r\n us_accept = us\r\n pwd_accept = authDict[us]\r\n if pw = pwd_accept:\r\n auth = '1'\r\n json_body = {'auth':auth}\r\n json_str = simplejson.dumps(json_body)\r\n print \"Content-type: application/json\\n\\n\"\r\n print json_str " }, { "alpha_fraction": 0.7155025601387024, "alphanum_fraction": 0.7257240414619446, "avg_line_length": 34.6875, "blob_id": "a93d97b0287f101f0accc750886a27db48382238", 
"content_id": "bbf055f5604d7ee510ee23f2a8b3c6e23afbf52f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 587, "license_type": "no_license", "max_line_length": 63, "num_lines": 16, "path": "/wavewatcher_class.py", "repo_name": "Supercopter/wave-watchers-robot", "src_encoding": "UTF-8", "text": "from google.appengine.ext import db\r\n\r\nACCESS_LEVELS = {'OWNER':1, 'MEMBER':2, 'TRUSTED':3, 'ROBOT':4}\r\nVILLAIN_LEVELS = {'BLACKLIST':1, 'GREYLIST':2}\r\n\r\nclass WaveWatchers(db.Model):\r\n userid = db.StringProperty(required=True)\r\n fullname = db.StringProperty(required=False)\r\n level = db.IntegerProperty(required=True)\r\n when = db.DateTimeProperty(auto_now_add=True)\r\n\r\nclass Villain(db.Model):\r\n userid = db.StringProperty(required=True)\r\n fullname = db.StringProperty(required=False)\r\n level = db.IntegerProperty(required=True)\r\n when = db.DateTimeProperty(auto_now_add=True)\r\n" } ]
8
taohu-cn/xmonitor
https://github.com/taohu-cn/xmonitor
7cf6f2fe4bf9946942149e82a1c5655a118e2e44
ae9b0ba713537b34eddf4a67d9d42ed7f6927b34
c90f3ee949f3728948436eaa2a9a0e5a18261943
refs/heads/master
2020-06-12T07:00:32.318361
2016-12-05T07:48:23
2016-12-05T07:48:23
75,598,805
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6688596606254578, "alphanum_fraction": 0.6732456088066101, "avg_line_length": 15.285714149475098, "blob_id": "e151a3ff593de9e54c54e96d40775f2a3b95b022", "content_id": "169d3ef01e11f0fe2ccc52267766378e00252322", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 456, "license_type": "no_license", "max_line_length": 79, "num_lines": 28, "path": "/xclient/plugins/plugin_api.py", "repo_name": "taohu-cn/xmonitor", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# __author__: taohu\n\n# import sys\n# reload(sys)\n# sys.setdefaultencoding(\"utf-8\")\n\nfrom xclient.plugins.linux import life_status, cpu, memory, network, host_alive\n\n\ndef sign_of_life():\n return life_status.monitor()\n\n\ndef get_linux_cpu():\n return cpu.monitor()\n\n\ndef host_alive_check():\n return host_alive.monitor()\n\n\ndef GetNetworkStatus():\n return network.monitor()\n\n\ndef get_memory_info():\n return memory.monitor()\n" }, { "alpha_fraction": 0.5024541616439819, "alphanum_fraction": 0.5243782997131348, "avg_line_length": 38.17948532104492, "blob_id": "e9793568648c22b110b7c4177487810334ba765f", "content_id": "6ba1133262873a679014f9baf0f85dd58d24dae6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6154, "license_type": "no_license", "max_line_length": 119, "num_lines": 156, "path": "/xclient/core/client.py", "repo_name": "taohu-cn/xmonitor", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# __author__: taohu\n\n# import sys\n# reload(sys)\n# sys.setdefaultencoding(\"utf-8\"\n\nimport time\nimport sys\nfrom xclient.conf import settings\nimport json\nimport threading\nfrom xclient.plugins import plugin_api\nfrom xclient.plugins.linux.data_format_convert import data_format_convert\n\n\nclass ClientHandle(object):\n def __init__(self):\n self.monitored_services = {}\n self.get = \"http://%s:%s/%s/%s\" % (\n settings.configs['Server'],\n settings.configs[\"ServerPort\"],\n settings.configs['get'],\n settings.configs['HostID'])\n\n self.post = \"http://%s:%s/%s/\" % (\n settings.configs['Server'],\n settings.configs[\"ServerPort\"],\n settings.configs['post'])\n\n # load the latest monitor configs from monitor server\n def load_latest_configs(self):\n latest_configs = self.httpget()\n latest_configs = json.loads(latest_configs)\n self.monitored_services.update(latest_configs)\n\n def forever_run(self):\n \"\"\"\n :func: start the client program forever\n :return:\n \"\"\"\n exit_flag = False\n config_last_update_time = 0\n while not exit_flag:\n # 是否重载配置\n if time.time() - config_last_update_time > settings.configs['ConfigUpdateInterval']:\n self.load_latest_configs()\n config_last_update_time = time.time()\n\n # self.monitored_services['services'] = {'services': {'Mem': ['n/a', 60], 'CPU': ['n/a', 60]}}\n for service_name, val in self.monitored_services['services'].items():\n # no timestamp in val, means it's the first time to monitor, add 0 as first timestamp\n if len(val) == 2:\n # self.monitored_services['services'][service_name].append(0)\n val.append(0)\n\n monitor_interval = val[1] # u'监控间隔'\n last_invoke_time = val[2] # u'上次监控时间'\n\n if time.time() - last_invoke_time > monitor_interval: # needs to run the plugin\n # u'更新时间戳'\n self.monitored_services['services'][service_name][2] = time.time()\n\n # start a new thread to call each monitor plugin\n t = threading.Thread(target=self.invoke_plugin, args=(service_name, val))\n t.start()\n print(\"Going to 
monitor [%s]\" % service_name)\n\n else:\n print(\"Going to monitor [%s] in [%s] secs\" % (service_name,\n monitor_interval - (time.time() - last_invoke_time)))\n\n time.sleep(1)\n\n def invoke_plugin(self, service_name, val):\n \"\"\"\n invoke the monitor plugin here, and send the data to monitor server after plugin returned data each time\n :param service_name:\n :param val: [pulgin_name,monitor_interval,last_run_time]\n :return:\n \"\"\"\n report_data = {\n 'client_id': 1,\n 'service_name': 'Net',\n 'data': json.dumps(\n # {'status': 0, 'iowait': '0.00', 'system': '4.55', 'idle': '95.04', 'user': '0.41', 'steal': '0.00',\n # 'nice': '0.00'}\n {'status': 0,\n 'data': {'lo': {'t_in': '79.56', 't_out': '79.56'}, 'em1': {'t_in': '0.00', 't_out': '0.00'},\n 'em4': {'t_in': '0.00', 't_out': '0.00'}, 'em3': {'t_in': '102.65', 't_out': '15.47'},\n 'em2': {'t_in': '0.00', 't_out': '0.00'}}}\n )\n }\n self.httppost(params=report_data)\n # plugin_name = val[0]\n # if hasattr(plugin_api, plugin_name.lower()):\n # func = getattr(plugin_api, plugin_name)\n # plugin_callback = func()\n #\n # report_data = {\n # 'client_id': settings.configs['HostID'],\n # 'service_name': service_name,\n # 'data': json.dumps(plugin_callback)\n # }\n #\n # print('---report data:', report_data)\n # self.httppost(params=report_data)\n # else:\n # print(\"\\033[31;1mCannot find plugin names [%s] in plugin_api\\033[0m\" % plugin_name)\n # print('--plugin:', val)\n\n def httpget(self):\n print('\\033[1m33m%s\\033[0m' % __file__, self.get)\n if sys.version.split('.')[0] == '3':\n import urllib.request\n\n req = urllib.request.urlopen(self.get, data=None, timeout=settings.configs['RequestTimeout'])\n callback = data_format_convert(req.read())\n print('\\033[1m33m%s\\033[0m' % __file__, callback)\n return callback\n\n elif sys.version.split('.')[0] == '2':\n import urllib\n import urllib2\n\n req = urllib2.Request(self.get)\n req_data = urllib2.urlopen(req, timeout=settings.configs['RequestTimeout'])\n callback = req_data.read()\n return callback\n\n def httppost(self, **extra_data):\n print('\\033[1m33m%s\\033[0m' % __file__, self.post)\n if sys.version.split('.')[0] == '3':\n import urllib.request\n import urllib.parse\n\n data = urllib.parse.urlencode(extra_data['params'])\n data = data.encode('utf-8')\n request = urllib.request.Request(url=self.post)\n request.add_header(\"Content-Type\", \"application/x-www-form-urlencoded;charset=utf-8\")\n f = urllib.request.urlopen(request, data)\n callback_msg = f.read().decode('utf-8')\n # print(callback_msg)\n\n return callback_msg\n\n elif sys.version.split('.')[0] == '2':\n import urllib\n import urllib2\n\n data_encode = urllib.urlencode(extra_data['params'])\n req = urllib2.Request(url=self.post, data=data_encode)\n res_data = urllib2.urlopen(req, timeout=settings.configs['RequestTimeout'])\n callback = res_data.read()\n callback = json.loads(callback)\n return callback\n" }, { "alpha_fraction": 0.5814889073371887, "alphanum_fraction": 0.5855130553245544, "avg_line_length": 15.566666603088379, "blob_id": "d21cfc8e62ad938cf8a77570009cf4ba9753ea86", "content_id": "10b303f3ab6c12ec64508200619dca6c34941fe1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 505, "license_type": "no_license", "max_line_length": 39, "num_lines": 30, "path": "/xclient/bin/xClient.py", "repo_name": "taohu-cn/xmonitor", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# __author__: taohu\n\n# import sys\n# reload(sys)\n# 
sys.setdefaultencoding(\"utf-8\")\n\nimport sys\nimport os\n\n# xclient 所在目录\nBASE_DIR = os.path.dirname(\n os.path.dirname(\n os.path.dirname(\n os.path.abspath(__file__)\n )\n )\n)\nsys.path.append(BASE_DIR)\n\nif True:\n from xclient.core import main\n\nif __name__ == \"__main__\":\n import logging\n\n try:\n client = main.Handler(sys.argv)\n except Exception as e:\n logging.exception(e)\n" }, { "alpha_fraction": 0.5500575304031372, "alphanum_fraction": 0.5592635273933411, "avg_line_length": 24.558822631835938, "blob_id": "ad6a79c23f7096e1dfbd37981c298146af4291ef", "content_id": "bb43f2e42f0b253435171eb1d8e3a6a48f713b51", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 869, "license_type": "no_license", "max_line_length": 91, "num_lines": 34, "path": "/xclient/plugins/linux/cpu.py", "repo_name": "taohu-cn/xmonitor", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# __author__: taohu\n\n# import sys\n# reload(sys)\n# sys.setdefaultencoding(\"utf-8\")\nimport subprocess\nfrom xclient.plugins.linux.data_format_convert import data_format_convert\n\n\ndef monitor(frist_invoke=1):\n cmd = 'sar 1 3 | tail -n 1'\n res = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n res_err = res.stderr.read()\n res_msg = data_format_convert(res.stdout.read())\n\n if res_err:\n value_dic = {'status': res_err}\n else:\n user, nice, system, iowait, steal, idle = res_msg.split()[2:]\n value_dic = {\n 'user': user,\n 'nice': nice,\n 'system': system,\n 'iowait': iowait,\n 'steal': steal,\n 'idle': idle,\n 'status': 0,\n }\n return value_dic\n\n\nif __name__ == '__main__':\n print(monitor())\n" }, { "alpha_fraction": 0.5275895595550537, "alphanum_fraction": 0.5498548150062561, "avg_line_length": 27.69444465637207, "blob_id": "756583f46d0e50d699ef4394ac4d80554c929d42", "content_id": "89c10b1bb56a0312b58b38fa7280bda73d77ac58", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1033, "license_type": "no_license", "max_line_length": 91, "num_lines": 36, "path": "/xclient/plugins/linux/life_status.py", "repo_name": "taohu-cn/xmonitor", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# __author__: taohu\n\n# import sys\n# reload(sys)\n# sys.setdefaultencoding(\"utf-8\")\nimport subprocess\nfrom xclient.plugins.linux.data_format_convert import data_format_convert\n\n\ndef monitor():\n cmd = 'uptime'\n res = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n res_err = res.stderr.read()\n res_msg = data_format_convert(res.stdout.read())\n\n if res_err:\n value_dic = {'status': res_err}\n else:\n days = res_msg.split(',')[0].split('up')[1].split('days')[0].strip()\n hours = res_msg.split(',')[1].split(':')[0].strip()\n minutes = res_msg.split(',')[1].split(':')[1].strip()\n load1, load5, load15 = res_msg.split('load average:')[1].split(',')\n\n value_dic = {\n 'uptime': days + 'd ' + hours + 'h ' + minutes + 'm',\n 'load1': load1,\n 'load5': load5,\n 'load15': load15,\n 'status': 0\n }\n return value_dic\n\n\nif __name__ == '__main__':\n print(monitor())\n" }, { "alpha_fraction": 0.623046875, "alphanum_fraction": 0.630859375, "avg_line_length": 20.33333396911621, "blob_id": "9b7da6ffb9ef833213f1a522e55bbf174c976523", "content_id": "b7696c73f9cababf46de60575993e0ff8bf965a5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 512, "license_type": "no_license", 
"max_line_length": 73, "num_lines": 24, "path": "/xclient/plugins/linux/host_alive.py", "repo_name": "taohu-cn/xmonitor", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# __author__: taohu\n\n# import sys\n# reload(sys)\n# sys.setdefaultencoding(\"utf-8\")\nimport subprocess\nfrom xclient.plugins.linux.data_format_convert import data_format_convert\n\n\ndef monitor(frist_invoke=1):\n cmd = 'uptime'\n res = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)\n res_msg = data_format_convert(res.stdout.read())\n\n value_dic = {\n 'uptime': res_msg,\n 'status': 0\n }\n return value_dic\n\n\nif __name__ == '__main__':\n print(monitor())\n" }, { "alpha_fraction": 0.5575447678565979, "alphanum_fraction": 0.6061381101608276, "avg_line_length": 19.578947067260742, "blob_id": "49d31a4f6a07047c8beaf0db086add3cef0b4a21", "content_id": "7a36cd806ebb6e33cef9d8cb4b522acab6b319d3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 391, "license_type": "no_license", "max_line_length": 77, "num_lines": 19, "path": "/xclient/conf/settings.py", "repo_name": "taohu-cn/xmonitor", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# __author__: taohu\n\n# import sys\n# reload(sys)\n# sys.setdefaultencoding(\"utf-8\")\n\nconfigs = {\n 'HostID': 1,\n \"Server\": \"127.0.0.1\",\n \"ServerPort\": 9001,\n\n 'get': 'api/client/config', # acquire all the services will be monitored\n 'post': 'api/client/service/report',\n\n 'RequestTimeout': 30,\n 'ConfigUpdateInterval': 300, # 5 mins as default\n\n}\n" }, { "alpha_fraction": 0.5727272629737854, "alphanum_fraction": 0.6636363863945007, "avg_line_length": 26.5, "blob_id": "db165ed3fafe7b0c2f27619a49bdbbb21465f03a", "content_id": "3e37ee345fed8fc05fc1f3e404a7c68b6ffe0617", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 110, "license_type": "no_license", "max_line_length": 49, "num_lines": 4, "path": "/api/tests.py", "repo_name": "taohu-cn/xmonitor", "src_encoding": "UTF-8", "text": "from django.test import TestCase\n\n# Create your tests here.\nprint('\\033[1;33m %s \\033[0m' % __file__, 'test')\n" }, { "alpha_fraction": 0.717587947845459, "alphanum_fraction": 0.7185929417610168, "avg_line_length": 27.428571701049805, "blob_id": "41b4d8ff99710ff4a819efdcd939e97dacc3847d", "content_id": "95718312f50a9a12c939b68a404183a0eae191ac", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1990, "license_type": "no_license", "max_line_length": 117, "num_lines": 70, "path": "/api/admin.py", "repo_name": "taohu-cn/xmonitor", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# __author__: taohu\n\n# import sys\n# reload(sys)\n# sys.setdefaultencoding(\"utf-8\")\nfrom django.contrib import admin\n\nfrom api import models\n\n\n# Register your models here.\n\nclass HostAdmin(admin.ModelAdmin):\n list_display = ('id', 'ip', 'status')\n filter_horizontal = ('groups', 'templates')\n\n\nclass HostGroupAdmin(admin.ModelAdmin):\n list_display = ('name', 'memo')\n\n\nclass TemplateAdmin(admin.ModelAdmin):\n list_display = ('name',)\n filter_horizontal = ('services', 'triggers')\n\n\nclass ServiceAdmin(admin.ModelAdmin):\n filter_horizontal = ('items',)\n list_display = ('name', 'interval', 'plugin_name')\n # list_select_related = ('items',)\n\n\nclass ServiceIndexAdmin(admin.ModelAdmin):\n list_display = ('name', 'key', 'data_type')\n\n\nclass TriggerExpressionInline(admin.TabularInline):\n 
model = models.TriggerExpression\n # exclude = ('memo',)\n # readonly_fields = ['create_date']\n\n\nclass TriggerAdmin(admin.ModelAdmin):\n list_display = ('name', 'severity', 'enabled')\n inlines = [TriggerExpressionInline, ]\n # filter_horizontal = ('expressions',)\n\n\nclass TriggerExpressionAdmin(admin.ModelAdmin):\n list_display = (\n 'trigger', 'service', 'service_index', 'specified_index_key', 'operator_type', 'data_calc_func', 'threshold',\n 'logic_type')\n\n\nclass UserProfileAdmin(admin.ModelAdmin):\n list_display = ('user', 'name', 'phone', 'weixin', 'email')\n\n\nadmin.site.register(models.Host, HostAdmin)\nadmin.site.register(models.HostGroup, HostGroupAdmin)\nadmin.site.register(models.Template, TemplateAdmin)\nadmin.site.register(models.Service, ServiceAdmin)\nadmin.site.register(models.Trigger, TriggerAdmin)\nadmin.site.register(models.TriggerExpression, TriggerExpressionAdmin)\nadmin.site.register(models.ServiceIndex, ServiceIndexAdmin)\nadmin.site.register(models.Action)\nadmin.site.register(models.ActionOperation)\nadmin.site.register(models.Maintenance)\nadmin.site.register(models.UserProfile, UserProfileAdmin)\n" }, { "alpha_fraction": 0.6790123581886292, "alphanum_fraction": 0.6851851940155029, "avg_line_length": 22.14285659790039, "blob_id": "c67d0228056fa2ae13324bb8bd93638729f7c3c4", "content_id": "e9a14bed777ebe3780d4c321cbf57cf96f738e45", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 324, "license_type": "no_license", "max_line_length": 113, "num_lines": 14, "path": "/api/backends/redis_conn.py", "repo_name": "taohu-cn/xmonitor", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# __author__: taohu\n\n# import sys\n# reload(sys)\n# sys.setdefaultencoding(\"utf-8\")\n\nimport redis\n\n\ndef redis_conn(django_settings):\n pool = redis.ConnectionPool(host=django_settings.REDIS_CONN['HOST'], port=django_settings.REDIS_CONN['PORT'])\n r = redis.Redis(connection_pool=pool)\n return r\n" }, { "alpha_fraction": 0.5106382966041565, "alphanum_fraction": 0.5271867513656616, "avg_line_length": 20.149999618530273, "blob_id": "09e38df8e13dd1ae1d86d91271385e54f1cc6bdd", "content_id": "0815cdb6b5879199438150bc18513d6268bd6d91", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 423, "license_type": "no_license", "max_line_length": 46, "num_lines": 20, "path": "/api/byte_to_str.py", "repo_name": "taohu-cn/xmonitor", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# __author__: taohu\n\nimport sys\n\n\n# reload(sys)\n# sys.setdefaultencoding(\"utf-8\")\n\nclass ByteToStr(object):\n # def __init__(self, data):\n # self.data = data\n\n @staticmethod\n def converter(data):\n if sys.version.split('.')[0] == '3':\n data = str(data, encoding='utf-8')\n elif sys.version.split('.')[0] == '2':\n data = str(data)\n return data\n" }, { "alpha_fraction": 0.6264010071754456, "alphanum_fraction": 0.62889164686203, "avg_line_length": 27.678571701049805, "blob_id": "ee6e7e74374242366eae3f2e30b7b36e6d8b6c41", "content_id": "ca4a47c23863618379f2dd80b5ce454ac20d4922", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 825, "license_type": "no_license", "max_line_length": 72, "num_lines": 28, "path": "/api/urls.py", "repo_name": "taohu-cn/xmonitor", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# __author__: taohu\n\n# import sys\n# reload(sys)\n# sys.setdefaultencoding(\"utf-8\")\n\nfrom 
django.conf.urls import url\n\nfrom api import views\n\nurlpatterns = [\n # 默认页\n url(r'^$', views.index),\n # 获取配置\n url(r'client/config/(\\d+)/$', views.client_configs),\n # 汇报数据\n url(r'client/service/report/$', views.service_data_report),\n\n url(r'^dashboard/$', views.dashboard, name='dashboard'),\n url(r'^triggers/$', views.triggers, name='triggers'),\n url(r'hosts/$', views.hosts, name='hosts'),\n url(r'hosts/(\\d+)/$', views.host_detail, name='host_detail'),\n url(r'trigger_list/$', views.trigger_list, name='trigger_list'),\n\n url(r'hosts/status/$', views.hosts_status, name='get_hosts_status'),\n url(r'graphs/$', views.graphs_gerator, name='get_graphs')\n]\n" }, { "alpha_fraction": 0.5198237895965576, "alphanum_fraction": 0.5253304243087769, "avg_line_length": 22.28205108642578, "blob_id": "196d0bdfd763bb9d0df25ee3503dd7f4afa79e8c", "content_id": "ced3130b5dab44c94a3e8bd14350aa2514a3e7ae", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 940, "license_type": "no_license", "max_line_length": 52, "num_lines": 39, "path": "/xclient/core/main.py", "repo_name": "taohu-cn/xmonitor", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# __author__: taohu\n\n# import sys\n# reload(sys)\n# sys.setdefaultencoding(\"utf-8\")\nfrom xclient.core import client\n\n\nclass Handler(object):\n def __init__(self, sys_args):\n self.sys_args = sys_args\n\n self.msg = \"\"\"\n start: start monitor client\n stop : stop monitor client\n \"\"\"\n self.selfcheck()\n self.command_allowcator()\n\n # 检查命令格式是否正确\n def selfcheck(self):\n if len(self.sys_args) < 2:\n exit(self.msg)\n\n # 启动/停止 入口\n def command_allowcator(self):\n if hasattr(self, self.sys_args[1]):\n func = getattr(self, self.sys_args[1])\n return func()\n else:\n exit(self.msg)\n\n def start(self):\n obj = client.ClientHandle()\n obj.forever_run()\n\n def stop(self):\n print(\"stopping the monitor client\")\n" }, { "alpha_fraction": 0.5194515585899353, "alphanum_fraction": 0.5343886017799377, "avg_line_length": 40.15137481689453, "blob_id": "786d02c840208c551c67f822b055f64bd5e55e56", "content_id": "210f2ab4b6f619e9940b235cd02786234acdd67b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9139, "license_type": "no_license", "max_line_length": 120, "num_lines": 218, "path": "/api/backends/data_optimization.py", "repo_name": "taohu-cn/xmonitor", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# __author__: taohu\n\n# import sys\n# reload(sys)\n# sys.setdefaultencoding(\"utf-8\")\n\nfrom xmonitor import settings\nimport sys\nimport time\nimport json\nimport copy\n\n\nclass DataStore(object):\n \"\"\"processing the reported data , do some data optimiaztion and save it into redis DB\"\"\"\n\n def __init__(self, client_id, service_name, data, redis_obj):\n \"\"\"\n :param client_id:\n :param service_name:\n :param data: the client reported service clean data\n :return:\n \"\"\"\n self.py_version = sys.version.split('.')[0]\n self.client_id = client_id\n self.service_name = service_name\n self.data = data\n self.redis_conn_obj = redis_obj\n self.process_and_save()\n\n def converter(self, data):\n if self.py_version == '3':\n if type(data) is bytes:\n data = str(data, encoding='utf-8')\n elif self.py_version == '2':\n data = str(data)\n return data\n\n def process_and_save(self):\n \"\"\"\n :func: processing data and save into redis\n :return:\n \"\"\"\n if self.data[\"status\"] == 0:\n\n # for key, data_store_definition 
in {'latest': [0, 600]}.items()\n for key, data_store_definition in settings.STATUS_DATA_OPTIMIZATION.items():\n data_series_key_in_redis = \"StatusData_%s_%s_%s\" % (self.client_id, self.service_name, key)\n\n last_point_from_redis = self.redis_conn_obj.lrange(data_series_key_in_redis, -1, -1)\n if not last_point_from_redis: # the key doesn't exist in redis, then generate a timestamp\n self.redis_conn_obj.rpush(data_series_key_in_redis, json.dumps([None, time.time()]))\n\n if data_store_definition[0] == 0: # save Real-time data\n self.redis_conn_obj.rpush(data_series_key_in_redis, json.dumps([self.data, time.time()]))\n\n else: # data might needs to be optimized\n last_saved_data_b = self.redis_conn_obj.lrange(data_series_key_in_redis, -1, -1)[0]\n last_saved_data_s = self.converter(last_saved_data_b)\n last_saved_data_j = json.loads(last_saved_data_s)\n last_point_data, last_point_save_time = last_saved_data_j\n\n # 超出更新时间间隔, 取最新数据, 再次生成优化的数据\n if time.time() - last_point_save_time >= data_store_definition[0]:\n lastest_data_key_in_redis = \"StatusData_%s_%s_latest\" % (self.client_id, self.service_name)\n data_set = self.get_data_slice(lastest_data_key_in_redis, data_store_definition[0])\n\n if data_set:\n # 优化数据集\n optimized_data = self.get_optimized_data(data_series_key_in_redis, data_set)\n if optimized_data:\n self.save_optimized_data(data_series_key_in_redis, optimized_data)\n\n # 同时确保数据在redis中的存储数量不超过settings中指定 的值\n if self.redis_conn_obj.llen(data_series_key_in_redis) >= data_store_definition[1]:\n self.redis_conn_obj.lpop(data_series_key_in_redis) # 删除最旧的一个数据\n else:\n print(\"report data is invalid::\", self.data)\n raise ValueError\n\n def get_data_slice(self, lastest_data_key, optimization_interval):\n \"\"\"\n :param lastest_data_key:\n :param optimization_interval: e.g: 600, means get latest 10 mins real data from redis\n :return:\n \"\"\"\n all_real_data = self.redis_conn_obj.lrange(lastest_data_key, 1, -1)\n # all_real_data = self.converter(all_real_data)\n data_set = []\n for item in all_real_data:\n data = json.loads(self.converter(item))\n if len(data) == 2:\n service_data, last_save_time = data\n if time.time() - last_save_time <= optimization_interval: # filter this data point out\n data_set.append(data)\n else:\n pass\n return data_set\n\n def save_optimized_data(self, data_series_key_in_redis, optimized_data):\n \"\"\"\n save the optimized data into db\n :param data_series_key_in_redis:\n :param optimized_data:\n :return:\n \"\"\"\n self.redis_conn_obj.rpush(data_series_key_in_redis, json.dumps([optimized_data, time.time()]))\n\n def get_optimized_data(self, data_set_key, data_set):\n \"\"\"\n calculate out ava,max,min,mid value from raw service data set\n :param data_set_key: where the optimized data needed to save to in redis db\n :param data_set: [ [{'steal': '0.00', ..., 'status': 0}, 1480485017.90692], ..., ]\n :param data_set: [ [{'data': {'em1': {'t_out': '0.00', 't_in': '0.00'}, ...,}, 'status': 0}, 1480506115.061609]]\n :return:\n \"\"\"\n optimized_dic = {} # set a empty dic, will save optimized data later\n service_data_keys = data_set[0][0].keys() # [iowait, idle, system...] 
或者 [data]\n first_service_data_point = data_set[0][0] # use this to build up a new empty dic\n\n if \"data\" not in service_data_keys: # means this dic has no subdic, works for service like cpu,memory\n for key in service_data_keys:\n optimized_dic[key] = [] # {'nice': [], 'idle': [], ..., 'system': []}\n\n # 把数据按照指标转储成列表形式\n for service_data_item, last_save_time in data_set: # service_data_item: {}, last_save_time: timestamp\n for k, v in service_data_item.items():\n optimized_dic[k].append(round(float(v), 2))\n\n for service_k, v_list in optimized_dic.items():\n avg_res = self.get_average(v_list)\n max_res = self.get_max(v_list)\n min_res = self.get_min(v_list)\n mid_res = self.get_mid(v_list)\n optimized_dic[service_k] = [avg_res, max_res, min_res, mid_res]\n print(optimized_dic)\n else:\n \"\"\"\n has sub dic, inside key is \"data\".\n works for a service has multiple independent items, like many ethernet/disks and so on.\n first_service_data_point: {'data': {'em1': {'t_out': '0.00', 't_in': '0.00'}, ...,}, 'status': 0}\n \"\"\"\n # 生成数据存储字典的格式\n for service_item_key, v_dic in first_service_data_point[\"data\"].items():\n optimized_dic[service_item_key] = {}\n for k2, v2 in v_dic.items():\n optimized_dic[service_item_key][k2] = [] # {etho0:{t_in:[],t_out:[]}}\n\n tmp_data_dic = copy.deepcopy(optimized_dic)\n if tmp_data_dic: # some times this tmp_data_dic might be empty due to client report err\n for service_data_item, last_save_time in data_set:\n for service_index, val_dic in service_data_item[\"data\"].items(): # service_index: eth0, eth1...\n for service_item_sub_key, val in val_dic.items(): # service_item_sub_key: t_in, t_out\n\n tmp_data_dic[service_index][service_item_sub_key].append(round(float(val), 2))\n\n for service_k, v_dic in tmp_data_dic.items():\n for service_sub_k, v_list in v_dic.items():\n avg_res = self.get_average(v_list)\n max_res = self.get_max(v_list)\n min_res = self.get_min(v_list)\n mid_res = self.get_mid(v_list)\n optimized_dic[service_k][service_sub_k] = [avg_res, max_res, min_res, mid_res]\n\n else:\n print(\"\\033[41;1mMust be sth wrong with client report data\\033[0m\")\n print(\"optimized empty dic:\", optimized_dic)\n\n return optimized_dic\n\n def get_average(self, data_set):\n \"\"\"\n calc the avg value of data set\n :param data_set:\n :return:\n \"\"\"\n if len(data_set) > 0:\n res = sum(data_set) / len(data_set)\n return round(res, 2)\n else:\n return 0\n\n def get_max(self, data_set):\n \"\"\"\n calc the max value of the data set\n :param data_set:\n :return:\n \"\"\"\n if len(data_set) > 0:\n return max(data_set)\n else:\n return 0\n\n def get_min(self, data_set):\n \"\"\"\n calc the minimum value of the data set\n :param data_set:\n :return:\n \"\"\"\n if len(data_set) > 0:\n return min(data_set)\n else:\n return 0\n\n def get_mid(self, data_set):\n \"\"\"\n calc the mid value of the data set\n :param data_set:\n :return:\n \"\"\"\n data_set.sort()\n # [1,4,99,32,8,9,4,5,9]\n # [1,3,5,7,9,22,54,77]\n if len(data_set) > 0:\n return data_set[int(len(data_set) / 2)]\n else:\n return 0\n" }, { "alpha_fraction": 0.5736160278320312, "alphanum_fraction": 0.583038866519928, "avg_line_length": 34.375, "blob_id": "1cdc366ab9d46c7aad3e88a40e55131fe1815660", "content_id": "d01e2138b345452e76d551cc119ce6241d0e1294", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1698, "license_type": "no_license", "max_line_length": 116, "num_lines": 48, "path": "/xclient/plugins/linux/memory.py", "repo_name": 
"taohu-cn/xmonitor", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# __author__: taohu\n\n# import sys\n# reload(sys)\n# sys.setdefaultencoding(\"utf-8\")\nimport subprocess\nfrom xclient.plugins.linux.data_format_convert import data_format_convert\n\n\ndef monitor(frist_invoke=1):\n monitor_dic = {\n 'SwapUsage': 'percentage',\n 'MemUsage': 'percentage',\n }\n\n cmd = \"grep 'MemTotal\\|MemFree\\|Buffers\\|^Cached\\|SwapTotal\\|SwapFree' /proc/meminfo | awk -F 'kB' '{print $1}'\"\n res = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n res_err = res.stderr.read()\n res_msg = data_format_convert(res.stdout.read())\n\n if res_err: # cmd exec error\n value_dic = {'status': res_err}\n else:\n value_dic = {'status': 0}\n for line in res_msg.split('\\n'):\n if not line:\n continue\n key = line.split(':')[0] # factor name\n value = line.split(':')[1].strip() # factor value\n value_dic[key] = value\n\n if monitor_dic['SwapUsage'] == 'percentage':\n value_dic['SwapUsage_p'] = str(100 - int(value_dic['SwapFree']) * 100 / int(value_dic['SwapTotal']))\n # real SwapUsage value\n value_dic['SwapUsage'] = int(value_dic['SwapTotal']) - int(value_dic['SwapFree'])\n\n MemUsage = int(value_dic['MemTotal']) - (\n int(value_dic['MemFree']) + int(value_dic['Buffers']) + int(value_dic['Cached']))\n if monitor_dic['MemUsage'] == 'percentage':\n value_dic['MemUsage_p'] = str(int(MemUsage) * 100 / int(value_dic['MemTotal']))\n # real MemUsage value\n value_dic['MemUsage'] = MemUsage\n return value_dic\n\n\nif __name__ == '__main__':\n print(monitor())\n" }, { "alpha_fraction": 0.5841463208198547, "alphanum_fraction": 0.5975610017776489, "avg_line_length": 30.538461685180664, "blob_id": "4c0ffff559dbef4a85851d9a931430568d9ab63b", "content_id": "a5bc2dd59cac6589c7f8e0e48888cfdc20053cd2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 820, "license_type": "no_license", "max_line_length": 91, "num_lines": 26, "path": "/xclient/plugins/linux/network.py", "repo_name": "taohu-cn/xmonitor", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# __author__: taohu\n\n# import sys\n# reload(sys)\n# sys.setdefaultencoding(\"utf-8\")\nimport subprocess\nfrom xclient.plugins.linux.data_format_convert import data_format_convert\n\n\ndef monitor(frist_invoke=1):\n cmd = 'sar -n DEV 1 5 | grep -v IFACE | grep Average'\n res = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n res_err = res.stderr.read()\n res_msg = data_format_convert(res.stdout.read()).strip()\n\n if res_err:\n value_dic = {'status': res_err, 'data': {}}\n else:\n value_dic = {'status': 0, 'data': {}}\n for line in res_msg:\n line = line.split()\n nic_name, t_in, t_out = line[1], line[4], line[5]\n value_dic['data'][nic_name] = {\"t_in\": line[4], \"t_out\": line[5]}\n\n return value_dic\n" }, { "alpha_fraction": 0.6711649298667908, "alphanum_fraction": 0.6743401288986206, "avg_line_length": 32.818790435791016, "blob_id": "1842c33aa2c41c599613e8ff0b0243e72f99bbb4", "content_id": "eb4f6fcedcfb955b2f65040982ae42ddb90f74c3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5155, "license_type": "no_license", "max_line_length": 112, "num_lines": 149, "path": "/api/views.py", "repo_name": "taohu-cn/xmonitor", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# __author__: taohu\n\n# import sys\n# reload(sys)\n# sys.setdefaultencoding(\"utf-8\")\nimport 
json\nfrom xmonitor import settings\nfrom django.shortcuts import render, HttpResponse\nfrom django.views.decorators.csrf import csrf_exempt\nfrom api import graphs\nfrom api import models\nfrom api import serializer\nfrom api.serializer import ClientHandler, get_host_triggers\nfrom api.backends import redis_conn\nfrom api.backends import data_optimization\nfrom api.backends import data_processing\n\nREDIS_OBJ = redis_conn.redis_conn(settings)\n\n\ndef index(request):\n return render(request, 'monitor/index.html')\n\n\ndef dashboard(request):\n return render(request, 'monitor/dashboard.html')\n\n\ndef triggers(request):\n return render(request, 'monitor/triggers.html')\n\n\ndef hosts(request):\n host_list = models.Host.objects.all()\n return render(request, 'monitor/hosts.html', {'host_list': host_list})\n\n\ndef host_detail(request, host_id):\n host_obj = models.Host.objects.get(id=host_id)\n return render(request, 'monitor/host_detail.html', {'host_obj': host_obj})\n\n\ndef host_detail_old(request, host_id):\n host_obj = models.Host.objects.get(id=host_id)\n\n config_obj = ClientHandler(host_obj.id)\n monitored_services = {\n \"services\": {},\n \"sub_services\": {} # 存储一个服务有好几个独立子服务 的监控,比如网卡服务 有好几个网卡\n }\n\n template_list = list(host_obj.templates.select_related())\n\n for host_group in host_obj.host_groups.select_related():\n template_list.extend(host_group.templates.select_related())\n print('\\033[1;33m %s \\033[0m' % __file__, template_list)\n for template in template_list:\n # print(template.services.select_related())\n\n for service in template.services.select_related(): # loop each service\n print(service)\n if not service.has_sub_service:\n monitored_services['services'][service.name] = [service.plugin_name, service.interval]\n else:\n monitored_services['sub_services'][service.name] = []\n\n # get last point from redis in order to acquire the sub-service-key\n last_data_point_key = \"StatusData_%s_%s_latest\" % (host_obj.id, service.name)\n last_point_from_redis = REDIS_OBJ.lrange(last_data_point_key, -1, -1)[0]\n if last_point_from_redis:\n data, data_save_time = json.loads(last_point_from_redis)\n if data:\n service_data_dic = data.get('data')\n for serivce_key, val in service_data_dic.items():\n monitored_services['sub_services'][service.name].append(serivce_key)\n\n return render(request, 'host_detail.html', {'host_obj': host_obj, 'monitored_services': monitored_services})\n\n\ndef hosts_status(request):\n hosts_data_serializer = serializer.StatusSerializer(request, REDIS_OBJ)\n hosts_data = hosts_data_serializer.by_hosts()\n\n return HttpResponse(json.dumps(hosts_data))\n\n\n# 获取配置\ndef client_configs(request, client_id):\n print(\"--->\", client_id)\n config_obj = ClientHandler(client_id)\n config = config_obj.fetch_configs()\n\n if config:\n return HttpResponse(json.dumps(config))\n\n\n@csrf_exempt\ndef service_data_report(request):\n data = json.loads(request.POST['data'])\n client_id = request.POST.get('client_id')\n service_name = request.POST.get('service_name')\n # 数据优化及存储\n data_saveing_obj = data_optimization.DataStore(client_id, service_name, data, REDIS_OBJ)\n\n # redis_key_format = \"StatusData_%s_%s_latest\" %(client_id,service_name)\n # data['report_time'] = time.time()\n # REDIS_OBJ.lpush(redis_key_format,json.dumps(data))\n\n # 在这里同时触发监控\n host_obj = models.Host.objects.get(id=client_id)\n service_triggers = get_host_triggers(host_obj)\n\n trigger_handler = data_processing.DataHandler(settings, connect_redis=False)\n for trigger in service_triggers:\n 
trigger_handler.load_service_data_and_calulating(host_obj, trigger, REDIS_OBJ)\n print(\"service trigger::\", service_triggers)\n\n # 更新主机存活状态\n # host_alive_key = \"HostAliveFlag_%s\" % client_id\n # REDIS_OBJ.set(host_alive_key,time.time())\n\n return HttpResponse(json.dumps(\"---report success---\"))\n\n\ndef graphs_gerator(request):\n graphs_generator = graphs.GraphGenerator2(request, REDIS_OBJ)\n graphs_data = graphs_generator.get_host_graph()\n\n return HttpResponse(json.dumps(graphs_data))\n\n\ndef graph_bak(request):\n # host_id = request.GET.get('host_id')\n # service_key = request.GET.get('service_key')\n\n # print(\"graph:\", host_id,service_key)\n\n graph_generator = graphs.GraphGenerator(request, REDIS_OBJ)\n graph_data = graph_generator.get_graph_data()\n if graph_data:\n return HttpResponse(json.dumps(graph_data))\n\n\ndef trigger_list(request):\n trigger_handle_obj = serializer.TriggersView(request, REDIS_OBJ)\n trigger_data = trigger_handle_obj.fetch_related_filters()\n\n return render(request, 'monitor/trigger_list.html', {'trigger_list': trigger_data})\n" } ]
17
Seraph2000/convex_tech_test
https://github.com/Seraph2000/convex_tech_test
5bfe71b4ef2111f187747cb958ce8ceb572ff703
ba9d240985a8074c48ff7a786a549196abaf6e45
147ccadd7b651d6524d069491ba4bae3a45a6b23
refs/heads/master
2022-07-06T19:02:12.914221
2020-05-18T10:57:41
2020-05-18T10:57:41
264,757,434
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5053333044052124, "alphanum_fraction": 0.5400000214576721, "avg_line_length": 19.83333396911621, "blob_id": "058eabd0541fcf6f5022257fd97b2eb102199cb8", "content_id": "59a6a4808555544181eeb07b0386c1a82d9e175b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1500, "license_type": "no_license", "max_line_length": 80, "num_lines": 72, "path": "/.aws/create_role.py", "repo_name": "Seraph2000/convex_tech_test", "src_encoding": "UTF-8", "text": "import boto3\nimport json\n\naccount_id = '842664811632'\nsession = boto3.session.Session(profile_name='default')\niam = session.client('iam')\n\npath = '/'\n# this needs to be unique\nrole_name = 'ec2-test-role-1'\n# this needs to be unique\ndescription = 'boto3 ec2 test role 1'\n\ntrust_policy = {\n \"Version\": \"2012-10-17\",\n \"Statement\": [\n {\n \"Sid\": \"\",\n \"Effect\": \"Allow\",\n \"Principal\": {\n \"Service\": \"ec2.amazonaws.com\"\n },\n \"Action\": \"sts:AssumeRole\"\n }\n ]\n}\n\nresponse = iam.create_role(\n Path=path,\n RoleName=role_name,\n AssumeRolePolicyDocument=json.dumps(trust_policy),\n Description=description,\n MaxSessionDuration=3600\n)\n\nmanaged_policy = {\n \"Version\": \"2012-10-17\",\n \"Statement\": [\n {\n \"Sid\": \"S3ReadOnly\",\n \"Effect\": \"Allow\",\n \"Action\": [\n \"s3:Get*\",\n \"s3:List*\"\n ],\n \"Resource\": [\n \"*\"\n ]\n },\n {\n \"Sid\": \"Ec2FullAccess\",\n \"Action\": \"ec2:*\",\n \"Effect\": \"Allow\",\n \"Resource\": \"*\"\n }\n ]\n}\n\nresponse = iam.create_policy(\n PolicyName='boto3-test-policy-1',\n PolicyDocument=json.dumps(managed_policy)\n)\n\niam.attach_role_policy(\n # this needs to match policy name\n PolicyArn='arn:aws:iam::' + str(account_id) + ':policy/boto3-test-policy-1',\n # this needs to be same as rolename above\n RoleName='ec2-test-role-1'\n)\n\n\n# spin up an ec2 instance\n" }, { "alpha_fraction": 0.607692301273346, "alphanum_fraction": 0.6282051205635071, "avg_line_length": 21.941177368164062, "blob_id": "dff1b586cec5ce6a321355fec288ea3215d978f3", "content_id": "e79d32393262978c0b858ecca14b35d140782177", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 390, "license_type": "no_license", "max_line_length": 61, "num_lines": 17, "path": "/.aws/upload_file_to_s3.py", "repo_name": "Seraph2000/convex_tech_test", "src_encoding": "UTF-8", "text": "import boto3\n\n\ndef upload_to_aws(local_file, bucket, s3_file):\n s3 = boto3.client('s3')\n try:\n s3.upload_file(local_file, bucket, s3_file)\n print(\"Upload Successful\")\n return True\n except FileNotFoundError:\n print(\"The file was not found\")\n return False\n\n\nuploaded = upload_to_aws(\n '../example.parquet', 'convex-bucket1', 'example.parquet'\n)\n" }, { "alpha_fraction": 0.7064075469970703, "alphanum_fraction": 0.7211134433746338, "avg_line_length": 26.579710006713867, "blob_id": "7f387daf8a78664d103cd147d3aac7183705eaa7", "content_id": "773ec0fd247166184f4fc75ef2e044c9cd562ab3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1904, "license_type": "no_license", "max_line_length": 224, "num_lines": 69, "path": "/.aws/README.md", "repo_name": "Seraph2000/convex_tech_test", "src_encoding": "UTF-8", "text": "#### *Note: I am usuming you are gong to be running the scripts using Mac of Linux operating systems.*\n\n# Instructions for installation\n\n\n## 1. 
Create a virtual environment.\n\n`virtualenv --python=<path-to-python3> venv`\n\n## 2. activate virtual environment.\n\n`source venv/bin/activate`\n\n## 3. Clone the code from GitHub.\n\n`git clone [email protected]:Seraph2000/convex_tech_test.git`\n\n## 4. Install dependencies\n\n`pip install -r requirements.txt`\n\n# Instructions for operating the scripts\n\n## 1. Run the scripts in the following order, like so:\n\n### i. Create a Parquet file locally, with some data in it. Run the following command to do this:\n\n#### `python create_parquet_file.py`\n\n\n## 2. Create an S3 bucket, and upload the parquet file to the bucket.\n\n### i. Make sure you're properly connected to AWS via the terminal, by running Amazon's configuration tool, like so, and following the wizard. You'll be asked for credentials corresponding to the user you want to connect to.\n\n### `aws configure`\n\n### ii. To generate the bucket enter:\n\n### `python generate_bucket.py`\n\n### iii. To upload the parquet file, enter:\n\n### `python upload_file_to_s3.py`\n\n## 3. Create an IAM Role that gives Read & List access to the S3 bucket and upload the parquet file to the bucket. Run the following command:\n\n### i. `python `\n\n\n## 4. Spin up an EC2 instance that has the above IAM Role attached.\n\n### iii. Run the following script, to generate key pair for ec2 instance:\n\n `python generate_key_pair.py'\n\n### ii. Make sure you change the mode of the key pair file to read-only using the following command \n# in bash terminal, otherwise it will be denied access.\n\n `chmod 400 ec2-keypair.pem`\n\n### iii. Run the following to create the ec2 instance\n\n `python generate_ec2.py`\n\n\n## 5. Install R on the EC2 instance and generate a parquet reader script.\n\n\n## 7. Run the *Parquet Reader* R Script, inside the EC2 instance.\n\n" }, { "alpha_fraction": 0.6821378469467163, "alphanum_fraction": 0.7201125025749207, "avg_line_length": 19.897058486938477, "blob_id": "dbed5e45bc505e1bdc2fbc440ced07ed652bfb1b", "content_id": "399670b949e2c3bbfabcca600c640ad2cee0d610", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1430, "license_type": "no_license", "max_line_length": 73, "num_lines": 68, "path": "/.aws/automate.py", "repo_name": "Seraph2000/convex_tech_test", "src_encoding": "UTF-8", "text": "from create_parquet_file import create_parquet_file\nfrom generate_bucket import generate_bucket\n\n\n# 1. Create a parquet file\ncreate_parquet_file('example1.parquet')\n\n# 2. Create an S3 bucket, and upload the parquet file to the bucket\ngenerate_bucket('convex-bucket2')\n\n\n#3 Create an IAM Role that gives Read & List access to the S3 bucket\n\n\n#4 Spin up an EC2 instance that has the above IAM Role attached\n\n\n#5 Install R on the EC2 instance\n\n\n#6 Copy a “Parquet Reader” R Script to the EC2 instance\n\n#7 Run the “Parquet Reader” R Script\n#https://arrow.apache.org/docs/r/reference/read_parquet.html\n\n# i.e. 
# NOT RUN {\n# df <- read_parquet(system.file(\"v0.7.1.parquet\", package=\"arrow\"))\n# head(df)\n# # }\n\n# this should be in a separate file\n\n#######credentials########\n\n# user can sign in at: https://842664811632.signin.aws.amazon.com/console\n# access key id: AKIA4IMV4TRYE3NFTPE7\n# secret access key id: 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\n\n####watch tutorial series####\n# https://www.youtube.com/watch?v=Bom9_K4m4sg\n\n\n\n\n\n\n\n#1 create an IAM account in AWS\n# login: https://842664811632.signin.aws.amazon.com/console\n# access key id: AKIA4IMV4TRYKFXTYLN5\n# secret access key: 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\n# install boto3\n\n#2 create a .aws folder\n#3 cd into .aws & create the following two files\n#4 create credentials & config\n# touch credentials\n# touch config\n# edit the files accordingly\n\n#3 now create the script!\n\n\n\n\n# try $ aws configure\n# install aws-cli\n# sudo snap install aws-cli\n\n" }, { "alpha_fraction": 0.5682451128959656, "alphanum_fraction": 0.5877437591552734, "avg_line_length": 18.94444465637207, "blob_id": "e1c2c05801688e52f08fd5c4e5164b5046b678f8", "content_id": "27f9df01412540571771b206e671c590dc5e7299", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 359, "license_type": "no_license", "max_line_length": 45, "num_lines": 18, "path": "/.aws/generate_bucket.py", "repo_name": "Seraph2000/convex_tech_test", "src_encoding": "UTF-8", "text": "import boto3\n\n\ndef generate_bucket(bucket_name):\n\n s3 = boto3.resource('s3')\n\n s3.create_bucket(\n ACL='public-read-write',\n Bucket=bucket_name,\n CreateBucketConfiguration={\n 'LocationConstraint': 'eu-west-2'\n }\n )\n\n # print out all buckets created so far\n for i in s3.buckets.all():\n print(i.name)\n" }, { "alpha_fraction": 0.5909090638160706, "alphanum_fraction": 0.6012396812438965, "avg_line_length": 21, "blob_id": "59e3469e94c5db15534a20730ed7cbada600afd5", "content_id": "6c9d801c43c29d3f378af9b93e4ecb872f571bc0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 484, "license_type": "no_license", "max_line_length": 50, "num_lines": 22, "path": "/.aws/create_parquet_file.py", "repo_name": "Seraph2000/convex_tech_test", "src_encoding": "UTF-8", "text": "import numpy as np\nimport pandas as pd\nimport pyarrow as pa\nimport pyarrow.parquet as pq\n\n\ndef create_parquet_file(file_name):\n # name needs to take the form filename.parquet\n\n df = pd.DataFrame({\n 'one': [-1, np.nan, 2.5],\n 'two': ['foo', 'bar', 'baz'],\n 'three': [True, False, True]},\n index=list('abc')\n )\n\n table = pa.Table.from_pandas(df)\n\n pq.write_table(table, file_name)\n\n table2 = pq.read_table(file_name)\n table2.to_pandas()\n" } ]
6
symac/pymarc
https://github.com/symac/pymarc
1410389ba8b969f235839a99c84c4211c426610b
25798820861a94209fa67b681b218883043cb3a7
15028592c6884d8e8b664fbbcf6bc4eecb4ac679
refs/heads/master
2020-05-29T11:04:27.699360
2015-09-23T09:28:13
2015-09-23T09:28:13
38,690,316
0
0
null
2015-07-07T13:44:22
2015-07-06T22:45:24
2015-02-18T22:34:18
null
[ { "alpha_fraction": 0.5557586550712585, "alphanum_fraction": 0.5575868487358093, "avg_line_length": 22.255319595336914, "blob_id": "09cfedb91c98068af75bb2c719acca4294203a9d", "content_id": "f30afd4141babbd2420cf76c74f71fd56c9f69fe", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1094, "license_type": "permissive", "max_line_length": 62, "num_lines": 47, "path": "/pymarc/writer.py", "repo_name": "symac/pymarc", "src_encoding": "UTF-8", "text": "from pymarc import Record, WriteNeedsRecord\n\nclass Writer(object):\n \n def write(self, record):\n pass\n\nclass MARCWriter(Writer):\n \"\"\"\n A class for writing MARC21 records in transmission format.\n\n Simple usage:\n\n from pymarc import MARCWriter\n\n ## pass in a file\n writer = MARCWriter(file('file.dat','w'))\n writer.write(record)\n \n ## use StringIO if you want to write to a string\n string = StringIO()\n writer = MARCWriter(string)\n writer.write(record)\n print string\n \"\"\"\n\n def __init__(self, file_handle):\n \"\"\"\n You need to pass in a file like object.\n \"\"\"\n super(MARCWriter, self).__init__()\n self.file_handle = file_handle\n\n def write(self, record):\n \"\"\"\n Writes a record.\n \"\"\"\n if not isinstance(record, Record):\n raise WriteNeedsRecord\n self.file_handle.write(record.as_marc())\n\n def close(self):\n \"\"\"\n Closes the file.\n \"\"\"\n self.file_handle.close()\n self.file_handle = None\n\n" }, { "alpha_fraction": 0.6358407139778137, "alphanum_fraction": 0.6446902751922607, "avg_line_length": 29.945205688476562, "blob_id": "39732971f6a8ed52d5f603505f8a09e09f471e91", "content_id": "e59c473e75989fac0199e4bc36452171c1e53c98", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2260, "license_type": "permissive", "max_line_length": 78, "num_lines": 73, "path": "/pymarc/__init__.py", "repo_name": "symac/pymarc", "src_encoding": "UTF-8", "text": "# __init__.py\n\nr'''\n\nThe pymarc module provides an API for reading, writing and modifying\nMARC records. MARC (MAchine Readable Cataloging) is a metadata format for\nbibliographic data. More about MARC can be found at the Library of Congress:\nhttp://lcweb.loc.gov/marc\n\nBelow are some common examples of how you might want to use pymarc. If you\nrun across an example that you think should be here please contribute it\nby writing to the author.\n\n1. Reading a batch of records and printing out the 245 subfield a. If you \n are curious this example uses the batch file available in the distribution.\n\n >>> from pymarc import MARCReader\n >>> reader = MARCReader(open('test/marc.dat', 'rb'))\n >>> for record in reader: \n ... print record['245']['a']\n The pragmatic programmer :\n Programming Python /\n Learning Python /\n Python cookbook /\n Python programming for the absolute beginner /\n Web programming :\n Python programming on Win32 /\n Python programming :\n Python Web programming /\n Core python programming /\n Python and Tkinter programming /\n Game programming with Python, Lua, and Ruby /\n Python programming patterns /\n Python programming with the Java class libraries :\n Learn to program using Python :\n Programming with Python /\n BSD Sockets programming from a multi-language perspective /\n Design patterns :\n Introduction to algorithms /\n ANSI Common Lisp /\n\n2. Creating a record and writing it out to a file.\n\n >>> from pymarc import Record, Field\n >>> record = Record()\n >>> record.addField(\n ... 
Field(\n ... tag = '245', \n ... indicators = ['0','1'],\n ... subfields = [\n ... 'a', 'The pragmatic programmer : ',\n ... 'b', 'from journeyman to master /', \n ... 'c', 'Andrew Hunt, David Thomas.'\n ... ]))\n >>> out = open('file.dat', 'wb')\n >>> out.write(record.asMARC21())\n >>> out.close()\n\n'''\n\n\nfrom .record import *\nfrom .field import * \nfrom .exceptions import *\nfrom .reader import *\nfrom .writer import *\nfrom .constants import *\nfrom .marc8 import marc8_to_unicode, MARC8ToUnicode\nfrom .marcxml import *\n\nif __name__ == \"__main__\":\n import doctest\n doctest.testmod()\n\n" }, { "alpha_fraction": 0.582446813583374, "alphanum_fraction": 0.5890957713127136, "avg_line_length": 24.066667556762695, "blob_id": "21b7119bc9ec484ce30d8c3597ab080e0c353512", "content_id": "782415ad5a8bb9be080a6c8a4213cd7b0f340ea9", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 752, "license_type": "permissive", "max_line_length": 70, "num_lines": 30, "path": "/test/writer.py", "repo_name": "symac/pymarc", "src_encoding": "UTF-8", "text": "import unittest\nimport pymarc\nimport os\n\nclass MARCWriterTest(unittest.TestCase):\n\n def test_write(self):\n\n # write a record off to a file\n writer = pymarc.MARCWriter(open('test/writer-test.dat', 'wb'))\n record = pymarc.Record()\n field = pymarc.Field('245', ['0', '0'], ['a', 'foo'])\n record.add_field(field)\n writer.write(record)\n writer.close()\n\n # read it back in\n reader = pymarc.MARCReader(open('test/writer-test.dat', 'rb'))\n r = next(reader)\n reader.close()\n\n # remove it\n os.remove('test/writer-test.dat')\n\ndef suite():\n test_suite = unittest.makeSuite(MARCWriterTest, 'test')\n return test_suite\n\nif __name__ == '__main__':\n unittest.main()\n" } ]
3
s-andrew/console_draw
https://github.com/s-andrew/console_draw
c1dfb92c6dffe0294af1d339c6c7d14e0cc7e2e5
6f1c38c69eedccf8585f7fb089b209b49a9c4d8a
d6845d10598510aac0ce323da472a879c8756ef8
refs/heads/master
2020-03-22T21:40:04.658543
2020-02-13T21:12:25
2020-02-13T21:12:25
140,705,307
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6593866348266602, "alphanum_fraction": 0.6593866348266602, "avg_line_length": 32.4375, "blob_id": "2beb19d0019ef903b8d50b985fd24b65bb6d26fd", "content_id": "4f99d2e3cb2a8f585f31938703d9f478a5b993f3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2152, "license_type": "no_license", "max_line_length": 116, "num_lines": 64, "path": "/run.py", "repo_name": "s-andrew/console_draw", "src_encoding": "UTF-8", "text": "import argparse\n\nfrom console_draw import draw_string, draw_image\n\n\nmain_parser = argparse.ArgumentParser()\nmain_parser.add_argument('-i', '--image', type=str, help='Path to image', dest='params', action='append')\nmain_parser.add_argument('-t', '--text', type=str, help='Text to print', dest='params', action='append')\nmain_parser.add_argument('--fill-sym', type=str, help='Fill symbol', default=' ')\n\nsupport_parser = argparse.ArgumentParser(\n add_help=False, parents=[main_parser], conflict_handler='resolve',\n description='For example: python run.py -i img/crash.png -t \" Let\\'s crash this party!\" --fill-sym \" \"'\n\n)\nsupport_parser.add_argument('-i', '--image', help='Path to image', dest='images', action='append')\nsupport_parser.add_argument('-t', '--text', help='Text to print', dest='texts', action='append')\n\n\ntypes = support_parser.parse_args()\nmain = main_parser.parse_args()\nreport = []\n\nfor v in main.params:\n if types.images is not None and v in types.images:\n try:\n draw_image(v, fill_sym=main.fill_sym)\n except FileNotFoundError:\n report.append(\"No such file or directory: '%s'\"%v)\n if types.texts is not None and v in types.texts:\n draw_string(v, fill_symbol=main.fill_sym)\n\n\nfor i in report:\n print(i)\n\n\nprint(main.fill_sym)\n\n\n# from itertools import chain\n# s = '''\\\n# Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod\n# tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,\n# quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo\n# consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse\n# cillum dolore eu fugiat nulla pariatur. 
Excepteur sint occaecat cupidatat non\n# proident, sunt in culpa qui officia deserunt mollit anim id est laborum.\\\n# '''\n# s = s.replace('\\n', '')\n# l = s.split('.')\n# l = map(lambda x: x + '.', l)\n#\n# l = map(lambda x: x.split(','), l)\n#\n# rows = []\n# for phrase in l:\n# *body, last = phrase\n# body = map(lambda x: x + ',', body)\n# phrase = list(body) + [last]\n# rows += phrase\n#\n# for row in rows:\n# draw_string(row)\n \n \n\n\n" }, { "alpha_fraction": 0.6047953963279724, "alphanum_fraction": 0.6366267204284668, "avg_line_length": 28, "blob_id": "435785a36bcb3d7de5ad1ba0834f6c63046c7b7c", "content_id": "b00eef7e8acaee5544375c40c133ec9e9fce0745", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2419, "license_type": "no_license", "max_line_length": 111, "num_lines": 83, "path": "/console_draw.py", "repo_name": "s-andrew/console_draw", "src_encoding": "UTF-8", "text": "from itertools import repeat, starmap\n\nfrom PIL import Image\nimport numpy as np\nfrom colorama import Back, Fore, init\n\nfrom alphabet import ALPHABET, SPACE\n\ninit()\nCONSOLE_COLORS = {\n 0: (Back.BLACK, Fore.BLACK),\n 1: (Back.RED, Fore.RED),\n 2: (Back.YELLOW, Fore.YELLOW),\n 3: (Back.GREEN, Fore.GREEN),\n 4: (Back.WHITE, Fore.WHITE),\n 5: (Back.LIGHTGREEN_EX, Fore.LIGHTGREEN_EX),\n 6: (Back.LIGHTYELLOW_EX, Fore.LIGHTYELLOW_EX),\n 7: (Back.LIGHTYELLOW_EX, Fore.YELLOW),\n 'n': (Back.RESET, Fore.RESET)\n }\n\n\ndef pixel_mapper(color, fill):\n back, fore = CONSOLE_COLORS[color]\n return back + fore + fill\n\n\ndef join(l, sep):\n newl = [sep]\n for i in l:\n newl.append(i)\n newl.append(sep)\n return newl\n\ndef newline():\n return pixel_mapper('n', '\\n')\n\n\ndef string2matrix(string, font_color, back_color, interligne):\n string = string.lower()\n matrixes = map(lambda x: ALPHABET[x], string)\n matrix = join(matrixes, SPACE)\n matrix = np.concatenate(matrix, axis=1)\n _, w = matrix.shape\n line_space = np.zeros((interligne, w))\n matrix = np.concatenate([line_space, matrix, line_space], axis=0)\n colorize = lambda x: font_color if x else back_color\n mapper = lambda x: np.array(list(map(colorize, x)))\n matrix = np.apply_along_axis(mapper, axis=0, arr=matrix)\n return matrix\n\n\ndef matrix2printable(matrix, fill_symbol):\n for row in matrix:\n row = starmap(pixel_mapper, zip(row, repeat(fill_symbol)))\n row = ''.join(row)\n yield row\n\n\ndef draw_string(string, font_color=4, back_color=0, fill_symbol='.', interligne=1):\n matrix = string2matrix(string, font_color, back_color, interligne)\n print(*list(matrix2printable(matrix, fill_symbol)), sep=newline())\n return\n\n\ndef quantizetopalette(silf, palette):\n silf.load()\n palette.load()\n im = silf.im.convert(\"P\", 0, palette.im)\n return silf._new(im)\n\n\ndef open_image(file_name):\n palimage = Image.new('P', (16, 16))\n palettedata = [ 0, 0, 0, 255, 0, 0, 255, 255, 0, 0, 255, 0, 255, 255, 255,85,255,85, 255,85,85, 255,255,85]\n palimage.putpalette(palettedata * 32)\n return np.array(quantizetopalette(Image.open(file_name), palimage))\n\n\ndef draw_image(file_name, fill_sym='.'):\n matrix = open_image(file_name)\n print(*list(matrix2printable(matrix, fill_sym)), sep=newline())\n return\n \n\n\n\n\n\n\n\n" } ]
2
attacker-codeninja/advanced-python-2021
https://github.com/attacker-codeninja/advanced-python-2021
4a761f86177a83153f73c7ffff9ba1fd6b361f3d
69a7de1da114de6914c6284c95592013d777bc05
03c1797d8db5943d6feda96d76866934fcc7fa5b
refs/heads/master
2023-07-18T20:31:24.426399
2021-09-29T22:18:15
2021-09-29T22:18:15
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6402877569198608, "alphanum_fraction": 0.6762589812278748, "avg_line_length": 20.947368621826172, "blob_id": "6d3e6c370863b42ccbfddd7c3b484ce994c5190b", "content_id": "28671190020f47944e4069f8bf718612ac7bb561", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 417, "license_type": "no_license", "max_line_length": 69, "num_lines": 19, "path": "/WEEK-1/introduction.py", "repo_name": "attacker-codeninja/advanced-python-2021", "src_encoding": "UTF-8", "text": "print('Hello')\nprint('Hello','World', 2021, 'I love Pyton')\n\n# comment\n# what is the pupose of a comment ?\n# it allows us to leave some remark about the code ?\n# it makes code more readable, clean, reusable\n\n'''\nthe function below is print\nit is a builtin function it takes unlimited number of arguments\n'''\n\"\"\"\nthis is \na mulitiline\ncomment\n\"\"\"\n\nprint(1, 2, 3, 4, 'WAHT EVER', True, [1, 2, 3], None, ('year', 2021))\n" } ]
1
EswarAleti/Dream11_Prediction_Project
https://github.com/EswarAleti/Dream11_Prediction_Project
714f26058a7453ec4def906e59538ac8f335ebef
e2999fd1e52cafc5551c03801f0139e090b76f7f
9c011b9b04fa998e5af2c4f88158b1f650aa6f1f
refs/heads/master
2020-12-27T11:50:58.685460
2020-02-03T05:51:17
2020-02-03T05:51:17
237,893,075
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5950323939323425, "alphanum_fraction": 0.620590329170227, "avg_line_length": 32.469879150390625, "blob_id": "d471fc61b07edc093bb134601e03cc38818087c8", "content_id": "7210edebe9d8eaf9b0fcbfbb6eff13cd3cece14e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2778, "license_type": "no_license", "max_line_length": 152, "num_lines": 83, "path": "/Team11/newRecord.py", "repo_name": "EswarAleti/Dream11_Prediction_Project", "src_encoding": "UTF-8", "text": "import numpy as np\nimport pandas as pd\n\ndef batsmanNewRecord():\n\n\tteam=raw_input(\"1. SRH\\n2. RCB\\n3. KKR\\n4. CSK\\n5. DD\\n6. KXIP\\n7. RR\\n8. MI\\nEnter your team (e.g. SRH): \")\n\tdf=pd.read_csv(team+\" Batting.csv\")\n\t\n\twhile True:\n\t\tprint df['player']\n\t\tplayerIndex=input(\"\\nEnter index of player to enter score or -1 to exit or -2 to place a new player: \")\n\t\tif playerIndex==-1:\n\t\t\tbreak;\n\t\telif playerIndex==-2:\n\t\t\tz=df.columns\n\t\t\tx={}\n\t\t\tx['player']=[raw_input(\"Enter player name: \")]\n\t\t\tfor i in z[1:]:\n\t\t\t\tx[i]=[np.nan]\n\t\t\tdf=df.append(pd.DataFrame(x)).reset_index(drop=True)\n\t\t\tdf=df[z]\n\t\telse:\t\n\t\t\truns=input(\"Enter runs scored: \")\n\t\t\tballs=input(\"Enter number of balls defended: \")\n\t\t\tif not np.isnan(df.iloc[playerIndex,20]):\n\t\t\t\tfor i in range(1,18,2):\n\t\t\t\t\tdf.iloc[playerIndex,i],df.iloc[playerIndex,i+1]=df.iloc[playerIndex,i+2],df.iloc[playerIndex,i+3]\n\t\t\t\tdf.iloc[playerIndex,19]=runs\n\t\t\t\tdf.iloc[playerIndex,20]=balls\n\t\t\telse:\n\t\t\t\tfor i in range(1,20,2):\n\t\t\t\t\tif np.isnan(df.iloc[playerIndex,i]):\n\t\t\t\t\t\tdf.iloc[playerIndex,i]=runs\n\t\t\t\t\t\tdf.iloc[playerIndex,i+1]=balls\n\t\t\t\t\t\tbreak\n\tdf.to_csv(team+\" Batting.csv\",index=False);\n\t\ndef bowlerNewRecord():\n\n\tteam=raw_input(\"1. SRH\\n2. RCB\\n3. KKR\\n4. CSK\\n5. DD\\n6. KXIP\\n7. RR\\n8. MI\\nEnter your team (e.g. 
SRH): \")\n\tdf=pd.read_csv(team+\" Bowling.csv\");\n\twhile True:\n\t\tprint df['player']\n\t\tplayerIndex=input(\"\\nEnter index of player to enter score or -1 to exit or -2 to place a new player: \")\n\t\tif playerIndex==-1:\n\t\t\tbreak;\n\t\telif playerIndex==-2:\n\t\t\tz=df.columns\n\t\t\tx={}\n\t\t\tx['player']=[raw_input(\"Enter player name: \")]\n\t\t\tfor i in z[1:]:\n\t\t\t\tx[i]=[np.nan]\n\t\t\tdf=df.append(pd.DataFrame(x)).reset_index(drop=True)\n\t\t\tdf=df[z]\n\t\telse:\t\n\t\t\tovers=input(\"Enter number of Overs: \")\n\t\t\truns=input(\"Enter runs Given: \")\n\t\t\twickets=input(\"Enter number of Wickets picked: \")\n\t\t\tif not np.isnan(df.iloc[playerIndex,30]):\n\t\t\t\tfor i in range(1,27,3):\n\t\t\t\t\tdf.iloc[playerIndex,i],df.iloc[playerIndex,i+1],df.iloc[playerIndex,i+2]=df.iloc[playerIndex,i+3],df.iloc[playerIndex,i+4],df.iloc[playerIndex,i+5]\n\t\t\t\tdf.iloc[playerIndex,28]=runs\n\t\t\t\tdf.iloc[playerIndex,29]=overs\n\t\t\t\tdf.iloc[playerIndex,30]=wickets\n\t\t\telse:\n\t\t\t\tfor i in range(1,30,3):\n\t\t\t\t\tif np.isnan(df.iloc[playerIndex,i]):\n\t\t\t\t\t\tdf.iloc[playerIndex,i]=runs\n\t\t\t\t\t\tdf.iloc[playerIndex,i+1]=overs\n\t\t\t\t\t\tdf.iloc[playerIndex,i+2]=wickets\n\t\t\t\t\t\tbreak\n\tdf.to_csv(team+\" Bowling.csv\",index=False);\n\t\ndef main():\n\tprint '-----------------Innings 1 Batting------------------'\n\tbatsmanNewRecord();\n\tprint '-----------------Innings 2 Batting------------------'\n\tbatsmanNewRecord();\n\tprint '-----------------Innings 1 Bowling------------------'\n\tbowlerNewRecord();\n\tprint '-----------------Innings 2 Bowling------------------'\n\tbowlerNewRecord();\nif __name__ == \"__main__\": main()\n" }, { "alpha_fraction": 0.6699561476707458, "alphanum_fraction": 0.6973684430122375, "avg_line_length": 31.963855743408203, "blob_id": "aaa27ba86eb2314409413a44d582d3ed25e5c292", "content_id": "d2b897736c07d8566cf2d3a1621bd826467c34cf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2736, "license_type": "no_license", "max_line_length": 106, "num_lines": 83, "path": "/Team11/Team11.py", "repo_name": "EswarAleti/Dream11_Prediction_Project", "src_encoding": "UTF-8", "text": "import pandas as pd\nimport numpy as np\nbatsmanIndex=[]\nIPLteams=['SRH','CSK','RCB','RR','KKR','KXIP','DD','MI']\nteam11=[]\ndef getTeam():\n\n\tteam1=raw_input(\"1. SRH\\n2. RCB\\n3. KKR\\n4. CSK\\n5. DD\\n6. KXIP\\n7. RR\\n8. MI\\nEnter team1 (e.g. SRH): \")\n\tdfBat=pd.read_csv(team1+\" Batting.csv\")\n\tdfBall=pd.read_csv(team1+\" Bowling.csv\")\n\tteam2=raw_input(\"1. SRH\\n2. RCB\\n3. KKR\\n4. CSK\\n5. DD\\n6. KXIP\\n7. RR\\n8. MI\\nEnter team2 (e.g. 
SRH): \")\n\tdfBat=dfBat.append(pd.read_csv(team2+\" Batting.csv\")).reset_index(drop=True)\n\tdfBall=dfBall.append(pd.read_csv(team2+\" Bowling.csv\")).reset_index(drop=True)\n\t\n\twicket_keeper(team1,team2)\n\t#batsmen(team1,team2)\n\t#bowler(team1,team2)\n\t#allrounder(team1,team2)\n\t\n\tprint team11\n\t\ndef wicket_keeper(team1,team2):\n\n\tdfBat=pd.read_csv(team1+\" Batting.csv\")\n\tdfBat=dfBat.append(pd.read_csv(team2+\" Batting.csv\")).reset_index(drop=True)\n\t\n\tprint dfBat['player']\n\twk1=input('Enter index of wicket keeper1: ')\n\twk2=input('Enter index of wicket keeper2: ')\n\tif dfBat.loc[wk1,'performanceIndex'] > dfBat.loc[wk2,'performanceIndex']:\n\t\tteam11.append(dfBat.loc[wk1,'player'])\n\telse:\n\t\tteam11.append(dfBat.loc[wk2,'player'])\n\t\ndef batsmen(team1,team2):\n\n\tdfBat=pd.read_csv(team1+\" Batting.csv\")\n\tdfBat=dfBat.append(pd.read_csv(team2+\" Batting.csv\")).reset_index(drop=True)\n\t\n\tprint dfBat['player']\n\tx=raw_input(\"Enter batsman indices \")\n\tbatsmanIndex.extend(int(i) for i in x.split())\n\tdfBat = dfBat[dfBat.index.isin(batsmanIndex)]\n\tl = list(dfBat.performanceIndex.nlargest(5))\n\tdfBat = dfBat[dfBat['performanceIndex'].isin(l)]\n\tteam11.append(list(dfBat['player']))\t\n\t\t\ndef bowler(team1,team2):\n\n\tdfBall=pd.read_csv(team1+\" Bowling.csv\")\n\tdfBall=dfBall.append(pd.read_csv(team2+\" Bowling.csv\")).reset_index(drop=True)\n\t\n\tprint dfBall['player']\n\tx=raw_input(\"Enter bowler indices \")\n\tbatsmanIndex.extend(int(i) for i in x.split())\n\tdfBall = dfBall[dfBall.index.isin(batsmanIndex)]\n\tl = list(dfBall.performanceIndex.nlargest(3))\n\tdfBall = dfBall[dfBall['performanceIndex'].isin(l)]\n\tteam11.append(list(dfBall['player']))\n\t\n\t\ndef allrounder(team1,team2):\n\t\n\tdfBat=pd.read_csv(team1+\" Batting.csv\")\n\tdfBat=dfBat.append(pd.read_csv(team2+\" Batting.csv\")).reset_index(drop=True)\n\t\n\tdfBall=pd.read_csv(team1+\" Bowling.csv\")\n\tdfBall=dfBall.append(pd.read_csv(team2+\" Bowling.csv\")).reset_index(drop=True)\n\t\n\tdf = dfBall.merge(dfBat,how='inner',on=['player'])\n\tdf['ar']= df['performanceIndex_x'] + df['performanceIndex_y']\n\tprint df['player']\n\t\n\tx=raw_input(\"Enter Allrounder indices: \")\n\tbatsmanIndex.extend(int(i) for i in x.split())\n\tdf = df[df.index.isin(batsmanIndex)]\n\tl = list(df.ar.nlargest(3))\n\tdf = df[df['ar'].isin(l)]\n\tteam11.append(list(df['player']))\n\t\ndef main():\n\tgetTeam()\nif __name__ == \"__main__\": main()\n" }, { "alpha_fraction": 0.6023842692375183, "alphanum_fraction": 0.6206170916557312, "avg_line_length": 26.372549057006836, "blob_id": "b3cb2a342dac005d43ee127781dad9703405c8f1", "content_id": "698e43b28521aea1e30e26bce86d661f3bc4fb61", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1426, "license_type": "no_license", "max_line_length": 194, "num_lines": 51, "path": "/7.Individual_Performances/scores.py", "repo_name": "EswarAleti/Dream11_Prediction_Project", "src_encoding": "UTF-8", "text": "import numpy as np\nimport pandas as pd\n\nteams=[['Sunrisers Hyderabad'],['Royal Challengers Bangalore'],['Mumbai Indians'],['Rising Pune Supergiant'],['Gujarat Lions'],['Kolkata Knight Riders'],['Kings XI Punjab'],['Delhi Daredevils']]\nbatsmen=[]\nbowlers=[]\n \ndef creatBatsmenList(team_name):\n\tteamDF=pd.read_csv(team_name+'.csv')\n\tteamDF.set_index(\"player\", drop = False)\n\ti=0\n\tli=(teamDF.values.tolist())\n\twhile(i<teamDF.shape[0]):\n\t\tcleanedList = [x for x in li[i] if str(x) != 
'nan']\n\t\tlength=len(cleanedList)\n\t\tif length>21:\n\t\t\tcleanedList=cleanedList[:1] + cleanedList[length-20:]\n\t\tif length>1:\n\t\t\tbatsmen.append(cleanedList)\n\t\ti=i+1\n\ndef creatBowlersList(team_name):\n\tteamDF=pd.read_csv(team_name+'.csv')\n\tteamDF.set_index(\"player\", drop = False)\n\ti=0\n\tli=(teamDF.values.tolist())\n\twhile(i<teamDF.shape[0]):\n\t\tcleanedList = [x for x in li[i] if str(x) != 'nan']\n\t\tlength=len(cleanedList)\n\t\tif length>31:\n\t\t\tcleanedList=cleanedList[:1] + cleanedList[length-30:]\n\t\tif length>1:\n\t\t\tbowlers.append(cleanedList)\n\t\ti=i+1\n\t\t\t\ndef main():\n for i in range(0,8):\n \tcreatBatsmenList(teams[i][0]+\" Bat\");\n \n for i in range(0,8):\n \tcreatBowlersList(teams[i][0]+\" Bowl\");\n \n for i in range(0,len(batsmen)):\n\t\tprint batsmen[i]\n\t\tprint\n\t\n\tfor i in range(0,len(bowlers)):\n\t\tprint bowlers[i]\n\t\tprint\n\nif __name__ == \"__main__\": main() \n" }, { "alpha_fraction": 0.594307541847229, "alphanum_fraction": 0.6191588640213013, "avg_line_length": 43.5523796081543, "blob_id": "192a1dfc5e9babe72af084d4fa425aa5d2381402", "content_id": "51ddfc768cb5341f26ff486c4a5d004e85cfd329", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4708, "license_type": "no_license", "max_line_length": 227, "num_lines": 105, "path": "/8.IPL2018/scores.py", "repo_name": "EswarAleti/Dream11_Prediction_Project", "src_encoding": "UTF-8", "text": "import numpy as np\nimport pandas as pd\n\nteams=[['Sunrisers Hyderabad'],['Royal Challengers Bangalore'],['Mumbai Indians'],['Rising Pune Supergiant'],['Gujarat Lions'],['Kolkata Knight Riders'],['Kings XI Punjab'],['Delhi Daredevils']]\nbatsmen=[]\nbowlers=[]\ndef creatBatsmenList(team_name):\n\tteamDF=pd.read_csv(team_name+'.csv')\n\tteamDF.set_index(\"player\", drop = False)\n\ti=0\n\tli=(teamDF.values.tolist())\n\twhile(i<teamDF.shape[0]):\n\t\tcleanedList = [x for x in li[i] if str(x) != 'nan']\n\t\tlength=len(cleanedList)\n\t\tif length>21:\n\t\t\tcleanedList=cleanedList[:1] + cleanedList[length-20:]\n\t\tif length>1:\n\t\t\tbatsmen.append(cleanedList)\n\t\ti=i+1\n\ndef creatBowlersList(team_name):\n\tteamDF=pd.read_csv(team_name+'.csv')\n\tteamDF.set_index(\"player\", drop = False)\n\ti=0\n\tli=(teamDF.values.tolist())\n\twhile(i<teamDF.shape[0]):\n\t\tcleanedList = [x for x in li[i] if str(x) != 'nan']\n\t\tlength=len(cleanedList)\n\t\tif length>31:\n\t\t\tcleanedList=cleanedList[:1] + cleanedList[length-30:]\n\t\tif length>1:\n\t\t\tbowlers.append(cleanedList)\n\t\ti=i+1\n\ndef ipl2018TeamBat(team):\n\tteam=[[i] for i in team]\n\tfor i in range(0,len(team)):\n\t\tfor j in range(0,len(batsmen)):\n\t\t\tif team[i][0] in batsmen[j]:\n\t\t\t\tteam[i].extend(batsmen[j][1:])\n\treturn team\ndef ipl2018TeamBall(team):\n\tteam=[[i] for i in team]\n\tfor i in range(0,len(team)):\n\t\tfor j in range(0,len(bowlers)):\n\t\t\tif team[i][0] in bowlers[j]:\n\t\t\t\tteam[i].extend(bowlers[j][1:])\n\treturn team\t\n\ndef createNewBatCSV(team,teamName):\n\tlable=['player']\n for i in range(1,11):\n \tlable.append(\"m\"+str(i)+\"r\")\n \tlable.append(\"m\"+str(i)+\"b\")\n df = pd.DataFrame.from_records(team, columns=lable)\n df.to_csv(teamName+' Batting.csv',index=False)\n \ndef createNewBallCSV(team,teamName):\n\tlable=['player']\n for i in range(1,11):\n \tlable.append(\"m\"+str(i)+\"r\")\n \tlable.append(\"m\"+str(i)+\"o\")\n \tlable.append(\"m\"+str(i)+\"w\")\n df = pd.DataFrame.from_records(team, columns=lable)\n df.to_csv(teamName+' 
Bowling.csv',index=False)\n \ndef main():\n for i in range(0,8):\n \tcreatBatsmenList(teams[i][0]+\" Bat\");\n \n for i in range(0,8):\n \tcreatBowlersList(teams[i][0]+\" Bowl\");\n \n for i in range(0,len(bowlers)):\n \tfor j in range(2,len(bowlers[i]),3):\n \t\tbowlers[i][j]=int(bowlers[i][j]/6)+float(bowlers[i][j]%6)/10\n \t\n SRH=['DA Warner','B Kumar','S Dhawan','Shakib Al Hasan','KS Williamson','MK Pandey','CR Brathwaite','YK Pathan','WP Saha','Rashid Khan','DJ Hooda','S Kaul','T Natarajan','Mohammad Nabi','Basil Thampi','Sandeep Sharma','Sachin Baby','CJ Jordan','B Stanlake','Bipul Sharma']\n\tRCB=['V Kohli','AB de Villiers','BB McCullum','CR Woakes','C de Grandhomme','UT Yadav','YS Chahal','M Vohra','A Choudhary','Mandeep Singh','Washington Sundar','P Negi','Mohammed Siraj','PA Patel','TG Southee','CJ Anderson'] \n\tCSK=['MS Dhoni','SK Raina','RA Jadeja','F du Plessis','Harbhajan Singh','DM Bravo','SR Watson','AT Rayudu','DL Chahar','M Vijay','SW Billings','M Wood','Imran Tahir','KV Sharma','SN Thakur',]\n\tDD=['SS Iyer','CH Morris','RR Pant','GJ Maxwell','G Gambhir','JJ Roy','C Munro','V Shankar','DT Christian','NV Ojha','Mohammed Shami','A Mishra','R Tewatia','AR Patel','Avesh Khan','S Nadeem','TA Boult']\n\tKKR=['KD Karthik','CA Lynn','N Rana','PP Chawla','RV Uthappa','SP Narine','MG Johnson','Kuldeep Yadav','R Vinay Kumar']\n\tRR=['SPD Smith','RA Tripathi','AM Rahane','STR Binny','BA Stokes','SV Samson','JC Buttler','DS Kulkarni','JD Unadkat','Ankit Sharma','Anureet Singh']\n\tMI=['RG Sharma','JJ Bumrah','HH Pandya','KA Pollard','Mustafizur Rahman','SA Yadav','KH Pandya','Ishan Kishan','RD Chahar','SS Tiwary','BCJ Cutting','PJ Sangwan','AP Tare','AF Milne','MJ McClenaghan']\n\tKXIP=['AR Patel','Yuvraj Singh','KK Nair','CH Gayle','DA Miller','AJ Finch','MP Stoinis','MA Agarwal','AS Rajpoot','MK Tiwary','MM Sharma','BB Sran','AJ Tye','AD Nath','']\n \t\n createNewBatCSV(ipl2018TeamBat(SRH),\"SRH\")\n createNewBallCSV(ipl2018TeamBall(SRH),\"SRH\")\n createNewBatCSV(ipl2018TeamBat(RR),\"RR\")\n createNewBallCSV(ipl2018TeamBall(RR),\"RR\")\n createNewBatCSV(ipl2018TeamBat(RCB),\"RCB\")\n createNewBallCSV(ipl2018TeamBall(RCB),\"RCB\")\n createNewBatCSV(ipl2018TeamBat(CSK),\"CSK\")\n createNewBallCSV(ipl2018TeamBall(CSK),\"CSK\")\n createNewBatCSV(ipl2018TeamBat(KXIP),\"KXIP\")\n createNewBallCSV(ipl2018TeamBall(KXIP),\"KXIP\")\n createNewBatCSV(ipl2018TeamBat(KKR),\"KKR\")\n createNewBallCSV(ipl2018TeamBall(KKR),\"KKR\")\n createNewBatCSV(ipl2018TeamBat(DD),\"DD\")\n createNewBallCSV(ipl2018TeamBall(DD),\"DD\")\n createNewBatCSV(ipl2018TeamBat(MI),\"MI\")\n createNewBallCSV(ipl2018TeamBall(MI),\"MI\")\n \t\n\t\nif __name__ == \"__main__\": main() \n" }, { "alpha_fraction": 0.4207066595554352, "alphanum_fraction": 0.4404272735118866, "avg_line_length": 38.56666564941406, "blob_id": "8fa56fbbd780304502b48a434151d91e3dd4344a", "content_id": "1ed5dfd850d54550083c29cf1f894e429c833007", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1217, "license_type": "no_license", "max_line_length": 194, "num_lines": 30, "path": "/3.Teams/Teams.py", "repo_name": "EswarAleti/Dream11_Prediction_Project", "src_encoding": "UTF-8", "text": "import numpy as n\nimport pandas as pd\ndf1=pd.read_csv('IPL_2017.csv')\n\nteams=[['Sunrisers Hyderabad'],['Royal Challengers Bangalore'],['Mumbai Indians'],['Rising Pune Supergiant'],['Gujarat Lions'],['Kolkata Knight Riders'],['Kings XI Punjab'],['Delhi Daredevils']]\n\ndef generate_team_players():\n for i in 
range(0,df1.shape[0]):\n for j in range(0,8):\n if(df1['batting_team'][i] == teams[j][0]):\n if(df1['batsman'][i] not in teams[j]):\n teams[j].append(df1['batsman'][i])\n for j in range(0,8):\n if(df1['bowling_team'][i] == teams[j][0]):\n if(df1['bowler'][i] not in teams[j]):\n teams[j].append(df1['bowler'][i])\n\ndef show_teams():\n for i in range(0,8):\n \tprint teams[i]\n \tprint \n #print '************',teams[i][0],'*************'\n #for j in range(1,len(teams[i])):\n # print teams[i][j]\n\ndef main():\n generate_team_players()\n show_teams()\n\nif __name__ == \"__main__\": main() \n" }, { "alpha_fraction": 0.6708333492279053, "alphanum_fraction": 0.7208333611488342, "avg_line_length": 47, "blob_id": "8e16cd9c437618c4335c4964585f47129c86af80", "content_id": "941a191f13f91dbe0b986934fb375a9835be03cf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 240, "license_type": "no_license", "max_line_length": 105, "num_lines": 5, "path": "/2.Modify_data_as_per_Requirement/usefulData.py~", "repo_name": "EswarAleti/Dream11_Prediction_Project", "src_encoding": "UTF-8", "text": "import pandas as pd\ndf1=pd.read_csv('IPL_2017.csv')\ncolumns=['non_striker','is_super_over','bye_runs','legbye_runs','penalty_runs','extra_runs','total_runs']\ndf1.drop(columns, inplace=True,axis=1)\ndf1.to_csv('IPL_2017_Use.csv',index=False)\n" }, { "alpha_fraction": 0.4024452269077301, "alphanum_fraction": 0.42180335521698, "avg_line_length": 39.27083206176758, "blob_id": "f70f8af826855ddba9ca2ebd6ae7168def864e23", "content_id": "24943b999f3ddb02de5dba491e77663584371610", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1963, "license_type": "no_license", "max_line_length": 194, "num_lines": 48, "path": "/4.Team_Individual_Files/teams.py", "repo_name": "EswarAleti/Dream11_Prediction_Project", "src_encoding": "UTF-8", "text": "import numpy as np\nimport pandas as pd\ndf1=pd.read_csv('IPL_2017.csv')\n\nteams=[['Sunrisers Hyderabad'],['Royal Challengers Bangalore'],['Mumbai Indians'],['Rising Pune Supergiant'],['Gujarat Lions'],['Kolkata Knight Riders'],['Kings XI Punjab'],['Delhi Daredevils']]\n\ndef generate_team_players():\n\n for i in range(0,df1.shape[0]):\n for j in range(0,8):\n if(df1['batting_team'][i] == teams[j][0]):\n if(df1['batsman'][i] not in teams[j]):\n teams[j].append(df1['batsman'][i])\n for j in range(0,8):\n if(df1['bowling_team'][i] == teams[j][0]):\n if(df1['bowler'][i] not in teams[j]):\n teams[j].append(df1['bowler'][i]) \n\ndef show_teams():\n\n for i in range(0,8):\n print '************',teams[i][0],'*************'\n for j in range(1,len(teams[i])):\n print teams[i][j]\n \ndef create_files():\n \n for i in range(0,8):\n single_team=teams[i][1:]\n single_team=[[x] for x in single_team] #converting 1D list into 2D\n for j in range(0,len(single_team)):\n for k in range(0,16):\n single_team[j].append('\\0')\n \t#print single_team\n #print\n labels = ['Player']\n for l in range(1,17):\n \tlabels.append('m'+str(l))\n #print labels\n df = pd.DataFrame.from_records(single_team, columns=labels)\n #print df\n df.to_csv(teams[i][0]+'.csv',index=False)\n\ndef main():\n generate_team_players()\n create_files()\n\nif __name__ == \"__main__\": main() \n" }, { "alpha_fraction": 0.554305911064148, "alphanum_fraction": 0.6018637418746948, "avg_line_length": 30.755102157592773, "blob_id": "ec98ce202ea87299a185451f5ec7f30c6c1df27c", "content_id": "898b3714f2001a38dc8ca0825f0fc0e5f027828f", 
"detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3112, "license_type": "no_license", "max_line_length": 148, "num_lines": 98, "path": "/Team11/playerStats.py", "repo_name": "EswarAleti/Dream11_Prediction_Project", "src_encoding": "UTF-8", "text": "import pandas as pd\nimport numpy as np\nimport math\nfrom StringIO import StringIO\nminimum_runs= 25\nminimum_wickets= [1,2,3]\nIPLteams=['SRH','CSK','RCB','RR','KKR','KXIP','DD','MI']\n\ndef batting_analysis(teamFile):\n\tdf = pd.read_csv(teamFile)\n\tcolumns =df.columns[1:]\n\tdf2 = df.copy()\n\tdf.fillna(-1,inplace=True)\n\tcsvstr =\"player,match,balls,runs\\n\"\n\tfor i,row in df.iterrows():\n\t\tfor k in range(1,11):\n\t\t\tcsvstr+=row['player']+\",\"+str(k)+\",\"+str(df['m'+str(k)+\"b\"].iloc[i])+\",\"+str(df['m'+str(k)+\"r\"].iloc[i])+\"\\n\"\n\tcsvstr = csvstr.replace(\",-1.0\",\",\")\n\tdf = pd.read_csv(StringIO(csvstr))\n\tdf['cf']=0\n\tdf2['cf']=0\n\tdf2['avg']=0\n\tdf2['srate']=0\n\tdf['srate'] = df['runs']/df['balls'].astype(float)\n\tfor i in df.player.unique():\n\t\td = df[df['player']==i]\n\t\tl = d.runs.count()\n\t\tif l==0:\n\t\t\tcontinue\n\t\tcf = 0\n\t\tk = list(d.runs.dropna())\n\t\tb = list(d.balls.dropna())\n\t\trunsum,ballssum = sum(k),sum(b)\n\t\tlist1 = k[0:int(l/2)]\n\t\tlist2 = k[int(l/2):]\n\t\tfor j in list1:\n\t\t\tcf+=1 if j >= minimum_runs and not np.isnan(j) else 0\n\t\tfor j in list2:\n\t\t\tcf+=2 if j>= minimum_runs and not np.isnan(j) else 0\n\t\tdf.loc[df['player']==i,'cf']=math.ceil(cf/float(l)*100)/100\n\t\tdf2.loc[df2['player']==i,'cf']= math.ceil(cf/float(l)*100)/100\n\t\tdf2.loc[df2['player']==i,'avg']= math.ceil(runsum/float(l)*100)/100 \n\t\tdf2.loc[df2['player']==i,'srate']= math.ceil((runsum/float(ballssum))*100*100)/100\n\tdf2['performanceIndex']=df2['cf']*0.4 + df2['avg']/10*0.4 + df2['srate']/100*0.2\n\tdf2.to_csv(teamFile,index=False)\ndef bowling_analysis(teamFile):\n\tdf = pd.read_csv(teamFile)\n\tcolumns =df.columns[1:]\n\tdf2 = df.copy()\n\tdf.fillna(-1,inplace=True)\n\tcsvstr =\"player,match,runs,overs,wickets\\n\"\n\tfor i,row in df.iterrows():\n\t\tfor k in range(1,11):\n\t\t\tcsvstr+=row['player']+\",\"+str(k)+\",\"+str(df['m'+str(k)+\"r\"].iloc[i])+\",\"+str(df['m'+str(k)+\"o\"].iloc[i])+\",\"+str(df['m'+str(k)+\"w\"].iloc[i])+\"\\n\"\n\tcsvstr = csvstr.replace(\",-1.0\",\",\")\n\tdf = pd.read_csv(StringIO(csvstr))\n\tdf['cf']=0\n\tdf2['cf']=0\n\tfor i in df.player.unique():\n\t\td = df[df['player']==i]\n\t\tl = d.overs.count()\n\t\tif l==0:\n\t\t\tcontinue\n\t\tcf = 0\n\t\tk = list(d.wickets.dropna())\n\t\to = list(d.overs.dropna())\n\t\tr = list(d.runs.dropna())\n\t\trunsum,overssumm = sum(r),sum(o)\n\t\tlist1 = k[0:int(l/2)]\n\t\tlist2 = k[int(l/2):]\n\t\tfor j in list1:\n\t\t\tif not np.isnan(j):\n\t\t\t\tif int(j) == minimum_wickets[0]:\n\t\t\t\t\tcf+=1\n\t\t\t\telif int(j) == minimum_wickets[1]:\n\t\t\t\t\tcf+=2\n\t\t\t\telif int(j) >= minimum_wickets[2]:\n\t\t\t\t\tcf+=3\n\t\tfor j in list2:\n\t\t\tif not np.isnan(j):\n\t\t\t\tif int(j) == minimum_wickets[0]:\n\t\t\t\t\tcf+=2\n\t\t\t\telif int(j) == minimum_wickets[1]:\n\t\t\t\t\tcf+=4\n\t\t\t\telif int(j) >= minimum_wickets[2]:\n\t\t\t\t\tcf+=6\t\n\n\t\tdf.loc[df['player']==i,'cf']=math.ceil(cf/float(l)*100)/100\n\t\tdf2.loc[df2['player']==i,'cf']= math.ceil(cf/float(l)*100)/100\n\t\tdf2.loc[df2['player']==i,'econamy']= math.ceil(runsum/float(overssumm)*100)/100\n\tdf2['performanceIndex']=df2['cf']*0.8 + (1-df2['econamy']/10)*0.2\n\tdf2.to_csv(teamFile,index=False)\n\ndef 
main():\n\tfor i in range(0,len(IPLteams)):\n\t\tbatting_analysis(IPLteams[i]+' Batting.csv')\n\t\tbowling_analysis(IPLteams[i]+' Bowling.csv')\nif __name__ == \"__main__\": main()\n" }, { "alpha_fraction": 0.48657718300819397, "alphanum_fraction": 0.5016778707504272, "avg_line_length": 44.46154022216797, "blob_id": "e23fc7154045c3defde5d20bc2d2e6b684974b95", "content_id": "7c8c37d04430b2061b8227bb0666272985b7f9c7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3576, "license_type": "no_license", "max_line_length": 194, "num_lines": 78, "path": "/6.Bowling_Status/scores.py", "repo_name": "EswarAleti/Dream11_Prediction_Project", "src_encoding": "UTF-8", "text": "import numpy as np\nimport pandas as pd\nipl=pd.read_csv('IPL_2017.csv')\n\nteams=[['Sunrisers Hyderabad'],['Royal Challengers Bangalore'],['Mumbai Indians'],['Rising Pune Supergiant'],['Gujarat Lions'],['Kolkata Knight Riders'],['Kings XI Punjab'],['Delhi Daredevils']]\n\ndef generate_team_players():\n\n for i in range(0,ipl.shape[0]):\n for j in range(0,8):\n if(ipl['batting_team'][i] == teams[j][0]):\n if(ipl['batsman'][i] not in teams[j]):\n teams[j].append(ipl['batsman'][i])\n for j in range(0,8):\n if(ipl['bowling_team'][i] == teams[j][0]):\n if(ipl['bowler'][i] not in teams[j]):\n teams[j].append(ipl['bowler'][i]) \n\ndef show_teams():\n\n for i in range(0,8):\n print '************',teams[i][0],'*************'\n for j in range(1,len(teams[i])):\n print teams[i][j]\n \ndef create_files():\n \n for i in range(0,8):\n single_team=teams[i][1:]\n single_team=[[x] for x in single_team] #converting 1D list into 2D\n for j in range(0,len(single_team)):\n for k in range(0,54):\n single_team[j].append('\\0')\n labels = ['player']\n for match_no in range(1,19):\n \tlabels.append('m'+str(match_no)+'r')\n \tlabels.append('m'+str(match_no)+'o')\n \tlabels.append('m'+str(match_no)+'w')\n df = pd.DataFrame.from_records(single_team, columns=labels)\n df.to_csv(teams[i][0]+'.csv',index=False)\ndef scores(team_name):\n\tteamDF=pd.read_csv(team_name+'.csv')\n\tteamDF.set_index(\"player\", inplace=True)\n\tmatch=1\n\ti=0\n\tdismissals=['caught','bowled','hit wicket','lbw','stumped']\n\twhile(i<ipl.shape[0]):\n\t\tif(ipl['bowling_team'][i]==team_name):\n\t\t\tif not np.isnan(teamDF.loc[ipl['bowler'][i],'m'+str(match)+'o']):\n\t\t\t\tteamDF.loc[ipl['bowler'][i],'m'+str(match)+'r']=int(teamDF.loc[ipl['bowler'][i],'m'+str(match)+'r'])+int(ipl['wide_runs'][i])+int(ipl['noball_runs'][i])+int(ipl['batsman_runs'][i])\n\t\t\t\tif int(ipl['wide_runs'][i])==0 and int(ipl['noball_runs'][i])==0:\n\t\t\t\t\tteamDF.loc[ipl['bowler'][i],'m'+str(match)+'o']=int(teamDF.loc[ipl['bowler'][i],'m'+str(match)+'o'])+1\n\t\t\telse:\n\t\t\t\tteamDF.loc[ipl['bowler'][i],'m'+str(match)+'r']=int(ipl['wide_runs'][i])+int(ipl['noball_runs'][i])+int(ipl['batsman_runs'][i])\n\t\t\t\tif int(ipl['wide_runs'][i])==0 and int(ipl['noball_runs'][i])==0:\n\t\t\t\t\tteamDF.loc[ipl['bowler'][i],'m'+str(match)+'o']=1\n\t\t\t\t\tteamDF.loc[ipl['bowler'][i],'m'+str(match)+'w']=0\n\t\t\t\n\t\t\tif ipl.loc[i,'dismissal_kind'] in dismissals:\n\t\t\t\tif not np.isnan(teamDF.loc[ipl['bowler'][i],'m'+str(match)+'w']):\n\t\t\t\t\tteamDF.loc[ipl['bowler'][i],'m'+str(match)+'w']=int(teamDF.loc[ipl['bowler'][i],'m'+str(match)+'w'])+1\n\t\t\t\telse:\n\t\t\t\t\tteamDF.loc[ipl['bowler'][i],'m'+str(match)+'w']=1\n\t\t\t\t\n\t\t\tif i<ipl.shape[0]-1 and ipl['batting_team'][i]!=ipl['batting_team'][i+1]:\n\t\t\t\tmatch=match+1\n\t\t\telif 
i<ipl.shape[0]-1 and ipl['match_id'][i]!=ipl['match_id'][i+1]:\n\t\t\t\tmatch=match+1\n\t\ti=i+1\n\tteamDF.to_csv(team_name+' Bowl.csv',index=True)\n\ndef main():\n generate_team_players()\n create_files()\n for i in range(0,8):\n \tscores(teams[i][0]);\n\nif __name__ == \"__main__\": main() \n" } ]
9
hajam09/FrogAndToadWith0And1
https://github.com/hajam09/FrogAndToadWith0And1
fcc06790f8bdd43cd3b9f3ceb7c076d7e90bbb0f
cd6efc61062dccf9b1e5c98df5f7eb775736fb3c
deadc9d324ba67a2bb96a18f2698168b1afbe243
refs/heads/master
2020-04-03T09:51:24.129755
2018-10-29T08:36:36
2018-10-29T08:36:36
155,178,099
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.44557079672813416, "alphanum_fraction": 0.510293185710907, "avg_line_length": 36.62650680541992, "blob_id": "36d425df4aced55ed6791876ba4f069f5cf90d0b", "content_id": "f510b2cefc8ae67d65d04b0514071cb749124da5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6412, "license_type": "no_license", "max_line_length": 95, "num_lines": 166, "path": "/MA Hajamohideen Assessment 4.py", "repo_name": "hajam09/FrogAndToadWith0And1", "src_encoding": "UTF-8", "text": "#(5,4)(3,5)(2,3)(4,2)(6,4)(7,6)(5,7)(3,5)(1,3)(2,1)(4,2)(6,4)(5,6)(3,5)(4,3)\r\n#(3,4)(5,3)(6,5)(4,6)(2,4)(1,2)(3,1)(5,3)(7,5)(6,7)(4,6)(2,4)(3,2)(5,3)(4,5)\r\nimport time\r\n\r\ndef play():\r\n print(\"Game Rules: (READ VERY CAREFULLY!)\")\r\n time.sleep(1)\r\n print(\"You must move all the frogs (F1, F2 and F3) to right side\")\r\n print(\"and all the toads (T1, T2 and T3) to the left side\")\r\n time.sleep(2)\r\n print(\"Enter a 'From' number to choose your position of the frog/toad\")\r\n time.sleep(1)\r\n print(\"enter a 'To' number to send your frod/toad\")\r\n time.sleep(1)\r\n print(\"Remember, the frogs can only move right and toads can only move left\")\r\n time.sleep(1)\r\n print(\"You can move the frogs/toads to one place that is an empty space\")\r\n print(\" or jump over another frog/toad into an empty space.\")\r\n time.sleep(1)\r\n print(\"You WIN if you move the frogs and toads to opposite sides\")\r\n time.sleep(1)\r\n print(\"If you want to reset the game in the middle of the gameplay\")\r\n print(\"simply enter the position of an empty space ' ' on (To) input\")\r\n \r\n def left1():\r\n temp = frogList[userIndex-1]\r\n frogList[userIndex-1] = frogList[userIndex]\r\n frogList[userIndex] = temp\r\n print(frogList)\r\n return(frogList)\r\n \r\n def right1():\r\n temp1 = frogList[userIndex+1]\r\n frogList[userIndex+1] = frogList[userIndex]\r\n frogList[userIndex] = temp1\r\n print(frogList)\r\n return(frogList)\r\n\r\n def left2():\r\n temp2 = frogList[userIndex-2]\r\n frogList[userIndex-2] = frogList[userIndex]\r\n frogList[userIndex] = temp2\r\n print(frogList)\r\n return(frogList)\r\n\r\n def right2():\r\n temp3 = frogList[userIndex+2]\r\n frogList[userIndex+2] = frogList[userIndex]\r\n frogList[userIndex] = temp3\r\n print(frogList)\r\n return(frogList)\r\n\r\n def invalid():\r\n print(\"Invalid Move, Choose Again\")\r\n\r\n def won():\r\n print(\"You've Won!\")\r\n\r\n\r\n print(\"LET'S PLAY!\")\r\n #index 0 1 2 3 4 5 6\r\n frogList = ['F1', 'F2', 'F3', ' ', 'T1', 'T2', 'T3']\r\n\r\n print(frogList)\r\n userIndex1 = int(input(\"From: \"))\r\n userIndex = userIndex1-1\r\n\r\n while userIndex == ' ' or frogList[userIndex] == ' ':\r\n invalid()\r\n userIndex1 = int(input(\"From: \"))\r\n userIndex = userIndex1-1\r\n\r\n while frogList[userIndex] != ' ':\r\n userMove1 = int(input(\"To: \"))\r\n userMove = userMove1-1\r\n difference = userIndex - userMove\r\n if difference == 1 and frogList[userIndex-1] == ' ' and frogList[userIndex] == 'T1':\r\n left1()\r\n elif difference == 1 and frogList[userIndex-1] == ' ' and frogList[userIndex] == 'T2':\r\n left1()\r\n elif difference == 1 and frogList[userIndex-1] == ' ' and frogList[userIndex] == 'T3':\r\n left1()\r\n elif difference == 2 and frogList[userIndex-2] == ' ' and frogList[userIndex] == 'T1':\r\n left2()\r\n elif difference == 2 and frogList[userIndex-2] == ' ' and frogList[userIndex] == 'T2':\r\n left2()\r\n elif difference == 2 and frogList[userIndex-2] == ' ' and frogList[userIndex] == 
'T3':\r\n left2()\r\n elif difference == -1 and frogList[userIndex+1] == ' ' and frogList[userIndex] == 'F1':\r\n right1()\r\n elif difference == -1 and frogList[userIndex+1] == ' ' and frogList[userIndex] == 'F2':\r\n right1()\r\n elif difference == -1 and frogList[userIndex+1] == ' ' and frogList[userIndex] == 'F3':\r\n right1()\r\n elif difference == -2 and frogList[userIndex+2] == ' ' and frogList[userIndex] == 'F1':\r\n right2()\r\n elif difference == -2 and frogList[userIndex+2] == ' ' and frogList[userIndex] == 'F2':\r\n right2()\r\n elif difference == -2 and frogList[userIndex+2] == ' ' and frogList[userIndex] == 'F3':\r\n right2()\r\n else:\r\n invalid()\r\n\r\n if frogList == ['T1', 'T2', 'T3', ' ', 'F1', 'F2', 'F3']:\r\n print(\"YOU'VE WON!\")\r\n break\r\n \r\n userIndex1 = int(input(\"From: \"))\r\n userIndex = userIndex1-1\r\n\r\ndef demonstration():\r\n print(\"Step 1. [F1, F2, F3, ' ', T1, T2, T3]\")\r\n print(\"Step 2. [F1, F2, F3, T1, ' ', T2, T3]\")\r\n print(\"Step 3. [F1, F2, ' ', T1, F3, T2, T3]\")\r\n print(\"Step 4. [F1, ' ', F2, T1, F3, T2, T3]\")\r\n print(\"Step 5. [F1, T1, F2, ' ', F3, T2, T3]\")\r\n print(\"Step 6. [F1, T1, F2, T2, F3, ' ', T3]\")\r\n print(\"Step 7. [F1, T1, F2, T2, F3, T3, ' ']\")\r\n print(\"Step 8. [F1, T1, F2, T2, ' ', T3, F3]\")\r\n print(\"Step 9. [F1, T1, ' ', T2, F2, T3, F3]\")\r\n print(\"Step 10. [' ', T1, F1, T2, F2, T3, F3]\")\r\n print(\"Step 11. [T1, ' ', F1, T2, F2, T3, F3]\")\r\n print(\"Step 12. [T1, T2, F1, ' ', F2, T3, F3]\")\r\n print(\"Step 13. [T1, T2, F1, T3, F2, ' ', F3]\")\r\n print(\"Step 14. [T1, T2, F1, T3, ' ', F2, F3]\")\r\n print(\"Step 15. [T1, T2, ' ', T3, F1, F2, F3]\")\r\n print(\"Step 16. [T1, T2, T3, ' ', F1, F2, F3]\")\r\n print(\"\")\r\n print(\"OR\")\r\n time.sleep(2)\r\n print(\"\")\r\n print(\"Step 1. [F1, F2, F3, ' ', T1, T2, T3]\")\r\n print(\"Step 2. [F1, F2, ' ', F3, T1, T2, T3]\")\r\n print(\"Step 3. [F1, F2, T1, F3, ' ', T2, T3]\")\r\n print(\"Step 4. [F1, F2, T1, F3, T2, ' ', T3]\")\r\n print(\"Step 5. [F1, F2, T1, ' ', T2, F3, T3]\")\r\n print(\"Step 6. [F1, ' ', T1, F2, T2, F3, T3]\")\r\n print(\"Step 7. [' ', F1, T1, F2, T2, F3, T3]\")\r\n print(\"Step 8. [T1, F1, ' ', F2, T2, F3, T3]\")\r\n print(\"Step 9. [T1, F1, T2, F2, ' ', 'F3, T3]\")\r\n print(\"Step 10. [T1, F1, T2, F2, T3, F3, ' ']\")\r\n print(\"Step 11. [T1, F1, T2, F2, T3, ' ', F3]\")\r\n print(\"Step 12. [T1, F1, T2, ' ', T3, F2, F3]\")\r\n print(\"Step 13. [T1, ' ', T2, F1, T3, F2, F3]\")\r\n print(\"Step 14. [T1, T2, ' ', F1, T3, F2, F3]\")\r\n print(\"Step 15. [T1, T2, T3, F1, ' ', F2, F3]\")\r\n print(\"Step 16. [T1, T2, T3, ' ', F1, F2, F3]\")\r\n main()\r\n \r\ndef main():\r\n global entranceInput\r\n print(\" 1. Play\")\r\n print(\" 2. Demonstration\")\r\n print(\" 3. Exit\")\r\n entranceInput = int(input(\"Enter your Option: \")) \r\n if entranceInput ==1:\r\n play()\r\n main()\r\n elif entranceInput ==2:\r\n demonstration()\r\n elif entranceInput ==3:\r\n print(\"Leaving\")\r\n else:\r\n main()\r\n\r\nmain()\r\n" } ]
1
MichaelSluydts/QueueManager-client
https://github.com/MichaelSluydts/QueueManager-client
a4b313f0054b1f4b4788f75baff1100ecbc84963
8ff38aeb574de8bca0718f91186942ce630070d7
c747e79899a15d4b3baf20849f81ae7777392c04
refs/heads/master
2023-05-02T17:48:45.341268
2021-02-01T12:09:56
2021-02-01T12:09:56
334,930,063
1
1
MIT
2021-02-01T11:38:54
2021-05-03T13:56:54
2021-05-10T14:41:46
null
[ { "alpha_fraction": 0.5531561374664307, "alphanum_fraction": 0.6295680999755859, "avg_line_length": 12.681818008422852, "blob_id": "d47f2df8ff1f2fd589b54080d710b8a539708333", "content_id": "d38a220ff187466b03efbc9dca8d9d2db2acfa7b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 602, "license_type": "permissive", "max_line_length": 57, "num_lines": 44, "path": "/HTtools/HTphasesubmit", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/bin/bash\n#Michael Sluydts 14/12/2011\nwalltime=$[$1-1]':59:00'\n\nif [ $1 -gt 72 ]; then\n queue='special'\nelif [ $1 -gt 12 ]; then\n\tqueue='long'\nelif [ $1 -eq 1 ]; then\n\tqueue='debug'\nelse\n\tqueue='short'\nfi\n#jobs='jobs'\nif [ $VSC_INSTITUTE_CLUSTER == 'muk' ]; then\n queue='batch'\n# jobs='scripts'\nfi\n\ndir=`pwd`\ncat > submit.sh << !\n#!/bin/bash\n#PBS -N $dir\n\n#PBS -m ea\n\n#PBS -q $queue\n\n#PBS -l walltime=$walltime\n\n#PBS -l nodes=1:ppn=1\n\ncd $dir\n\nmodule load pymatgen/2017.10.16-intel-2017b-Python-2.7.14\nmodule load ase/3.8.1\nmodule load HighThroughput/devel\n\nHTphasedb $3 $4\n\nexit 0\n!\n\nqsub submit.sh\n" }, { "alpha_fraction": 0.563341498374939, "alphanum_fraction": 0.5746437311172485, "avg_line_length": 40.194332122802734, "blob_id": "65f1d19a297ff721b322d1de1a98ba0e891e318d", "content_id": "9b06711a86dd81ac69e90be1a87a09b1c6cb1fc9", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10175, "license_type": "permissive", "max_line_length": 149, "num_lines": 247, "path": "/errors/VASPfixes.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "import os\nimport numpy as np\nfrom HighThroughput.manage.calculation import getResults,updateResults,getSettings\nimport numpy as np\nfrom HighThroughput.modules.VASP import cont\ndef test(calc):\n #Dummy\n print('This is a bugfix.')\n return True\n\ndef rmWAVECAR(calc):\n #2: CHGCAR more reliable so clear WAVECAR\n if os.path.isfile('WAVECAR'):\n open('WAVECAR', 'w').close() \n \n if os.path.isfile('WAVECAR.gz'):\n open('WAVECAR.gz', 'w').close()\n \n return True\n\ndef rmCHGCAR(calc):\n #7: In case of corrupted density\n if os.path.isfile('CHGCAR'):\n open('CHGCAR', 'w').close()\n \n if os.path.isfile('CHGCAR.gz'):\n open('CHGCAR.gz', 'w').close()\n \n return True\n\ndef algoSwitch(calc):\n #3: Switch between All/Damped Normal/Fast\n # Can be tuned by bandgap ismear further\n algos = ['F','N','D','A']\n if calc['settings']['INCAR'].get('ALGO'):\n current = calc['settings']['INCAR']['ALGO'][0]\n else:\n current = 'N'\n\n try:\n currentindex = algos.index(current)\n calc['settings']['INCAR']['ALGO'] = algos[(currentindex + 1)%4]\n print('Algorithm switched to ' + calc['settings']['INCAR']['ALGO'])\n except ValueError:\n print('Unknown algorithm detected. 
Skipping algorithm switch.')\n\n \"\"\"old\n if 'ALGO' not in calc['settings']['INCAR'].keys():\n calc['settings']['INCAR']['ALGO'] = 'Fast'\n elif calc['settings']['INCAR']['ALGO'][0] == 'F':\n calc['settings']['INCAR']['ALGO'] = 'Normal'\n elif calc['settings']['INCAR']['ALGO'][0] == 'D':\n calc['settings']['INCAR']['ALGO'] = 'A'\n elif calc['settings']['INCAR']['ALGO'][0] == 'A':\n calc['settings']['INCAR']['ALGO'] = 'D'\"\"\"\n return True\n\ndef halveStep(calc):\n #4: bit much for multiple times, maybe should split\n if 'TIME' in calc['settings']['INCAR'].keys():\n calc['settings']['INCAR']['TIME'] = np.ceil(float(calc['settings']['INCAR']['TIME'])*100.0/2.0)/100.0\n elif 'POTIM' in calc['settings']['INCAR']:\n calc['settings']['INCAR']['POTIM'] = np.ceil(float(calc['settings']['INCAR']['POTIM'])*100.0/2.0)/100.0\n return True\n\n\ndef doubleStep(calc):\n #5: bit much for multiple times\n if 'TIME' in calc['settings']['INCAR'].keys():\n calc['settings']['INCAR']['TIME'] = float(calc['settings']['INCAR']['TIME'])*2.0\n elif 'POTIM' in calc['settings']['INCAR']:\n calc['settings']['INCAR']['POTIM'] = float(calc['settings']['INCAR']['POTIM'])*2.0\n return True\n\ndef preconv(calc):\n #8: Preconverge calculation with another algorithm.\n preconvAlgo = {'A' : 'N', 'D' : 'N'}\n calc['settings']['INCAR']['ALGOb'] = calc['settings']['INCAR']['ALGO']\n calc['settings']['INCAR']['ALGO'] = preconvAlgo[calc['settings']['INCAR']['ALGO'][0]]\n calc['settings']['INCAR']['NELMb'] = calc['settings']['INCAR']['NELM'] \n calc['settings']['INCAR']['NELM'] = '8'\n return True\n\ndef restorePreconv(calc):\n #9: Restore the original settings before preconvergence.\n if os.path.isfile('CHGCAR.prec'):\n if os.stat('CHGCAR.prec').st_size > 0:\n os.rename('CHGCAR.prec','CHGCAR')\n if 'ALGOb' in calc['settings']['INCAR'].keys():\n calc['settings']['INCAR']['ALGO'] = calc['settings']['INCAR']['ALGOb'] \n del calc['settings']['INCAR']['ALGOb'] \n if 'NELMb' in calc['settings']['INCAR'].keys(): \n calc['settings']['INCAR']['NELM'] = calc['settings']['INCAR']['NELMb']\n del calc['settings']['INCAR']['NELMb']\n return True\n\n\ndef doubleNELM(calc):\n if 'NELM' not in calc['settings']['INCAR'].keys():\n calc['settings']['INCAR']['NELM'] = 60\n calc['settings']['INCAR']['NELM'] = int(calc['settings']['INCAR']['NELM'])*2\n return True\n\ndef halveNELM(calc):\n if 'NELM' not in calc['settings']['INCAR'].keys():\n calc['settings']['INCAR']['NELM'] = 60\n calc['settings']['INCAR']['NELM'] = int(calc['settings']['INCAR']['NELM'])/2\n return True\n\ndef startWAVECAR(calc):\n #10 Ensure a preconverged WAVECAR is used for the new coefficients and the density.\n calc['settings']['INCAR']['ISTART'] = \"1\"\n calc['settings']['INCAR']['ICHARG'] = \"0\" \n return True\n\ndef startCHGCAR(calc):\n calc['settings']['INCAR']['ISTART'] = \"0\"\n calc['settings']['INCAR']['ICHARG'] = \"1\"\n return True\n\ndef halveSigmaInherit(calc):\n presults = getResults(calc['parent'])\n if 'settingsmod' not in presults.keys():\n presults['settingsmod'] = { \"INCAR\" : {} }\n if 'INCAR' not in presults['settingsmod'].keys():\n presults['settingsmod']['INCAR'] = {}\n elif presults['settingsmod'].get('INCAR').get('SIGMA') != None:\n presults['settingsmod']['INCAR']['SIGMA'] = float(presults['settingsmod']['INCAR']['SIGMA'])/2\n else:\n presults['settingsmod']['INCAR']['SIGMA'] = float(calc['settings']['INCAR']['SIGMA'])/2\n updateResults(presults, calc['parent'])\n return True\n\ndef changeSpinInherit(calc):\n presults = getResults(calc['parent'])\n 
if 'settingsmod' not in presults.keys():\n presults['settingsmod'] = { \"INCAR\" : {} }\n if 'INCAR' not in presults['settingsmod'].keys():\n presults['settingsmod']['INCAR'] = {}\n if presults['settingsmod'].get('INCAR').get('ISPIN') != None:\n presults['settingsmod']['INCAR']['ISPIN'] = (int(presults['settingsmod']['INCAR']['ISPIN'])-2) % 2 + 1\n else:\n presults['settingsmod']['INCAR']['ISPIN'] = (int(calc['settings']['INCAR']['ISPIN'])-2) % 2 + 1\n print('Setting spin to ' + str(presults['settingsmod']['INCAR']['ISPIN']))\n updateResults(presults, calc['parent'])\n return True\n\ndef changeSmearInherit(calc):\n presults = getResults(calc['parent'])\n if 'settingsmod' not in presults.keys():\n presults['settingsmod'] = {\"INCAR\": {}}\n if 'INCAR' not in presults['settingsmod'].keys():\n presults['settingsmod']['INCAR'] = {}\n\n if presults['settingsmod'].get('INCAR').get('ISMEAR') != None:\n if presults['settingsmod'].get('INCAR').get('ISMEAR') == 0:\n presults['settingsmod']['INCAR']['ISMEAR'] = 1\n presults['settingsmod']['INCAR']['SIGMA'] = 0.2\n elif presults['settingsmod'].get('INCAR').get('ISMEAR') == 1:\n presults['settingsmod']['INCAR']['ISMEAR'] = 0\n presults['settingsmod']['INCAR']['SIGMA'] = 0.05\n else:\n if calc['settings']['INCAR']['ISMEAR'] == 0:\n presults['settingsmod']['INCAR']['ISMEAR'] = 1\n presults['settingsmod']['INCAR']['SIGMA'] = 0.2\n elif calc['settings']['INCAR']['ISMEAR'] == 1:\n presults['settingsmod']['INCAR']['ISMEAR'] = 0\n presults['settingsmod']['INCAR']['SIGMA'] = 0.05\n updateResults(presults, calc['parent'])\n return True\n\ndef converge(calc):\n presults = getResults(calc['parent'])\n if 'settingsmod' not in presults.keys():\n presults['settingsmod'] = {}\n \n for propset in presults['convergence']:\n total = len(propset)\n prop = propset[0]\n for i in range(1,total):\n (crit,cond,current,converged) = propset[i]\n if converged == 1:\n continue;\n elif crit == 'K':\n if 'KPOINTS' not in presults['settingsmod'].keys():\n presults['settingsmod']['KPOINTS'] = {}\n if 'K' not in presults['settingsmod']['KPOINTS'].keys():\n presults['settingsmod']['KPOINTS']['K'] = '2 2 2'\n else:\n presults['settingsmod']['KPOINTS']['K'] = ' '.join([str(int(x) + 2) for x in presults['settingsmod']['KPOINTS']['K'].split(' ')])\n #curkp = [int(x) for x in calc['settings']['KPOINTS']['K'].split(' ')]\n #curmod = [int(x) for x in presults['settingsmod']['KPOINTS']['K'].split(' ')]\n # calc['settings']['KPOINTS']['K'] = ' '.join([str(curkp[x] + curmod[x]) for x in range(3)])\n break;\n elif crit == 'ENCUT':\n if 'INCAR' not in presults['settingsmod'].keys():\n presults['settingsmod']['INCAR'] = {}\n if 'ENCUT' not in presults['settingsmod']['INCAR']:\n presults['settingsmod']['INCAR']['ENCUT'] = 100\n else:\n presults['settingsmod']['INCAR']['ENCUT'] += 100\n # calc['settings']['INCAR']['ENCUT'] = int(calc['settings']['INCAR']['ENCUT']) + presults['settingsmod']['INCAR']['ENCUT']\n break;\n updateResults(presults,calc['parent'])\n return True\n\ndef lowerSYMPREC(calc):\n presults = getResults(calc['parent'])\n if 'settingsmod' not in presults.keys():\n presults['settingsmod'] = {}\n\n if 'SYMPREC' not in calc['settings']['INCAR'].keys():\n calc['settings']['INCAR']['SYMPREC'] = 1e-5\n if 'INCAR' not in presults['settingsmod'].keys():\n presults['settingsmod']['INCAR'] = {}\n if 'ENCUT' not in presults['settingsmod']['INCAR']:\n presults['settingsmod']['INCAR']['SYMPREC'] = np.float32(calc['settings']['INCAR']['SYMPREC'])/10.\n else:\n 
presults['settingsmod']['INCAR']['SYMPREC'] = np.float32(calc['settings']['INCAR']['SYMPREC'])/10.\n return True\n\n\ndef raiseSYMPREC(calc):\n presults = getResults(calc['parent'])\n if 'settingsmod' not in presults.keys():\n presults['settingsmod'] = {}\n\n if 'SYMPREC' not in calc['settings']['INCAR'].keys():\n calc['settings']['INCAR']['SYMPREC'] = 1e-5\n if 'INCAR' not in presults['settingsmod'].keys():\n presults['settingsmod']['INCAR'] = {}\n if 'ENCUT' not in presults['settingsmod']['INCAR']:\n presults['settingsmod']['INCAR']['SYMPREC'] = np.float32(calc['settings']['INCAR']['SYMPREC'])*10.\n else:\n presults['settingsmod']['INCAR']['SYMPREC'] = np.float32(calc['settings']['INCAR']['SYMPREC'])*10.\n return True\n\ndef toggleISYM(calc):\n presults = getResults(calc['parent'])\n if 'ISYM' not in calc['settings']['INCAR'].keys():\n calc['settings']['INCAR']['ISYM'] = 1\n\n calc['settings']['INCAR']['ISYM'] = (int(calc['settings']['INCAR']['ISYM']) + 1) %2\n return True\n\ndef contCalc(calc):\n return True\n" }, { "alpha_fraction": 0.5380173325538635, "alphanum_fraction": 0.5413859486579895, "avg_line_length": 38.18867874145508, "blob_id": "83884124ce7a440b850a8be2c665e502af72536c", "content_id": "f7643931f2d74ba33adf7b9aa198627688f7d32f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2078, "license_type": "permissive", "max_line_length": 224, "num_lines": 53, "path": "/manage/queue.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "from HighThroughput.communication.mysql import *\nimport os\n\ndef get(qid):\n queue = mysql_query('SELECT * FROM `queues` WHERE `id` = ' + str(qid))\n return queue\n\ndef showAll():\n return mysql_query('SELECT * FROM `queues`')\n\ndef add(name,workflow='1',fields='ID:id,Info:text,Material ID:file,Status:stat,Jobid:jobid,Start:start,End:end',directory=os.getcwd()):\n owner = mysql_query('')\n dbfield = ''\n if (isinstance(fields, dict)):\n for field in fields.keys():\n dbfield += field + ':' + fields[field] + ','\n dbfield = dbfield[:-1]\n else:\n dbfield = fields\n result = mysql_query('INSERT INTO `queues` (`name`, `owner`, `dir`, `fields`, `workflow`) VALUES (\\'' + str(name) + '\\', ' + str(owner) + ', \\'' + str(directory) + '\\',\\'' + str(dbfield) + '\\', ' + str(workflow) + ')')\n qid = result\n if(int(result) > 0):\n print('The ' + name + ' queue has been added and assigned id ' + str(qid))\n else:\n print('Adding queue failed (contact Michael)')\n return qid\n\ndef modify(params):\n query = 'UPDATE `queues` SET '\n for key in params.keys():\n if key != 'id':\n query += '`' + key + '` ='\n if not str(params[key]).isdigit():\n query += '\\'' + str(params[key]) + '\\''\n else:\n query += str(params[key])\n query += ', '\n query = query[:-2] + ' WHERE `id` = ' + str(params['id'])\n result = mysql_query(query)\n if (result == '1'):\n print('The queue has been modified. Please verify.')\n else:\n print('Help... 
Me...')\n \ndef remove(qid):\n name = mysql_query('SELECT `name` FROM `queues` WHERE `id` = ' + str(qid))\n result = mysql_query('DELETE FROM `queues` WHERE `id` = ' + str(qid))\n result2 = mysql_query('DELETE FROM `calculations` WHERE `queue` = ' + str(qid))\n if (result == '1'):\n print('The ' + name['name'] + '(' + str(qid) + ') queue has been removed.')\n else:\n print('Removing the ' + name['name'] + '(' + str(qid) + ') has failed.')\n return int(result)\n\n" }, { "alpha_fraction": 0.6549019813537598, "alphanum_fraction": 0.6627451181411743, "avg_line_length": 30.875, "blob_id": "cdc45bb244d07d8587eeabfba13b5df3e5a319a9", "content_id": "00c33742954a412fd23c5d22d0c76e2204bf9179", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 255, "license_type": "permissive", "max_line_length": 119, "num_lines": 8, "path": "/errors/Gaussiandetectors.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "import subprocess\ndef test(calc):\n print('SEARCHING FOR ERRORS')\n det = int(subprocess.Popen('grep WAAAAAAAAAGH tempout | wc -l',shell=True,stdout=subprocess.PIPE).communicate()[0])\n if det > 0:\n return True\n else:\n return False\n" }, { "alpha_fraction": 0.7740963697433472, "alphanum_fraction": 0.7740963697433472, "avg_line_length": 20.133333206176758, "blob_id": "4cbc1126bc536605a94cb1f5748eaa7de6fc0e9c", "content_id": "03898b38b33546f7dfc038a254d43bdfd6eab841", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 332, "license_type": "permissive", "max_line_length": 95, "num_lines": 15, "path": "/README.md", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "This repository contains the client files to use QueueManager on your cluster.\r\n\r\nQueue Manager has received contributions from:\r\n\r\nMichael Sluydts\r\nMichiel Larmuseau\r\nKarel Dumon\r\nTitus Crepain\r\n\r\nThe following open-source packages have been utilized during the construction of Queue Manager:\r\nPymatGen\r\n\r\nThe following packages are dependencies:\r\nASE\r\nPymatgen\r\n" }, { "alpha_fraction": 0.8147566914558411, "alphanum_fraction": 0.8178963661193848, "avg_line_length": 44.42856979370117, "blob_id": "40a55743a3bb7e932eac2756fc5f5e0cb4ba088c", "content_id": "9f63354894069f9c065801cd7a90d337a7f7365c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 637, "license_type": "permissive", "max_line_length": 110, "num_lines": 14, "path": "/examples/makeDefault", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, random, re, shutil, subprocess, sys, time,math\nimport HighThroughput.manage.calculation as HT\nimport HighThroughput.manage.template as template\nimport HighThroughput.manage.queue as queue\nimport HighThroughput.manage.workflow as workflow\nimport HighThroughput.manage.material\nfrom HighThroughput.io.VASP import *\nfrom HighThroughput.utils.generic import *\nfrom HighThroughput.modules.VASP import *\nfrom HighThroughput.errors.generic import *\nfrom HighThroughput.communication.mysql import *\n\nqid = queue.add('default', 20, 'ID:id,Info:text,Material ID:file,Status:stat,Jobid:jobid,Start:start,End:end')\n\n" }, { "alpha_fraction": 0.543667733669281, "alphanum_fraction": 0.580700695514679, "avg_line_length": 29.788034439086914, "blob_id": "72efd209d7925b9ee1e1d42a788c46c8b0f17504", 
"content_id": "842972d208f7851e151effbaf3f1bf2e9bcef4ba", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 18011, "license_type": "permissive", "max_line_length": 458, "num_lines": 585, "path": "/examples/IntelligentScreening/old.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport shutil, os, sys, re\nimport subprocess\nfrom subprocess import Popen, PIPE\nfrom sys import argv\n\nimport ase.io\n\nimport HighThroughput.manage.calculation as HT\nimport HighThroughput.io\n\nimport json\nimport time\nimport random\n\nfrom numpy.linalg import norm\nfrom numpy import dot, arccos, degrees, floor, prod\n\n#sleep = random.randrange(20)\n#print('Random sleep... ZzZzZz for '+str(sleep)+' seconds, before starting script.')\n#time.sleep(sleep)\n\n\nqid = argv[1]\nsubmit_arg = '' + argv[2] + ' ' + argv[3] + ' ' + argv[4]\nprint(\"Running on queue \" + argv[1] + \" with a walltime of \" + argv[2] + \"h, using \" + argv[3] + \" nodes and \" + argv[4] + \" cores.\")\n\nversion = ''+argv[5]\n#version = 1\n\nif os.getenv('VSC_INSTITUTE_CLUSTER') == 'muk':\n qdir = os.path.join('/gpfs/scratch/projects/project_gpilot/vsc40479', 'queues', str(qid))\nelif os.getenv('VSC_INSTITUTE_CLUSTER') == 'breniac':\n qdir = os.path.join('/scratch/leuven/404/vsc40479/queues',str(qid))\nelse:\n qdir = os.path.join('/user/scratch/gent/vsc404/vsc40479', 'queues', str(qid))\ndef execute(command):\n out, err = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True).communicate()\n print(out)\n print >> sys.stderr, err\n\n if err in locals():\n #raise Exception('Error in executing bash-command.')\n return False\n else:\n return out\n\ndef mkdir(command):\n if not os.path.isdir(command):\n os.mkdir(command)\n\ndef remove(command):\n if os.path.isfile(command):\n os.remove(command)\n else:\n print(str(command)+' is not a file.')\n\ndef error_catch(command):\n try:\n execute(command)\n return True\n except:\n return False\n\n\n#os.chdir(qdir + '/fetchid')\n\n#HT.fetchgetstart(qid)\ncinfo = HT.fetchgetstart(qid)\n\n#cid = HT.calcid\ncid = cinfo['id']\n\n#cinfo = HT.get(cid)\ncfile = cinfo['file']\nstatus = int(cinfo['stat'])\n\n#cinfo['settings']['INCAR']['ENCUT'] = 300 #Only for fast debugging\n#cinfo['settings']['KPOINTS']['K'] = \"1 1 1\" #Only for fast debugging\n\n#settings = json.loads(cinfo['settings'])\nsettings = HT.getSettings(cid)\n\nINCAR_dict = settings['INCAR']\nKPOINTS_dict = settings['KPOINTS']\n\nprint('Server: '+cinfo['server'])\nif int(cinfo['queue']) == 237:\n KPOINTS_dict['K'] = \"5 5 5\"\nelif int(cinfo['queue']) == 238:\n KPOINTS_dict['K'] = \"5 5 5\"\nelif int(cinfo['queue']) == 239:\n KPOINTS_dict['K'] = \"5 5 5\"\n\nKPOINTS_dict['mode'] = \"G\" #Voor hexagonale cellen: shift k-punt naar Gammapunt (G), andere cellen: Monkhorst (M)\nINCAR_dict['ALGO'] = 'A'\nINCAR_dict['ICHARG'] = 2\nINCAR_dict['ENCUT'] = '520'\nINCAR_dict['NCORE'] = 28\nINCAR_dict['NPAR'] = 4\nif int(status) < 17:\n INCAR_dict['ISIF'] = 4\n INCAR_dict['IBRION'] = 2\n INCAR_dict['NELM'] = 200\n INCAR_dict['NSW'] = 1000\n INCAR_dict['ICHARG'] = 1\n INCAR_dict['ISTART'] = 1\nif int(status) < 15:\n INCAR_dict['ISMEAR'] = 1\n INCAR_dict['SIGMA'] = 0.05\nelse:\n INCAR_dict['NSW'] = 0\n INCAR_dict['IBRION'] = -1\n INCAR_dict['ISIF'] = 0\n INCAR_dict['NELM'] = 2000\n\nsettings = {'INCAR':INCAR_dict, 'KPOINTS':KPOINTS_dict}\n#print('Updating settings (NPAR)')\n#HT.updateSettings(settings, 
cid)\nprint(settings)\nif os.path.isfile(qdir + '/CALCULATIONS/' + str(cfile) + '/STEP3/POSCAR'):\n crystal = ase.io.read(qdir + '/CALCULATIONS/' + str(cfile) + '/STEP3/POSCAR')\n cell = crystal.get_cell();\n a = cell[0];\n b = cell[1];\n c = cell[2];\n na = round(norm(a),3);\n nb = round(norm(b),3);\n nc = round(norm(c),3);\n nat = crystal.get_number_of_atoms()\n\n biglim = 20000./nat\n smallim = 2500./nat\n\n lc = [na,nb,nc]\n kp = [11,11,11]\n small = float(min(lc))\n for i in range(0,3):\n ratio = round(11.*small/lc[i],0)\n if ratio %2 == 0:\n ratio = ratio+1\n kp[i] = ratio\n kp = [int(x) for x in kp]\n while(prod(kp) < biglim):\n kp[0] += 2\n kp[1] += 2\n kp[2] += 2\n smallkp = [int(floor(float(x)/2.)) for x in kp]\n smallkp = [ x+ 1 if x %2 ==0 else x for x in smallkp]\n while(prod(smallkp) < smallim):\n smallkp[0] += 2\n smallkp[1] += 2\n smallkp[2] += 2\n\n\nif status== 1:\n\n os.chdir(qdir+'/CALCULATIONS')\n mkdir(str(cfile))\n\n os.chdir(qdir+'/CALCULATIONS/'+str(cfile))\n\n mkdir('./STEP3')\n os.chdir('./STEP3')\n\n shutil.copy('/scratch/leuven/404/vsc40479/queues/' + str(qid) + '/import/' + str(cfile) + '.vasp', './POSCAR')\n #shutil.copy('/data/gent/vsc404/vsc40479/tier1/237/import/CHGCAR' + str(cfile), './CHGCAR')\n\n crystal = ase.io.read(qdir + '/CALCULATIONS/' + str(cfile) + '/STEP3/POSCAR')\n cell = crystal.get_cell();\n a = cell[0];\n b = cell[1];\n c = cell[2];\n na = round(norm(a),3);\n nb = round(norm(b),3);\n nc = round(norm(c),3);\n nat = crystal.get_number_of_atoms()\n\n biglim = 20000./nat\n smallim = 2500./nat\n\n lc = [na,nb,nc]\n kp = [11,11,11]\n small = float(min(lc))\n for i in range(0,3):\n ratio = round(11.*small/lc[i],0)\n if ratio %2 == 0:\n ratio = ratio+1\n kp[i] = ratio\n kp = [int(x) for x in kp]\n while(prod(kp) < biglim):\n kp[0] += 2\n kp[1] += 2\n kp[2] += 2\n\n smallkp = [int(floor(float(x)/2.)) for x in kp]\n smallkp = [ x+ 1 if x %2 ==0 else x for x in smallkp]\n while(prod(smallkp) < smallim):\n smallkp[0] += 2\n smallkp[1] += 2\n smallkp[2] += 2\n\n\n poscar = open('POSCAR','r')\n lines = poscar.readlines()\n elements = lines[5][:-1].lstrip()\n count = len(re.findall(\"[a-zA-Z_]+\", elements))\n execute('POTgen_MP ' + str(elements))\n\n poscar.close()\n\n print('STATUS 5 started')\n print('= STEP 3: EOS-prepare')\n\n step = 3\n\n atoms = ase.io.read('POSCAR',format=\"vasp\")\n volume = float(atoms.get_volume())\n\n\n for i in xrange(94, 107, 2):\n I = 0.01*i\n\n os.chdir(qdir+'/CALCULATIONS/'+str(cfile)+'/STEP'+str(step))\n\n mkdir(str(I))\n shutil.copy('./POSCAR','./' + str(I) + '/POSCAR')\n shutil.copy('./POTCAR','./' + str(I) + '/POTCAR')\n #shutil.copy('./CHGCAR','./' + str(I) + '/CHGCAR')\n os.chdir('./' + str(I))\n KPOINTS_dict['K'] = str(smallkp[0]) + ' ' + str(smallkp[1]) + ' ' + str(smallkp[2])\n HighThroughput.io.VASP.writeKPOINTS(KPOINTS_dict, os.getcwd())\n HighThroughput.io.VASP.writeINCAR(INCAR_dict, os.getcwd())\n\n poscar = open('POSCAR','r')\n lines = poscar.readlines()\n lines[1] = ' -' + str(volume*I) + '\\n'\n\n poscar = open('POSCAR','w')\n poscar.writelines(lines)\n poscar.close()\n\n os.chdir(qdir+'/CALCULATIONS/'+str(cfile)+'/STEP'+str(step))\n\n print('EOS 0.94')\n\n os.chdir(qdir+'/CALCULATIONS/'+str(cfile)+'/STEP'+str(step)+'/0.94')\n remove(''+ qdir+'/CALCULATIONS/' + str(cfile) + '/STEP' +str(step)+ '/CHGCAR')\n\n execute('free -m; date; touch WAVECAR')\n remove(''+ qdir+'/CALCULATIONS/' + str(cfile) + '/STEP' +str(step)+ '/0.94/tempout')\n\n execute('mympirun --output ' +qdir+'/CALCULATIONS/' + str(cfile) + 
'/STEP'+str(step)+'/0.94/tempout vasp_std')\n\n execute('free -m; date')\n\n print('END STATUS 5 / STEP 3')\n\nelif status==3:\n print('STEP 7 started')\n\n step = 3\n\n print('EOS 0.96')\n\n os.chdir(qdir+'/CALCULATIONS/'+str(cfile)+'/STEP'+str(step)+'/0.96')\n\n execute('free -m; date; touch WAVECAR')\n remove(''+ qdir+'/CALCULATIONS/' + str(cfile) + '/STEP' +str(step)+ '/0.96/tempout')\n\n execute('mympirun --output ' +qdir+'/CALCULATIONS/' + str(cfile) + '/STEP'+str(step)+'/0.96/tempout vasp_std')\n\n execute('free -m; date')\n\nelif status==5:\n print('STEP 9 started')\n\n step = 3\n\n print('EOS 0.98')\n\n os.chdir(qdir+'/CALCULATIONS/'+str(cfile)+'/STEP'+str(step)+'/0.98')\n\n execute('free -m; date; touch WAVECAR')\n remove(''+ qdir+'/CALCULATIONS/' + str(cfile) + '/STEP' +str(step)+ '/0.98/tempout')\n\n execute('mympirun --output ' +qdir+'/CALCULATIONS/' + str(cfile) + '/STEP'+str(step)+'/0.98/tempout vasp_std')\n\n execute('free -m; date')\n\nelif status==7:\n print('STEP 11 started')\n\n step = 3\n\n print('EOS 1.0')\n\n os.chdir(qdir+'/CALCULATIONS/'+str(cfile)+'/STEP'+str(step)+'/1.0')\n\n execute('free -m; date; touch WAVECAR')\n remove(''+ qdir+'/CALCULATIONS/' + str(cfile) + '/STEP' +str(step)+ '/1.0/tempout')\n\n execute('mympirun --output ' +qdir+'/CALCULATIONS/' + str(cfile) + '/STEP'+str(step)+'/1.0/tempout vasp_std')\n\n execute('free -m; date')\n\nelif status==9:\n print('STEP 13 started')\n\n step = 3\n\n print('EOS 1.02')\n\n os.chdir(qdir+'/CALCULATIONS/'+str(cfile)+'/STEP'+str(step)+'/1.02')\n\n execute('free -m; date; touch WAVECAR')\n remove(''+ qdir+'/CALCULATIONS/' + str(cfile) + '/STEP' +str(step)+ '/1.02/tempout')\n\n execute('mympirun --output ' +qdir+'/CALCULATIONS/' + str(cfile) + '/STEP'+str(step)+'/1.02/tempout vasp_std')\n\n execute('free -m; date')\n\nelif status==11:\n print('STEP 15 started')\n\n step = '3/1.04'\n\n print('EOS 1.04')\n\n os.chdir(qdir+'/CALCULATIONS/'+str(cfile)+'/STEP'+str(step))\n\n execute('free -m; date; touch WAVECAR')\n remove(''+ qdir+'/CALCULATIONS/' + str(cfile) + '/STEP' +str(step)+ '/tempout')\n\n execute('mympirun --output ' +qdir+'/CALCULATIONS/' + str(cfile) + '/STEP'+str(step)+'/tempout vasp_std')\n\n execute('free -m; date')\n\nelif status==13:\n print('STEP 17 started')\n\n step = '3/1.06'\n\n print('EOS 1.06')\n\n os.chdir(qdir+'/CALCULATIONS/'+str(cfile)+'/STEP'+str(step))\n\n execute('free -m; date; touch WAVECAR')\n remove(''+ qdir+'/CALCULATIONS/' + str(cfile) + '/STEP' +str(step)+ '/tempout')\n\n execute('mympirun --output ' +qdir+'/CALCULATIONS/' + str(cfile) + '/STEP'+str(step)+'/tempout vasp_std')\n\n execute('free -m; date')\n\nelif status==15:\n print('STEP 19 started')\n\n step = 4\n\n mkdir(qdir+'/CALCULATIONS/'+str(cfile)+'/STEP'+str(step))\n os.chdir(qdir+'/CALCULATIONS/'+str(cfile)+'/STEP'+str(step))\n\n execute('STEP4_prepare')\n\n if execute('eos EOS_data 1 1')==False:\n print('Failed executing eos.')\n error = 'EOS-error'\n\n elif os.path.isfile('EOS_data.eosout'):\n volume = float(Popen('grep V0 EOS_data.eosout | awk \\'{print $2}\\' ', stdout=PIPE, shell=True).communicate()[0])\n best = ''\n diff = 999999999\n for e in os.listdir('../STEP3/'):\n if os.path.isdir(os.path.join('../STEP3',e)):\n vol = float(Popen('grep \\'volume \\' ../STEP3/' + e + '/OUTCAR | tail -n 1 | awk \\'{print $5}\\'', stdout=PIPE, shell=True).communicate()[0])\n if abs(vol - volume) < diff:\n diff = abs(vol - volume)\n best = e\n\n shutil.copy('../STEP3/'+ best + '/CONTCAR','./POSCAR')\n shutil.copy('../STEP3/' + best + 
'/POTCAR','./POTCAR')\n if os.path.isfile('../STEP3/' + best + '/CHGCAR'):\n shutil.copy('../STEP3/' + best + '/CHGCAR','./CHGCAR')\n INCAR_dict['IBRION'] =\"-1\"\n INCAR_dict['ISIF'] = \"0\"\n INCAR_dict['NSW'] = \"0\"\n KPOINTS_dict['K'] = str(kp[0]) + ' ' + str(kp[1]) + ' ' + str(kp[2])\n\n HighThroughput.io.VASP.writeKPOINTS(KPOINTS_dict, os.getcwd())\n HighThroughput.io.VASP.writeINCAR(INCAR_dict, os.getcwd())\n\n poscar = open('POSCAR','r')\n lines = poscar.readlines()\n lines[1] = ' -' + str(volume) + '\\n'\n\n poscar = open('POSCAR','w')\n poscar.writelines(lines)\n poscar.close()\n\n execute('free -m; date; touch WAVECAR')\n remove(''+ qdir+'/CALCULATIONS/' + str(cfile) + '/STEP' +str(step)+ '/tempout')\n\n execute('mympirun --output ' +qdir+'/CALCULATIONS/' + str(cfile) + '/STEP'+str(step)+'/tempout vasp_std')\n\n execute('free -m; date')\n else:\n print( 'Something wrong while EOS-fitting. Error...')\n error = 'EOS-error'\n\nelif status==17:\n print('STEP 21 started')\n\n step = 'DOS'\n\n mkdir(qdir+'/CALCULATIONS/'+str(cfile)+'/'+str(step))\n os.chdir(qdir+'/CALCULATIONS/'+str(cfile)+'/'+str(step))\n\n shutil.copy('../STEP4/CONTCAR', './POSCAR')\n shutil.copy('../STEP4/POTCAR', './POTCAR')\n shutil.copy('../STEP4/CHGCAR', './CHGCAR')\n KPOINTS_dict['K'] = str(kp[0]) + ' ' + str(kp[1])+ ' ' + str(kp[2])\n\n HighThroughput.io.VASP.writeKPOINTS(KPOINTS_dict, os.getcwd())\n HighThroughput.io.VASP.writeINCAR(INCAR_dict, os.getcwd())\n\n execute('free -m; date; touch WAVECAR')\n remove(''+ qdir+'/CALCULATIONS/' + str(cfile) + '/' +str(step)+ '/tempout')\n\n execute('mympirun --output ' +qdir+'/CALCULATIONS/' + str(cfile) + '/'+str(step)+'/tempout vasp_std')\n\n execute('free -m; date')\n\nelif status==19:\n print('STEP 23 started')\n\n step = 'BANDS'\n\n mkdir(qdir+'/CALCULATIONS/'+str(cfile)+'/'+str(step))\n os.chdir(qdir+'/CALCULATIONS/'+str(cfile)+'/'+str(step))\n\n shutil.copy('../STEP4/CONTCAR', './POSCAR')\n shutil.copy('../STEP4/POTCAR', './POTCAR')\n shutil.copy('../STEP4/CHGCAR', './CHGCAR')\n HighThroughput.io.VASP.writeKPOINTS(KPOINTS_dict, os.getcwd())\n HighThroughput.io.VASP.writeINCAR(INCAR_dict, os.getcwd())\n\n execute('free -m; date; touch WAVECAR')\n remove(''+ qdir+'/CALCULATIONS/' + str(cfile) + '/' +str(step)+ '/tempout')\n\n execute('aconvasp --kpath < POSCAR > KPOINTS')\n\n shutil.copy('KPOINTS', 'KPOINTStemp')\n os.remove('KPOINTS')\n\n KPOINTSstarted = True\n KPOINTSfile = open('KPOINTStemp','r')\n for line in KPOINTSfile:\n if KPOINTSstarted and line[:1]!='/':\n if \"G-M-K-G-A-L-H-A L-M K-H\" in line:\n line = line.replace(\"G-M-K-G-A-L-H-A L-M K-H\",\"G-M-K-G-A-L-H-A|L-M|K-H\")\n KPOINTS = open('KPOINTS','a')\n KPOINTS.write(line)\n KPOINTS.close()\n if line[:1]=='/':\n KPOINTSstarted = not KPOINTSstarted\n os.remove('KPOINTStemp')\n\n execute('mympirun --output ' +qdir+'/CALCULATIONS/' + str(cfile) + '/'+str(step)+'/tempout vasp_std')\n\n execute('free -m; date')\n\nelse:\n print('Status ' + str(status) + ' is not a valid status. 
Calculation terminated.')\n sys.exit()\n\n\n#UPDATE POTCAR INFO\n\nPOTCAR_version = execute('grep -a \\'TITEL\\' POTCAR | awk \\'{ print $4 }\\'')\nsettings['POTCAR'] = POTCAR_version.strip().replace('\\n',', ')\n\n#END CALCULATION AND FETCH RESULTS\n\nenergy = execute('grep \\'energy without entropy\\' OUTCAR | tail -1 | awk \\'{ print $7 }\\'')\n\nif 'error' in locals():\n HT.updateResults({'error':error}, cid)\nelif energy=='' or not 'energy' in locals():\n HT.updateResults({'error':'Energy missing'}, cid)\n print('Energy missing! Error...')\nelif not os.path.isfile('CHGCAR') and not os.path.isfile('CHG'):\n HT.updateResults({'error':'CHGCAR and CHG missing. VASP Error?'}, cid)\n print('CHGCAR/CHG missing. VASP Error?')\nelse:\n print('Energy OK. Ending calculation, deleting junk files and fetching results.')\n print('This is the HT.end cid: ' + str(cid))\n\n ended = 0\n tries = 0\n while(ended == 0 and tries < 10):\n ended = HT.end(cid)\n tries = tries + 1\n print tries\n time.sleep(random.randint(10,100))\n if(ended == 0):\n print('ERROR: I tried to end calculation ' + str(cid) + str(tries) + ' times, but no succes.') #tries should always be 10\n HT.updateResults({'error':'Ending calculation failed'}, cid)\n\n else:\n if status<15:\n #os.remove('CHGCAR')\n os.remove('CHG')\n\n results = HT.getResults(cid)\n\n #could leave this out when working with QZP's\n\n poscar = open('POSCAR','r')\n lines = poscar.readlines()\n\n numberofatoms = lines[6][:-1].lstrip()\n numberofatoms = \" \".join(numberofatoms.split())\n numberofatoms = sum(map(int, numberofatoms.split(' ')))\n\n energy = float(energy)\n\n Eatom = energy/numberofatoms\n results['Eatom'] = Eatom\n\n atoms = lines[5].split()\n\n energies = {'Te':-3.142188083,'K':-1.04693431,'Tl':-2.2266554,'Se':-3.498123137,'Rb':-0.936014755,'Sb':-4.13566371,'P':-5.373717185,'Bi':-3.885356122,'Po':-3.07473254,'Al':-3.745478105,'Ca':-1.929739603,'In':-2.55987617,'Sn':-3.846317905,'Ga':-2.905926696,'Mg':-1.506391565,'Na':-1.311801313,'Ba':-1.90827009,'Sr':-1.636048335,'Cs':-0.852268485,'S':-4.125889916,'Si':-5.424861565,'Ge':-4.51831862,'Pb':-3.565225973,'As':-4.66985772,'Li':-1.910459733}\n\n\n if len(atoms)==1:\n results['Eformation'] = Eatom - float(energies[str(atoms[0])])\n elif len(atoms)==4:\n results['Eformation'] = Eatom - float(6*energies[str(atoms[0])] + energies[str(atoms[1])] + 4*energies[str(atoms[2])] + 3*energies[str(atoms[3])])/14\n\n results['volume'] = float(Popen('grep volume OUTCAR | tail -1 | awk \\'{print $5}\\' ', stdout=PIPE, shell=True).communicate()[0])\n\n crystal = ase.io.read('CONTCAR')\n cell = crystal.get_cell()\n\n a = cell[0];\n b = cell[1];\n c = cell[2];\n na = round(norm(a),3);\n nb = round(norm(b),3);\n nc = round(norm(c),3);\n\n results['a'] = na\n results['b'] = nb\n results['c'] = nc\n results['alpha'] = round(degrees(arccos(dot(b,c)/nb/nc)),1)\n results['beta'] = round(degrees(arccos(dot(c,a)/nc/na)),1)\n results['gamma'] = round(degrees(arccos(dot(a,b)/na/nb)),1)\n\n print('Updating results')\n print results\n HT.updateResults(results, cid)\n\nprint('Updating settings')\nprint settings\nHT.updateSettings(settings, cid)\n\n\n#RELOOP to the script\n\n#exit() #No new calculations allowed.\n\ncid_new = HT.fetch(qid)\n\nprint('Fetched calculation '+str(cid_new)+' from queue '+str(qid)+'.')\n\nos.chdir(qdir+'/LOGFILES')\n\nif int(cid_new) > 0:\n print('Queue not empty: submit new job.')\n execute(' ssh login1 \"~/bin/HTtools/QZPsubmit_reeosleuven ' + str(qid) + ' ' + str(argv[2]) + ' ' + str(argv[3]) + ' ' + 
str(argv[4]) + '\"')\nelse:\n print('No calculations left; end script without submitting new job.')\n execute('touch '+str(qid)+'_'+str(cid)+'_fetch0')\n\nexecute('qstat')\n" }, { "alpha_fraction": 0.6419752836227417, "alphanum_fraction": 0.6574074029922485, "avg_line_length": 37.57143020629883, "blob_id": "16e1f179303dc6cfbbfa45ca5a974ea1f97a736b", "content_id": "5d0649a10562c67a14ad11eed7695c44e8fc50f4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1620, "license_type": "permissive", "max_line_length": 161, "num_lines": 42, "path": "/HTtools/HTsymmcheck", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nimport os,sys, shutil, subprocess\n\ncifid = sys.argv[1]\n\nshutil.copy(cifid + '.cif','temp/temp.cif')\n\n\n\nos.chdir('temp')\nwith open('temp.cif','r') as cif:\n lines = cif.readlines()\n temp = lines[8].split(' ',1)\n if '90' in lines[4] and '90' in lines[5] and '120' in lines[6]:\n temp[1] = 'H'\n # lines[8] = temp[0] + ' \\'' + temp[1].strip() + '\\'\\n'\n\nwith open('temp.cif','w') as cif:\n cif.write(''.join(lines))\n\nsubprocess.Popen('cif2struct temp.cif',stdout=subprocess.PIPE,stderr=subprocess.PIPE,shell=True).wait()\nsymm = subprocess.Popen('x symmetry',stdout=subprocess.PIPE,stderr=subprocess.PIPE,shell=True)\nout = symm.communicate()[0]\nif 'hexagonal' in out:\n with open('temp.struct','r') as struct:\n lines = struct.readlines()\n lines[1] = 'H' + lines[1][1:]\n with open('temp.struct','w') as struct:\n struct.write(''.join(lines))\n subprocess.Popen('x symmetry',stdout=subprocess.PIPE,stderr=subprocess.PIPE,shell=True).wait()\nelif 'STOP' in out:\n print(cifid,out)\nshutil.copy('temp.struct_st','temp.struct')\nsubprocess.Popen('x sgroup',stdout=subprocess.PIPE,stderr=subprocess.PIPE,shell=True).wait()\nshutil.copy('temp.struct_sgroup','temp.struct')\nnsymm = subprocess.Popen('cat temp.struct | grep \\'MBER OF SYMMETRY OPERATION\\' | awk \\'{print $1}\\'',stdout=subprocess.PIPE,shell=True).communicate()[0].strip()\nnatoms = subprocess.Popen('cat temp.struct | grep \\'LOCAL ROT MATRIX\\' | wc -l',stdout=subprocess.PIPE,shell=True).communicate()[0].strip()\n\nprint(cifid + '\\t' + natoms + '\\t' + nsymm)\nos.system('rm temp*')\nos.chdir('../')\n" }, { "alpha_fraction": 0.681502103805542, "alphanum_fraction": 0.731571614742279, "avg_line_length": 22.19354820251465, "blob_id": "214457f52392c4fa3e152f1509962b49f075e4b8", "content_id": "c38f0ddecb774496f6427f210f6c7761bdc5c0ab", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 719, "license_type": "permissive", "max_line_length": 174, "num_lines": 31, "path": "/examples/Backtesting/backtest_mf.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport HighThroughput.manage.queue as HTq\nimport HighThroughput.manage.calculation as HTc\nimport HighThroughput.ML.models.priority as HTML\nfrom HighThroughput.communication.mysql import mysql_query\nimport pickle, sys, json\nimport random\nimport numpy as np\nimport tensorflow as tf\n\n#tf.set_random_seed(666)\n#random.seed(666)\n#np.random.seed(666)\n\nnsample = 500\nstable = 0\n#target = sys.argv[1]\nrdict = {}\nrlist = []\nlimit = 0.05\nN_init = 50\nbatch_size = 1\n\nnewq = 256\n\ntarget = 'Ehull'\n\nstats = [6, 14, 22, 32, 40, 42]\n\nHTML.updateMLPriority(newq,stat=stats,modelClass= 
'sklearn.gaussian_process.GaussianProcessRegressor',target = 'Ehull',features = ['mass','Ecoh','EN','IP'] ,maxParallel=1)\n" }, { "alpha_fraction": 0.6644912362098694, "alphanum_fraction": 0.670424222946167, "avg_line_length": 36.88764190673828, "blob_id": "15ba11d0a2e3124397a1e2944c7b59ec20094aec", "content_id": "dea453f6532d8b15c565e033d5ac6877f6e79302", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3371, "license_type": "permissive", "max_line_length": 133, "num_lines": 89, "path": "/HTtools/HTchem", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport warnings\nwarnings.filterwarnings(\"ignore\")\nfrom pymatgen.ext.matproj import MPRester\nfrom pymatgen import Composition\nfrom pymatgen.entries.computed_entries import ComputedEntry\nfrom pymatgen.core.units import FloatWithUnit\nfrom pymatgen.analysis.reaction_calculator import ComputedReaction\nfrom pymatgen.apps.borg.hive import VaspToComputedEntryDrone\nfrom pymatgen.apps.borg.queen import BorgQueen\nfrom pymatgen.analysis.phase_diagram import *\nfrom pymatgen.entries.compatibility import MaterialsProjectCompatibility\nimport re, sys, json, os\ndrone = VaspToComputedEntryDrone()\nqueen = BorgQueen(drone, sys.argv[1], 4)\nentriesorig = queen.get_data()\nqueen.load_data(os.path.join(os.path.dirname(__file__), '../ML/data/missingels.json'))\nentriesextra = queen.get_data() \n\nif len(sys.argv) > 2:\n compat = MaterialsProjectCompatibility(check_potcar=False)\n entriesorig = compat.process_entries(entriesorig) \n\nfor entry in entriesorig:\n name = entry.name\n line = re.findall('[A-Z][^A-Z]*',name.replace('(','').replace(')',''))\n\nsearchset= set(re.sub('\\d',' ',' '.join(line)).split())\nentries = filter(lambda e: set(re.sub('\\d',' ',str(e.composition).replace(' ','')).split())==searchset, entriesorig)\n\nentriesextra = filter(lambda e: set(re.sub('\\d',' ',str(e.composition).replace(' ','')).split()) & searchset, entriesextra)\n#This initializes the REST adaptor. 
Put your own API key in.\na = MPRester(\"s2vUo6mzETOHLdbu\")\n#unknownEls = set(['Po','Ra'])\n\n#if unknownEls & searchset:\n# print('99', None)\n# exit()\n\n#print(a.get_stability(entriesorig))\nall_entries = a.get_entries_in_chemsys(set(searchset)) + list(entries) + list(entriesextra)\n\npd = PhaseDiagram(all_entries)\n\ndef name(potcar):\n name = ''\n for p in potcar:\n temp = (p.split(' ')[-2].split('_')[0])\n name += temp\n return name\n \n#for e in pd.stable_entries:\n# if e.entry_id == None:\n# reaction = pd.get_equilibrium_reaction_energy(e)\n# print(reaction,None)\n\n#for e in pd.unstable_entries:\n# decomp, e_above_hull = pd.get_decomp_and_e_above_hull(e)\n# pretty_decomp = [(\"{}:{}\".format(k.composition.reduced_formula, k.entry_id), round(v, 2)) for k, v in decomp.items()]\n# if e.entry_id == None:\n# print(e_above_hull,pretty_decomp)\n\nli_entries = [e for e in all_entries if e.composition.reduced_formula == \"Li\"]\nuli0 = min(li_entries, key=lambda e: e.energy_per_atom).energy_per_atom\n\nfor entry in entriesorig:\n name = entry.name\n\n line = re.findall('[A-Z][^A-Z]*',name.replace('(','').replace(')',''))\n\n searchset= set(re.sub('\\d',' ',' '.join(line)).split())\n #why am I refiltering\n temp = filter(lambda e: set(re.sub('\\d',' ',str(e.composition).replace(' ','')).split())==searchset, entries)\n\n all_entries = a.get_entries_in_chemsys(set(searchset)) + list(temp)\n\n #They place LGPS on the ehull, can you do that here?\n pd = PhaseDiagram(all_entries)\n\n el_profile = pd.get_element_profile(Element('Li'), entry.composition)\n\n for j, d in enumerate(el_profile):\n if np.isclose([x for x in list(zip(d['reaction']._all_comp,d['reaction']._coeffs)) if x[0] == Composition('Li')][-1][-1], 0):\n ind = j\n break\n \nout = {'electro' : (-np.round(el_profile[j]['chempot']- uli0,3),-np.round(el_profile[j+1]['chempot']- uli0,3))}\n\nprint(json.dumps(out))" }, { "alpha_fraction": 0.6659091114997864, "alphanum_fraction": 0.6772727370262146, "avg_line_length": 32.846153259277344, "blob_id": "016dfbfb574d681c09fd510a2494fcff172d5132", "content_id": "929d0f60e64568c229a5643acb3db90ff0ce48bb", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 440, "license_type": "permissive", "max_line_length": 141, "num_lines": 13, "path": "/HTtools/HTrestart", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n#HTrestart calcid or HTrestart queue file\n\nimport HighThroughput.manage.calculation as HT, sys,os\nfrom HighThroughput.communication.mysql import mysql_query\n\nif len(sys.argv) == 3:\n calc = mysql_query('SELECT `id` FROM `calculations` WHERE `queue` = ' + sys.argv[1] + ' AND `file` = ' + sys.argv[2] + ' AND `leaf` = 1')\nelse:\n calc = HT.get(sys.argv[1])\n\nif not isinstance(calc, str):\n HT.restart(calc['id'])\n" }, { "alpha_fraction": 0.5809524059295654, "alphanum_fraction": 0.585578203201294, "avg_line_length": 34.386138916015625, "blob_id": "e5c10a678f5eae14b284f0beb52a3a8c6ac3208f", "content_id": "7c9f83578a1d3ab74aa9b7f0788e934dfb01b50c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3675, "license_type": "permissive", "max_line_length": 194, "num_lines": 101, "path": "/communication/mysql.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "import http.client,urllib,json,os.path\r\nfrom datetime import datetime\r\nimport ssl\r\nimport time\r\nfrom functools 
import wraps\r\n\r\nfrom http.client import HTTPException, NotConnected, ResponseNotReady, RemoteDisconnected\r\n\r\n\r\ndef retry(exceptions, tries=3, delay=30, backoff=2, logger=None):\r\n    \"\"\"\r\n    Retry calling the decorated function using an exponential backoff.\r\n\r\n    Args:\r\n        exceptions: The exception to check; may be a tuple of\r\n            exceptions to check.\r\n        tries: Number of times to try (not retry) before giving up.\r\n        delay: Initial delay between retries in seconds.\r\n        backoff: Backoff multiplier (e.g. value of 2 will double the delay\r\n            each retry).\r\n        logger: Logger to use. If None, print.\r\n    \"\"\"\r\n    def deco_retry(f):\r\n\r\n        @wraps(f)\r\n        def f_retry(*args, **kwargs):\r\n            mtries, mdelay = tries, delay\r\n            while mtries > 1:\r\n                try:\r\n                    return f(*args, **kwargs)\r\n                except exceptions as e:\r\n                    msg = '{}, Retrying in {} seconds...'.format(e, mdelay)\r\n                    if logger:\r\n                        logger.warning(msg)\r\n                    else:\r\n                        print(msg)\r\n                    time.sleep(mdelay)\r\n                    mtries -= 1\r\n                    mdelay *= backoff\r\n            return f(*args, **kwargs)\r\n\r\n        return f_retry  # true decorator\r\n\r\n    return deco_retry\r\n\r\n\r\n@retry((HTTPException, NotConnected, ResponseNotReady, RemoteDisconnected))\r\ndef mysql_query(query,blob=''):\r\n    configfile= open(os.path.join(os.path.expanduser('~'),'.highthroughput'),'r')\r\n    login = json.loads(configfile.read())\r\n    context = ssl.create_default_context()\r\n    context.load_verify_locations('/etc/pki/tls/cert.pem')\r\n    conn = http.client.HTTPSConnection('HOSTNAME',context=context,timeout=900)\r\n    headers = {\"Content-type\": \"application/x-www-form-urlencoded\",\"Accept\": \"text/plain\"}\r\n    params = urllib.parse.urlencode({'query' : query.replace('<','%3C').replace('>','%3E'), 'blob' : blob, 'email' : login['email'], 'token' : login['token']}, quote_via=urllib.parse.quote_plus)\r\n    params = params.encode('utf-8')\r\n    conn.request('POST', 'APIPATH', params, headers)\r\n    response = None\r\n    response = conn.getresponse().read()\r\n    try: \r\n        data = json.loads(response.decode())\r\n        if len(data) == 1:\r\n            data = data[0]\r\n    except:\r\n        data = response.decode()\r\n\r\n    conn.close()\r\n    return data\r\n\r\ndef mysql_query_profile(query,blob=''):\r\n    startTime = datetime.now()\r\n    print('\\n\\nstart')\r\n    configfile= open(os.path.expanduser('~') + '/.highthroughput','r')\r\n    login = json.loads(configfile.read())\r\n    print('load config')\r\n    print(datetime.now() - startTime)\r\n    conn = http.client.HTTPConnection('HOSTNAME')\r\n    print('connected')\r\n    print(datetime.now() - startTime)\r\n    headers = {\"Content-type\": \"application/x-www-form-urlencoded\",\"Accept\": \"text/plain\"}\r\n    params = urllib.parse.urlencode({'query' : query, 'blob' : blob, 'email' : login['email'], 'token' : login['token']})\r\n    print('encoded')\r\n    print(datetime.now() - startTime)\r\n    conn.request('POST','API_PATH',params,headers)\r\n    print('posted')\r\n    print(datetime.now() - startTime)\r\n    response = conn.getresponse().read()\r\n    print('read response')\r\n    print(datetime.now() - startTime)\r\n    try: \r\n        data = json.loads(response)\r\n        if len(data) == 1:\r\n            data = data[0]\r\n    except:\r\n        data = response\r\n    print('json parsed')\r\n    print(datetime.now() - startTime)\r\n    print('\\n\\n')\r\n    return data\r\n\r\nowner = mysql_query('')\r\n" }, { "alpha_fraction": 0.3196598291397095, "alphanum_fraction": 0.3288310766220093, "avg_line_length": 89.83333587646484, "blob_id": "bba3bf4931a768aa26be23dbfb8c4a9ccaaff93e", "content_id": "15f391e0d4a2e989445ed2ee854e1af0f79b0a19", "detected_licenses": [ "MIT" ], "is_generated": false, 
"is_vendor": false, "language": "Python", "length_bytes": 5997, "license_type": "permissive", "max_line_length": 200, "num_lines": 66, "path": "/HTtools/POTgen", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nimport subprocess\n\n#from tables import *\nfrom sys import argv\n\nimport argparse\n\nparser = argparse.ArgumentParser(description='Generate POTCAR.')\nparser.add_argument('els', nargs='*')\nparser.add_argument('-s','--set',default='zintl')\nargs = parser.parse_args()\n\n\n#Be and Mg and Bi differ with MP, transition metals not checked\n\n\nif args.set == 'zintl':\n potentials = {'H':'','He':'','Li':'_sv', 'Be':'', 'B':'', 'C':'', 'N':'', 'O':'', 'F':'', 'Ne':'', 'Na':'_pv', 'Mg':'',\n 'Al':'', 'Si':'', 'P':'', 'S':'', 'Cl':'', 'Ar':'', 'K':'_sv', 'Ca':'_sv', 'Sc':'_sv', 'Ti':'_sv', 'V':'_sv',\n 'Cr':'_pv', 'Mn':'_pv', 'Fe':'', 'Co':'', 'Ni':'', 'Cu':'', 'Zn':'', 'Ga':'_d', 'Ge':'_d', 'As':'', 'Se':'',\n 'Br':'', 'Kr':'', 'Rb':'_sv', 'Sr':'_sv', 'Y':'_sv', 'Zr':'_sv', 'Nb':'_sv', 'Mo':'_sv', 'Tc':'_pv', 'Ru':'_pv',\n 'Rh':'_pv', 'Pd':'', 'Ag':'', 'Cd':'', 'In':'_d', 'Sn':'_d', 'Sb':'', 'Te':'', 'I':'', 'Xe':'', 'Cs':'_sv',\n 'Ba':'_sv', 'Lu':'_3', 'Hf':'_pv', 'Ta':'_pv', 'W':'_pv', 'Re':'', 'Os':'', 'Ir':'', 'Pt':'', 'Au':'',\n 'Hg':'', 'Tl':'_d', 'Pb':'_d', 'Bi':'_d', 'Po':'_d', 'At':'_d','Rn':'', 'La' : '',\n 'Ce' : '', 'Pr' : '_3', 'Nd' : '_3', 'Pm' : '_3', 'Sm' : '_3', 'Eu' : '_2', 'Gd' : '_3','Tb' : '_3', 'Dy' : '_3' , 'Ho' : '_3', 'Er' : '_3', 'Tm' : '_3','Yb' : '_2',\n 'Ac' : '', 'Th' : '', 'Pa' : '', 'U' : '', 'Np' : '', 'Pu' : '', 'Am' : '', 'Cm' : '', 'Bk' : '?' , 'Ra' : '_sv', 'Fr' : '_sv'}\n potdir = '$VSC_DATA_VO/POTCAR_PBE/'\nelif args.set == 'mp':\n potentials = {'H':'','He':'','Li':'_sv', 'Be':'_sv', 'B':'', 'C':'', 'N':'', 'O':'', 'F':'', 'Ne':'', 'Na':'_pv', 'Mg':'_pv',\n 'Al':'', 'Si':'', 'P':'', 'S':'', 'Cl':'', 'Ar':'', 'K':'_sv', 'Ca':'_sv', 'Sc':'_sv', 'Ti':'_pv', 'V':'_sv',\n 'Cr':'_pv', 'Mn':'_pv', 'Fe':'', 'Co':'', 'Ni':'', 'Cu':'', 'Zn':'', 'Ga':'_d', 'Ge':'_d', 'As':'', 'Se':'',\n 'Br':'', 'Kr':'', 'Rb':'_sv', 'Sr':'_sv', 'Y':'_sv', 'Zr':'_sv', 'Nb':'_sv', 'Mo':'_sv', 'Tc':'_pv', 'Ru':'_pv',\n 'Rh':'_pv', 'Pd':'', 'Ag':'', 'Cd':'', 'In':'_d', 'Sn':'_d', 'Sb':'', 'Te':'', 'I':'', 'Xe':'', 'Cs':'_sv',\n 'Ba':'_sv', 'Lu':'_3', 'Hf':'_pv', 'Ta':'_pv', 'W':'_pv', 'Re':'', 'Os':'', 'Ir':'', 'Pt':'', 'Au':'',\n 'Hg':'', 'Tl':'_d', 'Pb':'_d', 'Bi':'', 'Po':'_d', 'At':'_d','Rn':'', 'La' : '', 'Lu' : '_3',\n 'Ce' : '', 'Pr' : '_3', 'Nd' : '_3', 'Pm' : '_3', 'Sm' : '_3', 'Eu' : '', 'Gd' : '','Tb' : '_3', 'Dy' : '_3' , 'Ho' : '_3', 'Er' : '_3', 'Tm' : '_3','Yb' : '',\n 'Ac' : '', 'Th' : '', 'Pa' : '', 'U' : '', 'Np' : '', 'Pu' : '', 'Am' : '', 'Cm' : '', 'Bk' : '?', 'Ra' : '_sv', 'Fr' : '_sv'}\n potdir = '$VSC_DATA_VO/shared/potcars/old_potpaw_PBE'\nelif args.set == 'kurt':\n potentials = {'H':'_h_GW','He':'_GW','Li':'_sv_GW', 'Be':'_sv_GW', 'B':'_GW', 'C':'_GW', 'N':'_h_GW', 'O':'_h_GW', 'F':'_h_GW', 'Ne':'_GW', 'Na':'_sv_GW', 'Mg':'_sv_GW',\n 'Al':'_GW', 'Si':'_GW', 'P':'_GW', 'S':'_GW', 'Cl':'_GW', 'Ar':'_GW', 'K':'_sv_GW', 'Ca':'_sv_GW', 'Sc':'_sv_GW', 'Ti':'_sv_GW', 'V':'_sv_GW',\n 'Cr':'_sv_GW', 'Mn':'_sv_GW', 'Fe':'_sv_GW', 'Co':'_sv_GW', 'Ni':'_sv_GW', 'Cu':'_sv_GW', 'Zn':'_sv_GW', 'Ga':'_d_GW', 'Ge':'_d_GW', 'As':'_GW', 'Se':'_GW',\n 'Br':'_GW', 'Kr':'_GW', 'Rb':'_sv_GW', 'Sr':'_sv_GW', 'Y':'_sv_GW', 'Zr':'_sv_GW', 'Nb':'_sv_GW', 'Mo':'_sv_GW', 'Tc':'_sv_GW', 'Ru':'_sv_GW',\n 'Rh':'_sv_GW', 
'Pd':'_sv_GW', 'Ag':'_sv_GW', 'Cd':'_sv_GW', 'In':'_d_GW', 'Sn':'_d_GW', 'Sb':'_d_GW', 'Te':'_GW', 'I':'_GW', 'Xe':'_GW', 'Cs':'_sv_GW',\n 'Ba':'_sv_GW', 'La':'_GW',\n 'Lu':'_3', 'Hf':'_sv_GW', 'Ta':'_sv_GW', 'W':'_sv_GW', 'Re':'_sv_GW', 'Os':'_sv_GW', 'Ir':'_sv_GW', 'Pt':'_sv_GW', 'Au':'_sv_GW',\n 'Hg':'_sv_GW', 'Tl':'_d_GW', 'Pb':'_d_GW', 'Bi':'_d_GW', 'Po':'_d_GW', 'At':'_d_GW','Rn':'_d_GW', 'Ac' : '', 'Am' : '', 'Ce' : '_GW', 'Dy' : '_3', 'Er' : '_3' , 'Eu' : '_2',\n 'Gd' : '_3', 'Ho' : '_3', 'Nd' : '_3', 'Np' : '', 'Pa' : '', 'Pm' : '_3', 'Pr' : '_3', 'Pu' : '', 'Ra' : '_sv', 'Sm' : '_3', 'Tb' : '_3', 'Th' : '', 'Tm' : '_3', 'U' : '', 'Yb' : '_2'}\n potdir = '$VSC_DATA_VO/shared/potcars/potpaw_PBE.54'\nelif args.set == 'kurtnohard':\n potentials = {'H':'_GW','He':'_GW','Li':'_sv_GW', 'Be':'_sv_GW', 'B':'_GW', 'C':'_GW', 'N':'_GW', 'O':'_GW', 'F':'_GW', 'Ne':'_GW', 'Na':'_sv_GW', 'Mg':'_sv_GW',\n 'Al':'_GW', 'Si':'_GW', 'P':'_GW', 'S':'_GW', 'Cl':'_GW', 'Ar':'_GW', 'K':'_sv_GW', 'Ca':'_sv_GW', 'Sc':'_sv_GW', 'Ti':'_sv_GW', 'V':'_sv_GW',\n 'Cr':'_sv_GW', 'Mn':'_sv_GW', 'Fe':'_sv_GW', 'Co':'_sv_GW', 'Ni':'_sv_GW', 'Cu':'_sv_GW', 'Zn':'_sv_GW', 'Ga':'_d_GW', 'Ge':'_d_GW', 'As':'_GW', 'Se':'_GW',\n 'Br':'_GW', 'Kr':'_GW', 'Rb':'_sv_GW', 'Sr':'_sv_GW', 'Y':'_sv_GW', 'Zr':'_sv_GW', 'Nb':'_sv_GW', 'Mo':'_sv_GW', 'Tc':'_sv_GW', 'Ru':'_sv_GW',\n 'Rh':'_sv_GW', 'Pd':'_sv_GW', 'Ag':'_sv_GW', 'Cd':'_sv_GW', 'In':'_d_GW', 'Sn':'_d_GW', 'Sb':'_d_GW', 'Te':'_GW', 'I':'_GW', 'Xe':'_GW', 'Cs':'_sv_GW',\n 'Ba':'_sv_GW', 'La':'_GW',\n 'Lu':'_3', 'Hf':'_sv_GW', 'Ta':'_sv_GW', 'W':'_sv_GW', 'Re':'_sv_GW', 'Os':'_sv_GW', 'Ir':'_sv_GW', 'Pt':'_sv_GW', 'Au':'_sv_GW',\n 'Hg':'_sv_GW', 'Tl':'_d_GW', 'Pb':'_d_GW', 'Bi':'_d_GW', 'Po':'_d_GW', 'At':'_d_GW','Rn':'_d_GW', 'Ac' : '', 'Am' : '', 'Ce' : '_GW', 'Dy' : '_3', 'Er' : '_3' , 'Eu' : '_2',\n 'Gd' : '_3', 'Ho' : '_3', 'Nd' : '_3', 'Np' : '', 'Pa' : '', 'Pm' : '_3', 'Pr' : '_3', 'Pu' : '', 'Ra' : '_sv', 'Sm' : '_3', 'Tb' : '_3', 'Th' : '', 'Tm' : '_3', 'U' : '', 'Yb' : '_2'}\n potdir = '$VSC_DATA_VO/shared/potcars/potpaw_PBE.54'\nf = open('POTCAR','w')\nf.close()\nfor i in range(0,len(args.els)):\n subprocess.call('cat ' + potdir + '/' + args.els[i] + potentials[args.els[i]] + '/POTCAR >> POTCAR', shell=True)\n\n\n" }, { "alpha_fraction": 0.6335597634315491, "alphanum_fraction": 0.6449421644210815, "avg_line_length": 25.96039581298828, "blob_id": "c3a33343f538ee6b2c44cd423bb01e6de348adb6", "content_id": "e24251f681e0fea375029c9223979c55c604c436", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5447, "license_type": "permissive", "max_line_length": 127, "num_lines": 202, "path": "/examples/VASP.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, random, re, shutil, subprocess, sys, time\n\nimport HighThroughput.manage.calculation as HT\nimport HighThroughput.manage.template as template\nimport HighThroughput.io\nfrom HighThroughput.utils.generic import *\nfrom HighThroughput.modules.VASP import *\n\nqid = sys.argv[1]\nsubmit_arg = '' + sys.argv[2] + ' ' + sys.argv[3] + ' ' + sys.argv[4]\n\nqdir = os.path.join(os.getenv('VSC_SCRATCH'), '/queues/', str(qid))\nos.chdir(os.path.join(qdir, '/fetchid'))\n\nstart = 0\ncounter = 0\n\n#Fetching jobs, ensuring we don't still somehow run the same job twice\nprint('Fetching...')\ncinfo = HT.fetchgetstart(qid)\nprint('Fetched ' + str(cid))\ncfile = cinfo['file']\nstatus = 
cinfo['stat']\n\nif int(cid) <= 0:\n print('No calculations left')\n #This is the logfile directory (PBS and own logs)\n os.chdir(os.path.join(qdir,'/LOGFILES'))\n #Make a file showing the queue no longer has any feasible jobs left, imperfect, new jobs may be added when others finish\n execute('touch ' + str(qid) + '_fetch0')\n sys.exit()\n\n#Real script starts here\n\ncid = cinfo['id']\n\ncfile = cinfo['file']\nstatus = int(cinfo['stat'])\ninputfile = os.path.join(qdir, '/import/', str(cfile) + '.vasp')\n\nprint('THE STAT is' + str(status))\n\n#temp variable for INCAR for manual mods\nINCAR = cinfo['settings']['INCAR']\n\nif int(qid) in [149,150,155,156]:\n INCAR['ENCUT'] = 600\n\nif detectSP(inputfile):\n INCAR['ISPIN'] = 2\n\nprint('Server: ' + cinfo['server'])\n\n#Setup default 1 point per node etc\n\nprint cinfo['settings']\n\nparallelSetup(cinfo['settings'])\n\nprint cinfo['settings']\n\nINCAR['KPAR'] = str(sys.argv[3])\n\ncinfo['settings']['INCAR'] = INCAR\n\nif status== 1:\n print('STEP 1 started')\n step = 1\n\n os.chdir(qdir+'/CALCULATIONS')\n mkdir(str(cfile))\n\n os.chdir(qdir+'/CALCULATIONS/'+str(cfile))\n\n mkdir('./STEP1')\n os.chdir('./STEP1')\n\n shutil.copy(qdir + '/import/' + str(cfile) + '.vasp', './POSCAR')\n poscar = open('POSCAR','r')\n lines = poscar.readlines()\n elements = lines[5][:-1].lstrip()\n\n\n HighThroughput.io.VASP.writeKPOINTS(KPOINTS_dict, os.getcwd())\n HighThroughput.io.VASP.writeINCAR(INCAR_dict, os.getcwd())\n\n execute('POTgen ' + str(elements))\n\n poscar.close()\n\n execute('free -m; date; touch CHGCAR WAVECAR')\n remove(''+ qdir+'/CALCULATIONS/' + str(cfile) + '/STEP' +str(step)+ '/tempout')\n\n\n\n print('END STATUS 1 / STEP 1')\n\nelif status==3:\n print('STEP 3 started')\n\n step = 2\n\n os.chdir(qdir+'/CALCULATIONS/'+str(cfile))\n if os.path.isdir('./STEP'+str(step)):\n os.rmdir('./STEP'+str(step))\n mkdir('./STEP'+str(step))\n os.chdir('./STEP'+str(step))\n\n shutil.copy('../STEP'+str(step-1)+'/CONTCAR', './POSCAR')\n shutil.copy('../STEP'+str(step-1)+'/POTCAR', './POTCAR')\n shutil.copy('../STEP'+str(step-1)+'/CHGCAR', './CHGCAR')\n #shutil.copy('../STEP'+str(step-1)+'/WAVECAR', './WAVECAR')\n\n\n settings = {'INCAR':INCAR_dict, 'KPOINTS':KPOINTS_dict}\n\n HighThroughput.io.VASP.writeKPOINTS(KPOINTS_dict)\n HighThroughput.io.VASP.writeINCAR(INCAR_dict)\n\n execute('free -m; date; touch CHGCAR WAVECAR')\n remove(''+ qdir+'/CALCULATIONS/' + str(cfile) + '/STEP' +str(step)+ '/tempout')\n\n execute('mympirun --output ' +qdir+'/CALCULATIONS/' + str(cfile) + '/STEP'+str(step)+'/tempout vasp')\n\n execute('free -m; date')\n\n print('END STATUS 3 / STEP 2')\n\nelse:\n print('Not a valid status. Calculation terminated.')\n sys.exit()\n\n\n#UPDATE POTCAR INFO\n\nPOTCAR_version = execute('grep -a \\'TITEL\\' POTCAR | awk \\'{ print $4 }\\'')\nsettings['POTCAR'] = POTCAR_version.strip().replace('\\n',', ')\n\n#END CALCULATION AND FETCH RESULTS\n\nenergy = execute('grep \\'energy without entropy\\' OUTCAR | tail -1 | awk \\'{ print $4 }\\'')\n\nif 'error' in locals():\n HT.updateResults({'error':error}, cid)\nelif energy=='' or not 'energy' in locals():\n HT.updateResults({'error':'Energy missing'}, cid)\n print('Energy missing! Error...')\nelif not os.path.isfile('CHGCAR') and not os.path.isfile('CHG'):\n HT.updateResults({'error':'CHGCAR and CHG missing. VASP Error?'}, cid)\n print('CHGCAR/CHG missing. VASP Error?')\nelse:\n print('Energy OK. 
Ending calculation, deleting junk files and fetching results.')\n HT.end(cid)\n\n if status<19:\n os.remove('CHG')\n\n results = HT.getResults(cid)\n\n #could leave this out when working with QZP's\n\n numberofatoms = lines[6][:-1].lstrip()\n numberofatoms = \" \".join(numberofatoms.split())\n numberofatoms = sum(map(int, numberofatoms.split(' ')))\n\n energy = float(energy)\n\n if step == 1:\n results['E0PBE'] = energy\n else:\n results['E0HSE06'] = energy\n results['E0PBE'] = float(execute('grep \\'energy without entropy\\' ../STEP1/OUTCAR | tail -1 | awk \\'{ print $4 }\\''))\n\n\n\n print('Updating results')\n print results\n# updateresults could be assumed from dictionary keys and automated.\n HT.updateResults(results, cid)\n\nprint('Updating settings')\nprint settings\nHT.updateSettings(settings, cid)\n\n\n#RELOOP to the script\n\ncid_new = HT.fetch(qid)\n\nprint('Fetched calculation '+str(cid_new)+' from queue '+str(qid)+'.')\n\nos.chdir(qdir+'/LOGFILES')\n\nif int(cid_new) > 0:\n# print('Script ONCE: do not submit new job')\n #execute('vaspsubmit ' + str(qid) + ' ' + str(submit_arg))\nelse:\n print('No calculations left; end script without submitting new job.')\n execute('touch '+str(qid)+'_'+str(cid)+'_fetch0')\n\nexecute('qstat')\n\n" }, { "alpha_fraction": 0.6406926512718201, "alphanum_fraction": 0.649350643157959, "avg_line_length": 37.5, "blob_id": "598f28eb9424c64dd13be3d02e9f0cdfc945c7d5", "content_id": "d5ef28a99b6775d9ae20e9a21972275a14a9c9e7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 693, "license_type": "permissive", "max_line_length": 152, "num_lines": 18, "path": "/HTtools/HTcleanup", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport HighThroughput.manage as HT\nimport os,subprocess,sys\nfrom HighThroughput.communication.mysql import *\n\nserver = os.getenv('VSC_INSTITUTE_CLUSTER')\nqstat = subprocess.Popen('qstat | grep R | awk \\'{print $1}\\' | cut -d. 
-f1',stdout=subprocess.PIPE,shell=True).communicate()[0].decode().split()\n\nif not isinstance(qstat,str):\n end = ' AND `jobid` NOT IN (' + ','.join(qstat) + ')'\n\n\ncrashes = mysql_query('SELECT `id` FROM `calculations` WHERE `queue` = ' + sys.argv[1] + ' AND `stat` % 2 = 1 AND `server` = \\'' + server + '\\'' + end)\n\nif not isinstance(crashes,str):\n for calc in crashes:\n print(calc['id'])\n HT.calculation.restart(calc['id'])\n" }, { "alpha_fraction": 0.607740581035614, "alphanum_fraction": 0.6391213536262512, "avg_line_length": 31.039106369018555, "blob_id": "be04b7c80108d26675b3b1231a900f678ca8a3bb", "content_id": "1351fe9694d88ec5fc16159dff045daaab846c60", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5736, "license_type": "permissive", "max_line_length": 143, "num_lines": 179, "path": "/examples/DefectScreening/HSEflow.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, random, re, shutil, subprocess, sys, time,math\n\nimport HighThroughput.manage.calculation as HT\nimport HighThroughput.manage.template as template\nfrom HighThroughput.io.VASP import *\nfrom HighThroughput.utils.generic import *\nfrom HighThroughput.modules.VASP import *\nfrom HighThroughput.errors.generic import *\nqid = sys.argv[1]\nsubmit_arg = '' + sys.argv[2] + ' ' + sys.argv[3] + ' ' + sys.argv[4]\n\nif (int(qid) == 160 or int(qid) == 159 or int(qid) == 158 or int(qid) < 156 or int(qid) > 160) and os.getenv('VSC_INSTITUTE_CLUSTER') == 'muk':\n qdir = os.path.join('/gpfs/scratch/projects/project_gpilot/vsc40479', 'queues', str(qid))\nelif os.getenv('VSC_INSTITUTE_CLUSTER') == 'muk':\n qdir = os.path.join('/gpfs/scratch/users/vsc404/vsc40479', 'queues', str(qid))\nelse:\n qdir = os.path.join('/user/scratch/gent/vsc404/vsc40479', 'queues', str(qid))\n#os.chdir(os.path.join(qdir, '/fetchid'))\n\n\n#Fetching jobs, ensuring we don't still somehow run the same job twice\nprint('Fetching...')\ncinfo = HT.fetchgetstart(qid)\nprint('Fetched ' + str(cinfo['id']))\n\nif int(cinfo['id']) <= 0:\n print('No calculations left')\n #This is the logfile directory (PBS and own logs)\n os.chdir(os.path.join(qdir,'/LOGFILES'))\n #Make a file showing the queue no longer has any feasible jobs left, imperfect, new jobs may be added when others finish\n # execute('touch ' + str(qid) + '_fetch0')\n sys.exit()\n\n#Real script starts here\n\ncfile = cinfo['file']\ncid = cinfo['id']\ncinfo['settings'] = json.loads(cinfo['settings'])\n#cinfo['settings']['INCAR']['ENCUT'] = 1000\n#cinfo['settings']['KPOINTS']['K'] = \"30 30 30\"\nstatus = int(cinfo['stat'])\n\nprint('THE STAT is' + str(status))\n\nprint('Server: ' + cinfo['server'])\n\n\nos.chdir(qdir+'/CALCULATIONS/')\nmkdir(str(cfile))\n\nos.chdir(qdir+'/CALCULATIONS/'+str(cfile))\nstep = int(math.ceil(float(cinfo['stat'])/2))\nprint('step' + str(step))\nmkdir('./STEP' + str(step))\nos.chdir('./STEP' + str(step))\n\nif os.path.isfile('aborted'):\n os.remove('aborted')\n\nif os.path.isfile('STOPCAR'):\n os.remove('STOPCAR')\n\ncheckpointStart(cinfo,10000)\n\nparent = HT.getSettings(cinfo['parent'])\n\nif 'continue' not in parent.keys():\n parent['continue'] = 0\nif 'continued' not in parent.keys():\n parent['continued'] = 0\n\nif int(parent['continue']) > int(parent['continued']):\n print('Continuing job')\n cont(cinfo)\nelse:\n inherit(status,qdir,cfile)\n initialize(cinfo['settings'])\n\nif detectSP('POSCAR'):\n cinfo['settings']['INCAR']['ISPIN'] 
= 2\n\nif int(qid) in [150,156,171]:\n cinfo['settings']['INCAR']['ENCUT'] = 600\n cinfo['settings']['INCAR']['NELM'] = 60\n #cinfo['settings']['INCAR']['ISPIN'] = 1\nif int(qid) != 171 and int(qid) != 156:\n cinfo['settings']['INCAR']['TIME'] = 0.1\n cinfo['settings']['INCAR']['NELM'] = 60\n\n\nif int(qid) < 155:\n cinfo['settings']['KPOINTS']['K'] = '6 6 6'\n cinfo['settings']['INCAR']['NKRED'] = '6'\n cinfo['settings']['INCAR']['NELM'] = 120\nelse:\n cinfo['settings']['KPOINTS']['K'] = '4 4 4'\n cinfo['settings']['INCAR']['NKRED'] = '4'\n\nif int(qid) == 167 or int(qid) == 171:\n cinfo['settings']['KPOINTS']['K'] = '12 12 12'\n cinfo['settings']['INCAR']['NKRED'] = '2'\ncinfo['settings']['INCAR']['LSUBROT'] = '.TRUE.'\nif int(cfile) == 10025486:\n cinfo['settings']['INCAR']['ALGO'] = 'A'\n cinfo['settings']['INCAR']['NELM'] = 30\n cinfo['settings']['INCAR']['TIME'] = 1.95\nif int(cfile) == 10031153:\n cinfo['settings']['INCAR']['MAGMOM'] = '2*1.0000 2*-1.0000'\n\nif int(cfile) == 10031210:\n cinfo['settings']['INCAR']['MAGMOM'] = '1. 1. -1. -1.'\n\nparallelSetup(cinfo['settings'])\nif os.getenv('VSC_INSTITUTE_CLUSTER') == 'muk':\n cinfo['settings']['INCAR']['NCORE'] = '16'\nperror = HT.getResults(cinfo['parent'])\nif perror.get('errors') != None:\n fixerrors(cinfo)\nwriteSettings(cinfo['settings'])\nrun()\nfinderrors(cinfo)\nif os.path.isfile('STOPCAR'):\n os.remove('STOPCAR')\n\n\n\nprint('END STATUS ' + cinfo['stat'])\n\n\n#UPDATE POTCAR INFO\n\nPOTCAR_version = execute('grep -a \\'TITEL\\' POTCAR | awk \\'{ print $4 }\\'')\ncinfo['settings']['POTCAR'] = POTCAR_version.strip().replace('\\n',', ')\n\nif os.path.isfile('aborted'):\n print('Calculation aborted')\n execute('betaHSE ' + str(qid) + ' ' + str(submit_arg))\n sys.exit()\n#END CALCULATION AND FETCH RESULTS\n\nenergy = execute('grep \\'energy without entropy\\' OUTCAR | tail -1 | awk \\'{ print $4 }\\'')\n\nif 'error' in locals():\n HT.updateResults({'error':error}, cid)\nelif energy=='' or not 'energy' in locals():\n HT.updateResults({'error':'Energy missing'}, cid)\n print('Energy missing! Error...')\nelif not os.path.isfile('CHGCAR') and not os.path.isfile('CHG'):\n HT.updateResults({'error':'CHGCAR and CHG missing. VASP Error?'}, cid)\n print('CHGCAR/CHG missing. VASP Error?')\nelse:\n print('Energy OK. 
Ending calculation, deleting junk files and fetching results.')\n HT.end(cid)\n #cleanup function\n os.remove('CHG')\n\n results = json.loads(cinfo['results'])\n #could leave this out when working with QZP's\n\n energy = float(energy)\n\n if status == 1:\n results['E0PBE'] = energy\n else:\n results['E0HSE06'] = energy\n results['E0PBE'] = float(execute('grep \\'energy without entropy\\' ../STEP1/OUTCAR | tail -1 | awk \\'{ print $4 }\\''))\n\n print('Updating results')\n# updateresults could be assumed from dictionary keys and automated.\n HT.updateResults(results, cid)\n\nprint('Updating settings')\nHT.updateSettings(cinfo['settings'], cid)\nnewcalc = int(HT.fetch(str(qid)))\nprint(str(newcalc))\nif int(HT.fetch(str(qid))) > 0 and int(qid) != 167 and int(qid) != 171:\n execute('betaHSE ' + str(qid) + ' ' + str(submit_arg))\n# -*- coding: utf-8 -*-\n\n" }, { "alpha_fraction": 0.7633135914802551, "alphanum_fraction": 0.7633135914802551, "avg_line_length": 83, "blob_id": "e5db836fcc9f5bdb348bb97c002c9ffae6fe2025", "content_id": "56c72077eb31c59cf50fa513f306ccba76d75782", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 169, "license_type": "permissive", "max_line_length": 88, "num_lines": 2, "path": "/manage/__init__.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "__all__ = ['calculation','calculation_profile','material','queue','template','workflow']\nfrom . import calculation,calculation_profile,material,queue,template,workflow\n\n" }, { "alpha_fraction": 0.7888198494911194, "alphanum_fraction": 0.7888198494911194, "avg_line_length": 79, "blob_id": "f9fcaa25902ed13e806d7445302c701990f0643c", "content_id": "6d82d978376aa3e5ae139390a9a074c5300f957b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 161, "license_type": "permissive", "max_line_length": 83, "num_lines": 2, "path": "/errors/__init__.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "__all__ = ['generic','genericdetectors','genericfixes','VASPdetectors','VASPfixes']\nfrom . 
import generic,genericdetectors,genericfixes,VASPdetectors,VASPfixes\n\n" }, { "alpha_fraction": 0.6319218277931213, "alphanum_fraction": 0.6482084393501282, "avg_line_length": 42.78571319580078, "blob_id": "8c28a92c5120b43e4a70c0a7882371b0f5d0e0f5", "content_id": "a3094ec0036fdf020a615d7f9c026176fb9ec39c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 614, "license_type": "permissive", "max_line_length": 89, "num_lines": 14, "path": "/HTtools/HTgather", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os,shutil\n\nif not os.path.isdir('vasprun'):\n os.mkdir('vasprun')\n\nfor dirs in os.listdir(os.curdir):\n if dirs[0] == '1':\n if not os.path.isdir(os.path.join('vasprun',dirs)):\n os.mkdir(os.path.join('vasprun',dirs))\n shutil.copy2(os.path.join(dirs,'STEP4/vasprun.xml'),os.path.join('vasprun',dirs))\n shutil.copy2(os.path.join(dirs,'STEP4/POSCAR'),os.path.join('vasprun',dirs))\n shutil.copy2(os.path.join(dirs,'STEP4/CONTCAR'),os.path.join('vasprun',dirs))\n shutil.copy2(os.path.join(dirs,'STEP4/POTCAR'),os.path.join('vasprun',dirs))\n\n" }, { "alpha_fraction": 0.5720294713973999, "alphanum_fraction": 0.6182965040206909, "avg_line_length": 37.040000915527344, "blob_id": "d569e58706755c886c3bf1873b2e73cd89312960", "content_id": "a88d4caab982d5abb7ac0239bd0fc546b187cafe", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 951, "license_type": "permissive", "max_line_length": 316, "num_lines": 25, "path": "/HTtools/HTpurge", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nimport os,sys\nfrom HighThroughput.communication.mysql import mysql_query\n\ndef clean(fname,qdir):\n cdir = [os.path.join(qdir,'CALCULATIONS',fname,x) for x in ['CALIB/low','RELAX/vol','RELAX/all','EOSL/1.0','EOSL/1.02','EOSL/0.98','EOSL/1.04','EOSL/0.96','EOSL/1.06','EOSL/0.94','RELAX/internalL','CALIB/high','EOSH/1.0','EOSH/1.02','EOSH/0.98','EOSH/1.04','EOSH/0.96','EOSH/1.06','EOSH/0.94','RELAX/internalH']]\n for c in cdir:\n kill = ['CHGCAR','CHGCAR.gz','WAVECAR','WAVECAR.gz','CHG','LOCPOT']\n for k in kill:\n kfile=os.path.join(c,k)\n if os.path.isfile(kfile):\n os.remove(kfile)\n\nqid = sys.argv[1]\n\nuser = os.getenv('USER')\nscratch = os.getenv('VSC_SCRATCH_VO')\nqdir = os.path.join(scratch, 'queues', str(qid))\n\n\ndone = mysql_query('SELECT `file` FROM `calculations` WHERE `queue` = ' + sys.argv[1] + ' and `stat` = 42')\n\nfor row in done:\n clean(row['file'],qdir)\n" }, { "alpha_fraction": 0.5154733061790466, "alphanum_fraction": 0.520024299621582, "avg_line_length": 39.69135665893555, "blob_id": "551ecd79b879bee48649921c79a5401fb26b5458", "content_id": "8cecfef5d98476013d007da540e6abd566208299", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3296, "license_type": "permissive", "max_line_length": 148, "num_lines": 81, "path": "/manage/workflow.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "from HighThroughput.communication.mysql import *\nimport os\n\n#Could again make this global or a class\ndef get(workflow, stat = None):\n extra = ''\n if stat != None:\n extra = ' AND `stat` = ' + str(stat)\n return mysql_query('SELECT * FROM `workflows` WHERE `id` = ' + str(workflow) + extra + ' ORDER BY `stat`')\n\ndef showAll():\n return mysql_query('SELECT * 
FROM `workflows`')\n\ndef add(name,entries):\n owner = mysql_query('')\n newid = mysql_query('SELECT MAX(`id`) AS `newid` FROM `workflows` WHERE 1=1 OR 1=1')\n newid = int(newid['newid']) + 1\n for entry in entries:\n if('id' not in entry.keys()):\n entry['id'] = newid\n fields = '`owner`, '\n values = owner + ', '\n for key in entry.keys():\n fields += '`' + str(key) + '`'\n if not str(entry[key]).isdigit():\n values += '\\'' + str(entry[key]) + '\\''\n else:\n values += str(entry[key])\n fields += ', '\n values += ', '\n result = mysql_query('INSERT INTO `workflows` (`name`, ' + str(fields[:-2]) + ') VALUES (\\'' + str(name) + '\\', ' + str(values[:-2]) + ')')\n #if(int(result) > 0):\n print('Added workflow entry ' + str(entry['stat']) + ' for workflow ' + str(name) + ' (' + str(entry['id']) + ')')\n # else:\n # print 'Adding workflow failed (contact Michael)'\n return entry['id']\n\ndef modify(params):\n query = 'UPDATE `workflows` SET '\n for key in params.keys():\n if key != 'id' and key != 'stat':\n query += '`' + key + '` ='\n if not params[key].isdigit():\n query += '\\'' + params[key] + '\\''\n else:\n query += params[key]\n query += ', '\n query = query[:-2] + ' WHERE `id` = ' + str(params['id']) + ' AND `stat` = ' + str(params['stat'])\n result = mysql_query(query)\n if (result == '1'):\n print('The workflow has been modified. Please verify.')\n else:\n print('Help... Me...')\n return int(result)\n \ndef remove(wid, stat):\n wid = str(wid)\n stat = str(stat)\n name = mysql_query('SELECT `name` FROM `workflows` WHERE `id` = ' + wid + ' LIMIT 1')\n result = mysql_query('DELETE FROM `workflows` WHERE `id` = ' + wid + ' AND `stat` = ' + stat)\n if (result == '1'):\n print('Status ' + stat + ' of the ' + name['name'] + '(' + wid + ') workflow has been removed.')\n else:\n print('Removing the ' + stat + ' stat of ' + name['name'] + '(' + wid + ') has failed.')\n return int(result)\n\ndef removeAll(wid):\n wid = str(wid)\n name = mysql_query('SELECT `name` FROM `workflows` WHERE `id` = ' + wid + ' LIMIT 1')\n result = mysql_query('DELETE FROM `workflows` WHERE `id` = ' + wid)\n if (int(result) > 0):\n print('The ' + name['name'] + ' (' + wid + ') workflow has been removed.')\n else:\n print('Removing ' + name['name'] + ' (' + wid + ') has failed.')\n\ndef setPriority(priority,wid,stat):\n if(str(priority).isdigit()):\n return int(mysql_query('UPDATE `workflows` SET `priority` = ' + str(priority) + ' WHERE `id` = ' + str(wid) + ' AND `stat` = ' + str(stat)))\n else:\n print('Priorities are number, the higher the number the higher the priority')\n return 0\n" }, { "alpha_fraction": 0.7314410209655762, "alphanum_fraction": 0.7314410209655762, "avg_line_length": 64.28571319580078, "blob_id": "8db5aaa0bbf096ec5690d9bf92b236ca666a494e", "content_id": "6aa958a08bb578c8f43c08510a37de16bdc02177", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 458, "license_type": "permissive", "max_line_length": 226, "num_lines": 7, "path": "/__init__.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "import os\nfrom sys import exit\nif not os.path.isfile(os.path.join(os.getenv('HOME'),'.highthroughput')):\n exit('Please log in to http://physics.epotentia.com/queue/ (request an account from Michael if you don\\'t have one) then paste the contents of http://physics.epotentia.com/queue/HT.php in ~/.highthroughput and try again.')\n\n__all__ = ['communication', 'manage', 
'io','modules','utils','errors']\n#import communication,manage,io,modules,utils,errors\n\n" }, { "alpha_fraction": 0.5227499008178711, "alphanum_fraction": 0.5347060561180115, "avg_line_length": 32.411109924316406, "blob_id": "974899dde44fc25af7b2ae260d6003ef8ecfcb78", "content_id": "744e7e20a5512286bac7110532c7781e34295502", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3011, "license_type": "permissive", "max_line_length": 108, "num_lines": 90, "path": "/io/VASP.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "import os\n\ndef readINCAR(directory=None):\n if directory == None:\n directory = os.getcwd()\n template = dict()\n INCAR = open(os.path.join(directory,'INCAR'),'r') \n\n for line in INCAR:\n settings = line.split('=')\n if len(settings) < 2:\n continue\n settings[1] = settings[1].split('!')\n template[settings[0].strip()] = settings[1][0].strip()\n\n INCAR.close()\n return template\n\ndef writeINCAR(template,directory=None):\n print(template)\n if directory == None:\n directory = os.getcwd()\n INCAR = open(os.path.join(directory,'INCAR'),'w')\n\n for key,value in sorted(template.items()):\n INCAR.write(str(key) + ' = ' + str(value) + '\\n')\n\n return INCAR.close()\n\ndef readKPOINTS(directory=None):\n if directory == None:\n directory = os.getcwd()\n template = dict()\n KPOINTS = open(os.path.join(directory,'KPOINTS'),'r') \n \n kfile = list(filter(None,map(lambda s: s.strip(), KPOINTS.readlines())))\n \n if int(kfile[1]) != 0:\n template['file'] = '\\n'.join(kfile)\n else:\n template['comment'] = kfile[0]\n template['mode'] = kfile[2][0].upper()\n if template['mode'] == 'A':\n template['L'] = int(kfile[3])\n else:\n #k = kfile[3].split()\n #template['kx'] = int(k[0])\n #template['ky'] = int(k[1])\n #template['kz'] = int(k[2])\n template['K'] = kfile[3]\n template['shift'] = kfile[4]\n return template\n\ndef writeKPOINTS(template,directory=None):\n if directory == None:\n directory = os.getcwd()\n KPOINTS = open(os.path.join(directory,'KPOINTS'),'w')\n modedict = {'A' : 'Auto', 'M' : 'Monkhorst Pack', 'G' : 'Gamma'}\n if not template['mode']:\n kfile = str(template['file'])\n else:\n kfile = str(template['comment']) + '\\n0\\n' + modedict[str(template['mode'])] + '\\n'\n\n if template['mode'] == 'A':\n kfile += str(template['L'])\n else:\n kfile += str(template['K']) + '\\n' + str(template['shift'])\n\n KPOINTS.write(kfile)\n return KPOINTS.close()\n \ndef rescalePOSCAR(poscar,scale=1.0,directory=None):\n if directory == None:\n directory = os.getcwd()\n \n with open(os.path.join(directory,poscar),'r+') as oldpos:\n current = oldpos.readlines()\n if isinstance(scale,float) or isinstance(scale,int) or isinstance(scale,str):\n if float(scale) < 0:\n current[1] = str(scale) + '\\n'\n else:\n current[1] = str(float(scale)**(1/3)*float(current[1].strip())) + '\\n'\n elif len(scale) == 3:\n for i in range(2,5):\n newlat = [str(float(x)*float(scale[i-2])) for x in current[i].strip().split(' ') if x != '']\n newlat = [x if abs(x) > 1e-5 else 0.0 for x in newlat]\n current[i] = ' '.join(newlat) + '\\n'\n oldpos.seek(0)\n oldpos.write(''.join(current))\n oldpos.truncate()\n " }, { "alpha_fraction": 0.5526315569877625, "alphanum_fraction": 0.5526315569877625, "avg_line_length": 18, "blob_id": "fac20468b441f445abcbac1cfee42406cc15dbd8", "content_id": "26f70f43b084a83e4eca47e426bf891de4a69a71", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": 
"Python", "length_bytes": 38, "license_type": "permissive", "max_line_length": 18, "num_lines": 2, "path": "/modules/__init__.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "__all__ = ['VASP']\nfrom . import VASP\n" }, { "alpha_fraction": 0.559043824672699, "alphanum_fraction": 0.5688446164131165, "avg_line_length": 33.38356018066406, "blob_id": "267b728503f003f65e61ed28ff48a02edabd79c7", "content_id": "60791098ace1194dc464ee6d6af39e7407088fcc", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12550, "license_type": "permissive", "max_line_length": 173, "num_lines": 365, "path": "/errors/VASPdetectors.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "import subprocess, os, json\nfrom HighThroughput.manage.calculation import getResults, getSettings, updateResults\nfrom HighThroughput.modules.VASP import gather, eosRollback\nfrom HighThroughput.utils.generic import execute\nimport numpy as np\nfrom pymatgen.io.vasp.outputs import Vasprun\nfrom pymatgen.electronic_structure.core import Spin, Orbital\nimport xml\nimport shutil\n\ndef currenterror(calc):\n if len(calc['cerrors']) > 0:\n print('Skipping current step due to detected errors.')\n return True\n\ndef test(calc):\n print('SEARCHING FOR ERRORS')\n det = int(\n subprocess.Popen('grep WAAAAAAAAAGH tempout | wc -l', shell=True, stdout=subprocess.PIPE).communicate()[0].decode())\n if det > 0:\n print('Error detected.')\n return True\n else:\n return False\n\n\ndef maxSteps(calc):\n # Slow, bad or no convergence\n nsteps = subprocess.Popen(\n 'grep -e \" .* .* .* .* .* .*\" OSZICAR | grep : | grep -v vasp | awk \\'{print $2}\\' | tail -n 1',\n shell=True, stdout=subprocess.PIPE).communicate()[0].decode().strip()\n\n if nsteps.isdigit():\n nsteps = int(nsteps)\n else:\n return False\n if 'NELM' not in calc['settings']['INCAR'].keys():\n calc['settings']['INCAR']['NELM'] = 60\n\n print(str(nsteps) + ' performed of ' + str(calc['settings']['INCAR']['NELM']) + ' allowed steps')\n\n if nsteps == int(calc['settings']['INCAR']['NELM']):\n print('Error detected.')\n return True\n else:\n return False\n\ndef maxIonicSteps(calc):\n # Slow, bad or no convergence\n nsteps = subprocess.Popen(\n 'grep F= tempout | awk \\'{print $1}\\' | tail -n 1',\n shell=True, stdout=subprocess.PIPE).communicate()[0].decode().strip()\n\n if nsteps.isdigit():\n nsteps = int(nsteps)\n else:\n return False\n if 'NSW' not in calc['settings']['INCAR'].keys():\n calc['settings']['INCAR']['NSW'] = 0\n\n if nsteps == 0:\n return False\n if 'IBRION' not in calc['settings']['INCAR'].keys():\n return False\n elif int(calc['settings']['INCAR']['IBRION']) <= 0:\n return False\n\n print(str(nsteps) + ' performed of ' + str(calc['settings']['INCAR']['NSW']) + ' allowed ionic steps')\n\n if nsteps == int(calc['settings']['INCAR']['NSW']):\n print('Error detected.')\n return True\n else:\n return False\n\n\n\ndef gradNotOrth(calc):\n # Corrupted CHGCAR, POTCAR or optimizer/lib issue in VASP\n detected = int(\n subprocess.Popen('fgrep \"EDWAV: internal error, the gradient is not orthogonal\" tempout | wc -l', shell=True,\n stdout=subprocess.PIPE).communicate()[0].decode())\n if detected > 0:\n print('Error detected.')\n return True\n else:\n return False\n\ndef ZHEGV(calc):\n # Davidson failing\n detected = int(subprocess.Popen('fgrep \"Error EDDDAV: Call to ZHEGV failed.\" tempout | wc -l', 
shell=True,stdout=subprocess.PIPE).communicate()[0].decode().strip())\n if detected > 0:\n print('Error detected.')\n return True\n else:\n return False\n\ndef subSpace(calc):\n # Davidson failing more\n detected = int(subprocess.Popen('fgrep \"Sub-Space-Matrix is not hermitian in DAV\" tempout | wc -l', shell=True,stdout=subprocess.PIPE).communicate()[0].decode().strip())\n if detected > 0:\n print('Error detected.')\n return True\n else:\n return False\n\ndef planeWaveCoeff(calc):\n # Grid/basis set/whatnot changed\n detected = int(\n subprocess.Popen('fgrep \"ERROR: while reading WAVECAR, plane wave coefficients changed\" tempout | wc -l',\n shell=True, stdout=subprocess.PIPE).communicate()[0].decode())\n if detected > 0:\n print('Error detected.')\n return True\n else:\n return False\n\ndef corruptWAVECAR(calc):\n # Grid/basis set/whatnot changed\n detected = int(\n subprocess.Popen('fgrep \"ERROR: while reading eigenvalues from WAVECAR\" tempout | wc -l',\n shell=True, stdout=subprocess.PIPE).communicate()[0].decode())\n if detected > 0:\n print('Error detected.')\n return True\n else:\n return False\n\ndef SGRCON(calc):\n # Grid/basis set/whatnot changed\n detected = int(\n subprocess.Popen('fgrep \"VERY BAD NEWS! internal error in subroutine SGRCON\" tempout | wc -l',\n shell=True, stdout=subprocess.PIPE).communicate()[0].decode()) + int(\n subprocess.Popen('fgrep \"SYMPREC\" tempout | wc -l',\n shell=True, stdout=subprocess.PIPE).communicate()[0].decode())\n if detected > 0:\n print('Error detected.')\n return True\n else:\n return False\n\ndef ZPOTRF(calc):\n # Grid/basis set/whatnot changed\n detected = int(subprocess.Popen('fgrep \"LAPACK: Routine ZPOTRF failed!\" tempout | wc -l', shell=True,\n stdout=subprocess.PIPE).communicate()[0].decode())\n if detected > 0:\n print('Error detected.')\n return True\n else:\n return False\n\n\ndef PSSYEVX(calc):\n # Grid/basis set/whatnot changed\n detected = int(\n subprocess.Popen('fgrep \"ERROR in subspace rotation PSSYEVX: not enough eigenvalues found\" tempout | wc -l',\n shell=True, stdout=subprocess.PIPE).communicate()[0].decode())\n if detected > 0:\n print('Error detected.')\n return True\n else:\n return False\n\ndef SYMPREC(calc):\n # error with symmetry, currently not used, combined with SGRCON\n detected = int(\n subprocess.Popen('fgrep \"SYMPREC\" tempout | wc -l',\n shell=True, stdout=subprocess.PIPE).communicate()[0].decode())\n if detected > 0:\n print('Error detected.')\n return True\n else:\n return False\n\ndef energyMissing(calc):\n # Energy cannot be extracted from OUTCAR\n # Is not in de db\n energy = int(\n subprocess.Popen('grep \\'energy without entropy\\' OUTCAR | tail -1 | awk \\'{ print $8 }\\'', shell=True,\n stdout=subprocess.PIPE).communicate()[0].decode()).strip()\n if energy == '' or not 'energy' in locals():\n print('Error detected.')\n return True\n else:\n return False\n\n\ndef chgMissing(calc):\n if not os.path.isfile('CHGCAR') or not os.path.isfile('CHG'):\n print('Error detected.')\n return True\n else:\n return False\n\n\ndef smearErr(calc):\n if int(calc['settings']['INCAR']['ISMEAR']) != 1:\n return False\n\n if currenterror(calc):\n return False\n\n if '9' in calc['cerrors']:\n print('Skipping sigma check due to smearing change.')\n return False\n\n psettings = getSettings(calc['parent'])\n results = gather({'natoms': 0, 'smearerr': 0})\n if float(results['smearerr']) > 0.001 * float(results['natoms']):\n print('Detected a smearing error of size: ' + str(results['smearerr']))\n return True\n 
else:\n return False\n\n\ndef wrongSmear(calc):\n if os.path.isfile('aborted'):\n return False\n\n if currenterror(calc):\n return False\n\n try:\n vasprun = Vasprun('vasprun.xml')\n except xml.etree.ElementTree.ParseError as e:\n shutil.copy2('vasprun.xml','error.vasprun.xml')\n raise ValueError\n\n index = np.argmin(np.abs(vasprun.idos.energies - vasprun.efermi))\n index01 = np.argmin(np.abs(vasprun.idos.energies - vasprun.efermi - 0.1))\n dos = np.sum([int(float(x) < 0.5) for x in vasprun.tdos.densities[Spin.up][index:index01]])\n diff = vasprun.idos.densities[Spin.up][index01] - vasprun.idos.densities[Spin.up][index]\n print(diff)\n print(dos)\n psettings = getSettings(calc['parent'])\n smearing = calc['settings']['INCAR']['ISMEAR']\n if 'continue' in psettings.keys():\n doubt = int(psettings['continue'])\n print(doubt,smearing)\n if (doubt > 5) and smearing == 1:\n return True\n elif (doubt > 5) and smearing == 0:\n return False\n # Could check if Gamma is in the k-mesh to estimate the odds of a missed BG crossing\n if (smearing == 0 and diff - 0.5 > -1e-3) or (smearing == 1 and diff - 0.5 < -1e-3 and dos > 0.):\n print('Wrong smearing detected.')\n # if (smearing == 0 and diff <= 0.03) or (smearing == 1 and BG > 0.03):\n return True\n else:\n return False\n\ndef checkSpin(calc):\n #needs lorbit option\n if currenterror(calc):\n return False\n if ('spincheck' not in calc['results'].keys()) or (calc['settings']['INCAR']['ISPIN'] == 1):\n return False\n\n magmom = gather({'magmom' : 0})['magmom']\n\n if magmom < calc['results']['spincheck']:\n print('Unnecessary spin detected.')\n return True\n return False\n\ndef notConverged(calc):\n if os.path.isfile('aborted'):\n return False\n\n if len(calc['cerrors']) > 0:\n print('Skipping convergence step due to detected errors.')\n return False\n\n presults = getResults(calc['parent'])\n error = False\n\n if 'convergence' not in presults.keys():\n return False\n else:\n # \"convergence\": [[\"Ehull\", [\"K\", 0.01, [], 0]]] format, could add more than two els to each tuple to determine how to increase the settings and so on\n new = []\n for propset in presults['convergence']:\n total = len(propset)\n prop = propset[0]\n pnew = (prop,)\n for i in range(1, total):\n (crit, cond, current, converged) = propset[i]\n if converged == 1:\n propset[i][-1] = 1\n pnew += (tuple(propset[i]),)\n #print('converged?', pnew)\n continue;\n print('Checking ' + prop + ' convergence ' + ' with respect to ' + crit + '.')\n\n newval = gather({prop: ''})[prop]\n\n current.append(newval)\n\n if len(current) == 1:\n error = True\n else:\n\n delta = np.abs(current[-1] - current[-2])\n\n if delta > cond:\n print('Not converged. 
Remaining error of ' + str(delta) + ' on ' + prop + '.')\n error = True\n else:\n print('Property ' + prop + ' is converged up to ' + str(delta) + '.')\n if crit == 'K':\n presults['settingsmod']['KPOINTS']['K'] = ' '.join(\n [str(int(x) - 2) for x in presults['settingsmod']['KPOINTS']['K'].split(' ')])\n elif crit == 'ENCUT':\n presults['settingsmod']['INCAR']['ENCUT'] -= 100\n converged = 1\n pnew += ((crit, cond, current, converged),)\n new.append(pnew)\n # presults['convergence'] = json.dumps(new).translate(str.maketrans({\"'\": r\"\\'\"}))\n updateResults(presults, calc['parent'])\n return error\n\ndef eosCheck(calc):\n if 'eoscheck' not in calc['results'].keys():\n return False\n\n eos = gather({'eos' : {}})['eos']\n crit = calc['results']['eoscheck']\n error = False\n\n if np.iscomplex(eos.values()).any():\n error = True\n if error:\n print('EOS Check: Complex number returned by fit.')\n return error\n else:\n print('No complex values:')\n print(eos)\n\n if crit['res'] is not None:\n if eos['res'] > crit['res']:\n print('EOS Check: Residual test failed with value 1-r^2 = ' + str(eos['res']) + ' for material ' + calc['file'])\n error = True\n\n if crit['B0'] is not None:\n if (eos['B0'] < crit['B0'][0]) or (eos['B0'] > crit['B0'][1]):\n print('EOS Check: Bulk modulus test failed with value B0 = ' + str(eos['B0']) + ' for material ' + calc['file'])\n error = True\n\n if crit['BP'] is not None:\n if (eos['BP'] < crit['BP'][0]) or (eos['BP'] > crit['BP'][1]):\n print('EOS Check: Bulk modulus derivative test failed with value BP = ' + str(eos['BP']) + ' for material ' + calc['file'])\n error = True\n\n if crit['V0'] is not None:\n if isinstance(eos['V0'], np.float64):\n if (eos['V0'] < crit['V0'][0]) or (eos['V0'] > crit['V0'][1]):\n print('EOS Check: Volume test failed with value V0 = ' + str(eos['V0']) + ' for material ' + calc['file'])\n error = True\n else:\n error = True\n\n if error == True:\n eosRollback(calc)\n\n return error\n" }, { "alpha_fraction": 0.6056337952613831, "alphanum_fraction": 0.6056337952613831, "avg_line_length": 34, "blob_id": "e2dc6d554f27780688176de134ee7dfbe54c8bbe", "content_id": "f4729830c59830beb402ef75c4e8e4d0bd94fb83", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 71, "license_type": "permissive", "max_line_length": 35, "num_lines": 2, "path": "/io/__init__.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "__all__ = ['CIF','VASP','Gaussian']\nfrom . 
import CIF, VASP, Gaussian\n\n" }, { "alpha_fraction": 0.5313190817832947, "alphanum_fraction": 0.5405305624008179, "avg_line_length": 29.795454025268555, "blob_id": "5d9665aa023d63fff4faede5abc6bec1a5ff4987", "content_id": "e487f2747e49b88e77d2ab9eac2978789382f6cf", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2714, "license_type": "permissive", "max_line_length": 104, "num_lines": 88, "path": "/ML/features/elements.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Thu Jul 26 15:28:56 2018\n\n@author: Michiel\n\"\"\"\n\n# -*- coding: utf-8 -*-\nfrom HighThroughput.communication.mysql import mysql_query\nimport pandas as pd\n\ndef addElemental_old(df,features):\n\n ncols = len([1 for feat in list(df) if feat.find('el')==0])\n els = set([])\n\n for i in range(ncols):\n els = els.union(set(df['el' + str(i)].unique()))\n \n allfeats = ['atomicnumber','symbol','mass','Ecoh','n','s','p','d','V','r','EN','EA','IP']\n\n atominfo = mysql_query('SELECT * FROM `elements` WHERE `symbol` IN (\\'' + '\\',\\''.join(els) + '\\')')\n atomdict = {}\n \n for row in atominfo:\n for f in allfeats:\n if f not in atomdict:\n atomdict[f] = []\n atomdict[f].append(row[f])\n \n atoms = pd.DataFrame(atomdict,columns=allfeats)\n data = {}\n\n for col in features:\n for i in range(ncols):\n data[col + str(i)] = [0 for x in range(0,len(df))]\n \n new = pd.DataFrame(data,index=df.index)\n \n df = pd.concat([df,new], axis=1)\n\n for (ind,material) in df.iterrows():\n for i in range(ncols):\n atominfo = atoms.loc[atoms['symbol'] == material['el' + str(i)]]\n for col in features:\n df.loc[ind,col + str(i)] = float(atominfo[col].values[0])\n \n return df\n\ndef addElemental(df,features):\n ncols = len([1 for feat in list(df) if feat.find('el')==0])\n els = set([])\n\n for i in range(ncols):\n els = els.union(set(df['el' + str(i)].unique()))\n \n allfeats = ['atomicnumber','symbol','mass','Ecoh','n','s','p','d','V','r','EN','EA','IP']\n\n atominfo = mysql_query('SELECT * FROM `elements` WHERE `symbol` IN (\\'' + '\\',\\''.join(els) + '\\')')\n atomdict = {}\n \n for row in atominfo:\n for f in allfeats:\n if f not in atomdict:\n atomdict[f] = []\n atomdict[f].append(row[f])\n \n atoms = pd.DataFrame(atomdict,columns=allfeats)\n data = [[0 for i in range(ncols*len(features))] for j in range(len(df))]\n\n index_df = pd.Index(df.index)\n \n column_names = [\"name\" for i in range(ncols*len(features))]\n \n for (ind,material) in df.iterrows():\n for i in range(ncols):\n atominfo = atoms.loc[atoms['symbol'] == material['el' + str(i)]]\n row_loc = index_df.get_loc(ind)\n for ind_feat, col in enumerate(features):\n col_loc = i+ncols*ind_feat\n column_names[col_loc] = col+str(i)\n data[row_loc][col_loc] = float(atominfo[col].values[0])\n\n new = pd.DataFrame(data,index=df.index, columns = column_names)\n \n df = pd.concat([df,new], axis=1)\n \n return df \n" }, { "alpha_fraction": 0.6494845151901245, "alphanum_fraction": 0.6597937941551208, "avg_line_length": 47.5, "blob_id": "a6ddb50079a7b94d1d2fbf3a2108bd794a57989c", "content_id": "e5be48fa9c0c3c0ece3cdc644fdfecdb6470c017", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 485, "license_type": "permissive", "max_line_length": 174, "num_lines": 10, "path": "/HTtools/HTremaining", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": 
"#!/usr/bin/env python\nimport HighThroughput as HT\nimport os,subprocess,sys\nfrom HighThroughput.communication.mysql import *\n\nwf = mysql_query('SELECT MAX(`stat`) as m FROM `workflows` WHERE `id` = (SELECT `workflow` FROM `queues` WHERE `id` = ' + sys.argv[1] + ')')\nmaxstat = wf['m']\n\nrunning = mysql_query('SELECT COUNT(`file`) AS `count` FROM `calculations` WHERE `queue` = ' + sys.argv[1] + ' AND `stat`< ' + maxstat + ' AND `stat` % 2 = 0 AND `leaf` = 1')\nprint(running['count'])\n" }, { "alpha_fraction": 0.6883116960525513, "alphanum_fraction": 0.7402597665786743, "avg_line_length": 12, "blob_id": "3d84536330a8f7a48ebe12fd576ff54e8b515486", "content_id": "652bfcdc2ae4e55271e79415fd92dbc51faabd5f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 77, "license_type": "permissive", "max_line_length": 21, "num_lines": 6, "path": "/HTtools/HTqsetup", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\nmkdir $1\nmkdir $1/CALCULATIONS\nmkdir $1/LOGFILES\nmkdir $1/import" }, { "alpha_fraction": 0.5740740895271301, "alphanum_fraction": 0.6083052754402161, "avg_line_length": 31.399999618530273, "blob_id": "6edc8be6d8189226784eb73d0860a529b7fb68ae", "content_id": "3a9b5ed0c0736b4e1d2b3d2a09f6602724a9e0ab", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1782, "license_type": "permissive", "max_line_length": 162, "num_lines": 55, "path": "/HTtools/HTembed", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport sys,subprocess\n\nsmall = subprocess.Popen('cat ' + sys.argv[1] + ' | aconvasp --shift 0.5 0.5 0.5 d | aconvasp --cart',shell=True,stdout=subprocess.PIPE).communicate()[0].decode()\nlarge = subprocess.Popen('cat ' + sys.argv[2] + ' | aconvasp --shift 0.5 0.5 0.5 d | aconvasp --cart',shell=True,stdout=subprocess.PIPE).communicate()[0].decode()\ndef parse(poscar):\n coords = []\n lines = poscar.split('\\n')\n a = lines[1].strip().split(' ')[0]\n i = 0\n marker = 9999999\n natoms = 9999999\n for line in lines:\n if i > 0 and line.strip() == 'Ge':\n marker = i\n if i == marker + 1:\n natoms = sum([int(x) for x in line.split()])\n if i > marker + 2:\n temp = line.split()[0:3]\n coords.append(temp)\n i+=1\n if len(coords) == natoms:\n break\n return a, coords, poscar, marker,natoms\nasmall, csmall,poscarsmall,markersmall,natomssmall = parse(small)\nalarge,clarge, poscarlarge,markerlarge,natomslarge = parse(large)\n\nds = []\ndl = []\nfor c in csmall:\n ds.append(sum([(float(x)-float(asmall)/2)**2 for x in c]))\n\nfor c in clarge:\n dl.append(sum([(float(x)-float(asmall)/2)**2 for x in c]))\n\ncsmalls = [c for (d,c) in sorted(zip(ds,csmall))]\nclarges = [c for (d,c) in sorted(zip(dl,clarge))]\n\ndiff = int(sys.argv[3]) \nwhile diff > 0:\n clarges.insert(0, [0.,0.,0.])\n diff -= 1\nwhile diff < 0:\n clarges.pop(0)\n diff += 1\n\ndiff = int(sys.argv[3])\n#possible overlap same distance depending on sort\nind = 32+diff\nclarges[0:ind] = csmalls[0:ind]\nlines = poscarlarge.split('\\n')\nclines = [' '.join(c) for c in clarges]\nlines[(markerlarge+3):(markerlarge+4+natomslarge)] = clines\nlines[6] = str(int(natomslarge) + int(diff))\nprint('\\n'.join(lines))\n" }, { "alpha_fraction": 0.5311059951782227, "alphanum_fraction": 0.6069124341011047, "avg_line_length": 36.739131927490234, "blob_id": "f6f2d5f034595da24046e6d4695c9e99fabce415", "content_id": 
"3ba9e076cfc802f4f10d0047d03e480acf137540", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4340, "license_type": "permissive", "max_line_length": 458, "num_lines": 115, "path": "/HTtools/HTupdate", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport shutil, os, sys, re\nimport subprocess\nfrom subprocess import Popen, PIPE\nfrom sys import argv\n\nimport ase.io\n\nimport HighThroughput.manage.calculation as HT\nimport HighThroughput.io\nfrom HighThroughput.utils.generic import execute\nfrom HighThroughput.communication.mysql import mysql_query \nimport json\nimport time\nimport random\n\nfrom numpy.linalg import norm\nfrom numpy import dot, arccos, degrees, floor, prod\n\n#UPDATE POTCAR INFO\n\nfolder = os.getcwd().split('/')[-1]\nstatmap = {'0.94' : 2,'0.96' : 4,'0.98' : 6, '1.0' : 8, '1.02' : 10,'1.04' : 12,'1.06' : 14,'STEP4' : 16, 'DOS' : 18, 'BANDS' : 20}\n\nstat = statmap[folder]\n\ncid = mysql_query('SELECT `id` FROM `calculations` WHERE `queue` = ' + str(sys.argv[1]) + ' AND `file` = ' + str(sys.argv[2]) + ' AND `stat` IN (' + str(stat) + ', ' + str(stat - 1) + ')')\ncid = cid['id']\nsettings = {'INCAR': HighThroughput.io.VASP.readINCAR(), 'KPOINTS' : HighThroughput.io.VASP.readKPOINTS()}\nPOTCAR_version = execute('grep -a \\'TITEL\\' POTCAR | awk \\'{ print $4 }\\'')\nsettings['POTCAR'] = POTCAR_version.strip().replace('\\n',', ')\n\n#END CALCULATION AND FETCH RESULTS\n\nenergy = execute('grep \\'energy without entropy\\' OUTCAR | tail -1 | awk \\'{ print $7 }\\'')\n\nif 'error' in locals():\n HT.updateResults({'error':error}, cid)\nelif energy=='' or not 'energy' in locals():\n HT.updateResults({'error':'Energy missing'}, cid)\n print('Energy missing! Error...')\nelif not os.path.isfile('CHGCAR') and not os.path.isfile('CHG'):\n #HT.updateResults({'error':'CHGCAR and CHG missing. VASP Error?'}, cid)\n print('CHGCAR/CHG missing. VASP Error?')\nelse:\n print('Energy OK. 
Ending calculation, deleting junk files and fetching results.')\n print('This is the HT.end cid: ' + str(cid))\n\n ended = 0\n tries = 0\n while(ended == 0 and tries < 10):\n #ended = HT.end(cid)\n ended = 1\n tries = tries + 1\n #time.sleep(random.randint(10,100))\n if(ended == 0):\n print('ERROR: I tried to end calculation ' + str(cid) + str(tries) + ' times, but no succes.') #tries should always be 10\n #HT.updateResults({'error':'Ending calculation failed'}, cid)\n\n else:\n #if status<15:\n #os.remove('CHGCAR')\n #os.remove('CHG')\n results = HT.getResults(cid)\n\n #could leave this out when working with QZP's\n\n poscar = open('POSCAR','r')\n lines = poscar.readlines()\n\n numberofatoms = lines[6][:-1].lstrip()\n numberofatoms = \" \".join(numberofatoms.split())\n numberofatoms = sum(map(int, numberofatoms.split(' ')))\n\n energy = float(energy)\n\n Eatom = energy/numberofatoms \n results['Eatom'] = Eatom\n\n atoms = lines[5].split()\n\n energies = {'Te':-3.142188083,'K':-1.04693431,'Tl':-2.2266554,'Se':-3.498123137,'Rb':-0.936014755,'Sb':-4.13566371,'P':-5.373717185,'Bi':-3.885356122,'Po':-3.07473254,'Al':-3.745478105,'Ca':-1.929739603,'In':-2.55987617,'Sn':-3.846317905,'Ga':-2.905926696,'Mg':-1.506391565,'Na':-1.311801313,'Ba':-1.90827009,'Sr':-1.636048335,'Cs':-0.852268485,'S':-4.125889916,'Si':-5.424861565,'Ge':-4.51831862,'Pb':-3.565225973,'As':-4.66985772,'Li':-1.910459733}\n\n\n if len(atoms)==1:\n results['Eformation'] = Eatom - float(energies[str(atoms[0])])\n elif len(atoms)==4: \n results['Eformation'] = Eatom - float(6*energies[str(atoms[0])] + energies[str(atoms[1])] + 4*energies[str(atoms[2])] + 3*energies[str(atoms[3])])/14\n\n results['volume'] = float(Popen('grep volume OUTCAR | tail -1 | awk \\'{print $5}\\' ', stdout=PIPE, shell=True).communicate()[0])\n\n crystal = ase.io.read('CONTCAR')\n cell = crystal.get_cell()\n \n a = cell[0];\n b = cell[1];\n c = cell[2];\n na = round(norm(a),3);\n nb = round(norm(b),3);\n nc = round(norm(c),3); \n\n results['a'] = na\n results['b'] = nb\n results['c'] = nc\n results['alpha'] = round(degrees(arccos(dot(b,c)/nb/nc)),1)\n results['beta'] = round(degrees(arccos(dot(c,a)/nc/na)),1)\n results['gamma'] = round(degrees(arccos(dot(a,b)/na/nb)),1)\n \n print('Updating results')\n print(results)\n HT.updateResults(results, cid)\n\nprint('Updating settings')\nprint(settings)\nHT.updateSettings(settings, cid)\n" }, { "alpha_fraction": 0.5134167671203613, "alphanum_fraction": 0.5262318849563599, "avg_line_length": 34.25299835205078, "blob_id": "5d5cebe788463667ff9e4495856593c5b795206c", "content_id": "d0186fdc8d72cc5e362ab06f9a83033dd31dba15", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 33242, "license_type": "permissive", "max_line_length": 102, "num_lines": 917, "path": "/ML/models/mf_kriging.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Mon Jul 9 11:00:11 2018\r\n\r\n@author: Michiel\r\n\"\"\"\r\n\r\n\"\"\"\r\nIntegrates the Multi-Fidelity Co-Kriging method described in [LeGratiet2013].\r\n\r\n(Author: Remi Vauclin [email protected])\r\n\r\nThis code was implemented using the package scikit-learn as basis.\r\n(Author: Vincent Dubourg, [email protected])\r\n\r\nOpenMDAO adaptation. 
Regression and correlation functions were directly copied\r\nfrom scikit-learn package here to avoid scikit-learn dependency.\r\n(Author: Remi Lafage, [email protected])\r\n\r\nISAE/DMSM - ONERA/DCPS\r\n\"\"\"\r\n\r\nimport numpy as np\r\nfrom numpy import atleast_2d as array2d\r\n\r\nfrom scipy import linalg\r\nfrom scipy.optimize import minimize\r\nfrom scipy.spatial.distance import squareform, pdist, cdist\r\nfrom scipy.special import kv, gamma\r\n#from openmdao.surrogate_models.surrogate_model import MultiFiSurrogateModel\r\n\r\nimport logging\r\n_logger = logging.getLogger()\r\n\r\nMACHINE_EPSILON = np.finfo(np.double).eps # machine precision\r\nNUGGET = 10. * MACHINE_EPSILON # nugget for robustness\r\n\r\nINITIAL_RANGE_DEFAULT = 0.3 # initial range for optimizer\r\nTOLERANCE_DEFAULT = 1e-6 # stopping criterion for MLE optimization\r\n\r\nTHETA0_DEFAULT = 0.5\r\nTHETAL_DEFAULT = 1e-5\r\nTHETAU_DEFAULT = 50\r\n\r\nif hasattr(linalg, 'solve_triangular'):\r\n # only in scipy since 0.9\r\n solve_triangular = linalg.solve_triangular\r\nelse:\r\n # slower, but works\r\n def solve_triangular(x, y, lower=True):\r\n \"\"\"Solve triangular.\"\"\"\r\n return linalg.solve(x, y)\r\n\r\ndef constant_regression(x):\r\n \"\"\"\r\n Zero order polynomial (constant, p = 1) regression model.\r\n\r\n x --> f(x) = 1\r\n \"\"\"\r\n x = np.asarray(x, dtype=np.float)\r\n n_eval = x.shape[0]\r\n f = np.ones([n_eval, 1])\r\n return f\r\n\r\ndef linear_regression(x):\r\n \"\"\"\r\n First order polynomial (linear, p = n+1) regression model.\r\n\r\n x --> f(x) = [ 1, x_1, ..., x_n ].T\r\n \"\"\"\r\n x = np.asarray(x, dtype=np.float)\r\n n_eval = x.shape[0]\r\n f = np.hstack([np.ones([n_eval, 1]), x])\r\n return f\r\n\r\ndef matern(theta, X, nu = 3/2, Y = None):\r\n if Y is None:\r\n if not np.isinf(nu):\r\n dists = squareform(pdist(X/theta, metric='euclidean'))\r\n else:\r\n dists = squareform(pdist(X/theta, metric='sqeuclidean'))\r\n else:\r\n if not np.isinf(nu):\r\n dists = cdist(X / theta, Y / theta, metric='euclidean')\r\n else:\r\n dists = cdist(X/theta, Y/theta, metric='sqeuclidean')\r\n if nu == 0.5:\r\n K = np.exp(-dists)\r\n elif nu == 1.5:\r\n K = dists * np.sqrt(3)\r\n K = (1. + K) * np.exp(-K)\r\n elif nu == 2.5:\r\n K = dists * np.sqrt(5)\r\n K = (1. + K + K ** 2 / 3.0) * np.exp(-K)\r\n elif np.isinf(nu):\r\n K = np.exp(-.5 * dists)\r\n else: # general case; expensive to evaluate\r\n K = dists\r\n K[K == 0.0] += np.finfo(float).eps # strict zeros result in nan\r\n tmp = (np.sqrt(2 * nu) * K)\r\n K.fill((2 ** (1. 
- nu)) / gamma(nu))\r\n K *= tmp ** nu\r\n K *= kv(nu, tmp)\r\n return K\r\n\r\ndef squared_exponential_correlation(theta, d):\r\n \"\"\"\r\n Squared exponential correlation model (Radial Basis Function).\r\n\r\n (Infinitely differentiable stochastic process, very smooth)::\r\n\r\n n\r\n theta, dx --> r(theta, dx) = exp( sum - theta_i * (dx_i)^2 )\r\n i = 1\r\n\r\n Parameters\r\n ----------\r\n theta: array_like\r\n An array with shape 1 (isotropic) or n (anisotropic) giving the\r\n autocorrelation parameter(s).\r\n\r\n dx: array_like\r\n An array with shape (n_eval, n_features) giving the componentwise\r\n distances between locations x and x' at which the correlation model\r\n should be evaluated.\r\n\r\n Returns\r\n -------\r\n r: array_like\r\n An array with shape (n_eval, ) containing the values of the\r\n autocorrelation model.\r\n \"\"\"\r\n theta = np.asarray(theta, dtype=np.float)\r\n d = np.asarray(d, dtype=np.float)\r\n\r\n if d.ndim > 1:\r\n n_features = d.shape[1]\r\n else:\r\n n_features = 1\r\n\r\n if theta.size == 1:\r\n return np.exp(-theta[0] * np.sum(d ** 2, axis=1))\r\n elif theta.size != n_features:\r\n raise ValueError(\"Length of theta must be 1 or %s\" % n_features)\r\n else:\r\n return np.exp(-np.sum(theta.reshape(1, n_features) * d ** 2, axis=1))\r\n\r\n\r\ndef l1_cross_distances(X, Y=None):\r\n \"\"\"\r\n Compute the nonzero componentwise L1 cross-distances between the vectors in X and Y.\r\n\r\n Parameters\r\n ----------\r\n X: array_like\r\n An array with shape (n_samples_X, n_features)\r\n\r\n Y: array_like\r\n An array with shape (n_samples_Y, n_features)\r\n\r\n Returns\r\n -------\r\n D: array with shape (n_samples * (n_samples - 1) / 2, n_features)\r\n The array of componentwise L1 cross-distances.\r\n\r\n \"\"\"\r\n if Y is None:\r\n X = array2d(X)\r\n n_samples, n_features = X.shape\r\n n_nonzero_cross_dist = n_samples * (n_samples - 1) // 2\r\n D = np.zeros((n_nonzero_cross_dist, n_features))\r\n ll_1 = 0\r\n for k in range(n_samples - 1):\r\n ll_0 = ll_1\r\n ll_1 = ll_0 + n_samples - k - 1\r\n D[ll_0:ll_1] = np.abs(X[k] - X[(k + 1):])\r\n else:\r\n X = array2d(X)\r\n Y = array2d(Y)\r\n n_samples_X, n_features_X = X.shape\r\n n_samples_Y, n_features_Y = Y.shape\r\n if n_features_X != n_features_Y:\r\n raise ValueError(\"X and Y must have the same dimensions.\")\r\n n_features = n_features_X\r\n\r\n n_nonzero_cross_dist = n_samples_X * n_samples_Y\r\n D = np.zeros((n_nonzero_cross_dist, n_features))\r\n ll_1 = 0\r\n for k in range(n_samples_X):\r\n ll_0 = ll_1\r\n ll_1 = ll_0 + n_samples_Y # - k - 1\r\n D[ll_0:ll_1] = np.abs(X[k] - Y)\r\n\r\n return D\r\n\r\n\r\nclass MultiFiCoKriging(object):\r\n \"\"\"\r\n Integrate the Multi-Fidelity Co-Kriging method described in [LeGratiet2013].\r\n \"\"\"\r\n\r\n _regression_types = {\r\n 'constant': constant_regression,\r\n 'linear': linear_regression\r\n }\r\n\r\n def __init__(self, regr='constant', rho_regr='constant',\r\n theta=None, theta0=None, thetaL=None, thetaU=None):\r\n \"\"\"\r\n Initialize all attributes.\r\n\r\n Parameters\r\n ----------\r\n regr: string or callable, optional\r\n A regression function returning an array of outputs of the linear\r\n regression functional basis for Universal Kriging purpose.\r\n regr is assumed to be the same for all levels of code.\r\n Default assumes a simple constant regression trend.\r\n Available built-in regression models are:\r\n 'constant', 'linear'\r\n\r\n rho_regr: string or callable, optional\r\n A regression function returning an array of outputs of the 
linear\r\n regression functional basis. Defines the regression function for the\r\n autoregressive parameter rho.\r\n rho_regr is assumed to be the same for all levels of code.\r\n Default assumes a simple constant regression trend.\r\n Available built-in regression models are:\r\n 'constant', 'linear'\r\n\r\n theta: double, array_like or list, optional\r\n Value of correlation parameters if they are known; no optimization is run.\r\n Default is None, so that optimization is run.\r\n if double: value is replicated for all features and all levels.\r\n if array_like: an array with shape (n_features, ) for\r\n isotropic calculation. It is replicated for all levels.\r\n if list: a list of nlevel arrays specifying value for each level\r\n\r\n theta0: double, array_like or list, optional\r\n Starting point for the maximum likelihood estimation of the\r\n best set of parameters.\r\n Default is None and meaning use of the default 0.5*np.ones(n_features)\r\n if double: value is replicated for all features and all levels.\r\n if array_like: an array with shape (n_features, ) for\r\n isotropic calculation. It is replicated for all levels.\r\n if list: a list of nlevel arrays specifying value for each level\r\n\r\n thetaL: double, array_like or list, optional\r\n Lower bound on the autocorrelation parameters for maximum\r\n likelihood estimation.\r\n Default is None meaning use of the default 1e-5*np.ones(n_features).\r\n if double: value is replicated for all features and all levels.\r\n if array_like: An array with shape matching theta0's. It is replicated\r\n for all levels of code.\r\n if list: a list of nlevel arrays specifying value for each level\r\n\r\n thetaU: double, array_like or list, optional\r\n Upper bound on the autocorrelation parameters for maximum\r\n likelihood estimation.\r\n Default is None meaning use of default value 50*np.ones(n_features).\r\n if double: value is replicated for all features and all levels.\r\n if array_like: An array with shape matching theta0's. It is replicated\r\n for all levels of code.\r\n if list: a list of nlevel arrays specifying value for each level\r\n\r\n\r\n Attributes\r\n ----------\r\n `theta`: list\r\n Specified theta for each level OR the best set of autocorrelation parameters\r\n (the sought maximizer of the reduced likelihood function).\r\n\r\n `rlf_value`: list\r\n The optimal negative concentrated reduced likelihood function value\r\n for each level.\r\n\r\n\r\n Examples\r\n --------\r\n >>> from openmdao.surrogate_models.multifi_cokriging import MultiFiCoKriging\r\n >>> import numpy as np\r\n >>> # Xe: DOE for expensive code (nested in Xc)\r\n >>> # Xc: DOE for cheap code\r\n >>> # ye: expensive response\r\n >>> # yc: cheap response\r\n >>> Xe = np.array([[0],[0.4],[1]])\r\n >>> Xc = np.vstack((np.array([[0.1],[0.2],[0.3],[0.5],[0.6],[0.7],[0.8],[0.9]]),Xe))\r\n >>> ye = ((Xe*6-2)**2)*np.sin((Xe*6-2)*2)\r\n >>> yc = 0.5*((Xc*6-2)**2)*np.sin((Xc*6-2)*2)+(Xc-0.5)*10. - 5\r\n >>> model = MultiFiCoKriging(theta0=1, thetaL=1e-5, thetaU=50.)\r\n >>> model.fit([Xc, Xe], [yc, ye])\r\n >>> # Prediction on x=0.05\r\n >>> np.abs(float(model.predict([0.05])[0])- ((0.05*6-2)**2)*np.sin((0.05*6-2)*2)) < 0.05\r\n True\r\n\r\n\r\n Notes\r\n -----\r\n Implementation is based on the Package Scikit-Learn\r\n (Author: Vincent Dubourg, [email protected]) which translates\r\n the DACE Matlab toolbox, see [NLNS2002]_.\r\n\r\n\r\n References\r\n ----------\r\n .. [NLNS2002] H. B. Nielsen, S. N. Lophaven, and J. 
Sondergaard.\r\n `DACE - A MATLAB Kriging Toolbox.` (2002)\r\n http://www2.imm.dtu.dk/~hbn/dace/dace.pdf\r\n\r\n .. [WBSWM1992] W. J. Welch, R. J. Buck, J. Sacks, H. P. Wynn, T. J. Mitchell,\r\n and M. D. Morris (1992). \"Screening, predicting, and computer experiments.\"\r\n `Technometrics,` 34(1) 15--25.\r\n http://www.jstor.org/pss/1269548\r\n\r\n .. [LeGratiet2013] L. Le Gratiet (2013). \"Multi-fidelity Gaussian process\r\n regression for computer experiments.\"\r\n PhD thesis, Universite Paris-Diderot-Paris VII.\r\n\r\n .. [TBKH2011] Toal, D. J., Bressloff, N. W., Keane, A. J., & Holden, C. M. E. (2011).\r\n \"The development of a hybridized particle swarm for kriging hyperparameter\r\n tuning.\" `Engineering optimization`, 43(6), 675-699.\r\n \"\"\"\r\n# self.corr = squared_exponential_correlation\r\n self.corr = matern\r\n self.nu = np.inf\r\n self.regr = regr\r\n self.rho_regr = rho_regr\r\n self.theta = theta\r\n self.theta0 = theta0\r\n self.thetaL = thetaL\r\n self.thetaU = thetaU\r\n\r\n self._nfev = 0\r\n\r\n\r\n def _build_R(self, lvl, theta):\r\n \"\"\"\r\n Build the correlation matrix with given theta for the specified level.\r\n \"\"\"\r\n n_samples = self.n_samples[lvl]\r\n R = np.eye(n_samples) * (1. + NUGGET)\r\n corr = self.corr(theta, self.X[lvl], nu = self.nu)\r\n R = R + corr\r\n\r\n return R\r\n\r\n def fit(self, X, y,\r\n initial_range=INITIAL_RANGE_DEFAULT, tol=TOLERANCE_DEFAULT):\r\n \"\"\"\r\n Implement the Multi-Fidelity co-kriging model fitting method.\r\n\r\n Parameters\r\n ----------\r\n X: list of double array_like elements\r\n A list of arrays with the input at which observations were made, from lowest\r\n fidelity to highest fidelity. Designs must be nested\r\n with X[i] = np.vstack([..., X[i+1])\r\n\r\n y: list of double array_like elements\r\n A list of arrays with the observations of the scalar output to be predicted,\r\n from lowest fidelity to highest fidelity.\r\n\r\n initial_range: float\r\n Initial range for the optimizer.\r\n\r\n tol: float\r\n Optimizer terminates when the tolerance tol is reached.\r\n\r\n \"\"\"\r\n # Run input checks\r\n # Transforms floats and arrays in lists to have a multifidelity\r\n # structure\r\n self._check_list_structure(X, y)\r\n # Checks if all parameters are structured as required\r\n self._check_params()\r\n\r\n X = self.X\r\n y = self.y\r\n nlevel = self.nlevel\r\n n_samples = self.n_samples\r\n\r\n # initialize lists\r\n self.beta = nlevel * [0]\r\n self.beta_rho = nlevel * [None]\r\n self.beta_regr = nlevel * [None]\r\n self.C = nlevel * [0]\r\n self.D = nlevel * [0]\r\n self.F = nlevel * [0]\r\n self.p = nlevel * [0]\r\n self.q = nlevel * [0]\r\n self.G = nlevel * [0]\r\n self.sigma2 = nlevel * [0]\r\n self._R_adj = nlevel * [None]\r\n\r\n y_best = y[nlevel - 1]\r\n for i in range(nlevel - 1)[::-1]:\r\n y_best = np.concatenate((y[i][:-n_samples[i + 1]], y_best))\r\n self.y_best = y_best\r\n\r\n self.y_mean = np.zeros(1)\r\n self.y_std = np.ones(1)\r\n self.X_mean = np.zeros(1)\r\n self.X_std = np.ones(1)\r\n\r\n for lvl in range(nlevel):\r\n\r\n # Calculate matrix of distances D between samples\r\n \r\n# if (np.min(np.sum(self.D[lvl], axis=1)) == 0.):\r\n# raise Exception(\"Multiple input features cannot have the same\"\r\n# \" value.\")\r\n\r\n # Regression matrix and parameters\r\n self.F[lvl] = self.regr(X[lvl])\r\n self.p[lvl] = self.F[lvl].shape[1]\r\n\r\n # Concatenate the autoregressive part for levels > 0\r\n if lvl > 0:\r\n F_rho = self.rho_regr(X[lvl])\r\n self.q[lvl] = F_rho.shape[1]\r\n 
self.F[lvl] = np.hstack((F_rho * np.dot((self.y[lvl - 1])[-n_samples[lvl]:],\r\n np.ones((1, self.q[lvl]))), self.F[lvl]))\r\n else:\r\n self.q[lvl] = 0\r\n\r\n n_samples_F_i = self.F[lvl].shape[0]\r\n\r\n if n_samples_F_i != n_samples[lvl]:\r\n raise Exception(\"Number of rows in F and X do not match. Most \"\r\n \"likely something is going wrong with the \"\r\n \"regression model.\")\r\n\r\n if int(self.p[lvl] + self.q[lvl]) >= n_samples_F_i:\r\n raise Exception((\"Ordinary least squares problem is undetermined \"\r\n \"n_samples=%d must be greater than the regression\"\r\n \" model size p+q=%d.\")\r\n % (n_samples[i], self.p[lvl] + self.q[lvl]))\r\n\r\n # Set attributes\r\n self.X = X\r\n self.y = y\r\n\r\n self.rlf_value = np.zeros(nlevel)\r\n\r\n for lvl in range(nlevel):\r\n # Determine Gaussian Process model parameters\r\n if self.theta[lvl] is None:\r\n # Maximum Likelihood Estimation of the parameters\r\n sol = self._max_rlf(\r\n lvl=lvl, initial_range=initial_range, tol=tol)\r\n self.theta[lvl] = sol['theta']\r\n self.rlf_value[lvl] = sol['rlf_value']\r\n\r\n if np.isinf(self.rlf_value[lvl]):\r\n raise Exception(\"Bad parameter region. \"\r\n \"Try increasing upper bound\")\r\n else:\r\n self.rlf_value[lvl] = self.rlf(lvl=lvl)\r\n if np.isinf(self.rlf_value[lvl]):\r\n raise Exception(\"Bad point. Try increasing theta0.\")\r\n\r\n return\r\n\r\n def rlf(self, lvl, theta=None):\r\n \"\"\"\r\n Determine BLUP parameters and evaluate negative reduced likelihood function for theta.\r\n\r\n Maximizing this function wrt the autocorrelation parameters theta is\r\n equivalent to maximizing the likelihood of the assumed joint Gaussian\r\n distribution of the observations y evaluated onto the design of\r\n experiments X.\r\n\r\n Parameters\r\n ----------\r\n self: Multi-Fidelity Co-Kriging object\r\n\r\n lvl: Integer\r\n Level of fidelity\r\n\r\n theta: array_like, optional\r\n An array containing the autocorrelation parameters at which the\r\n Gaussian Process model parameters should be determined.\r\n Default uses the built-in autocorrelation parameters\r\n (ie ``theta = self.theta``).\r\n\r\n Returns\r\n -------\r\n rlf_value: double\r\n The value of the negative concentrated reduced likelihood function\r\n associated to the given autocorrelation parameters theta.\r\n \"\"\"\r\n if theta is None:\r\n # Use built-in autocorrelation parameters\r\n theta = self.theta[lvl]\r\n\r\n # Initialize output\r\n rlf_value = 1e20\r\n\r\n # Retrieve data\r\n n_samples = self.n_samples[lvl]\r\n y = self.y[lvl]\r\n F = self.F[lvl]\r\n p = self.p[lvl]\r\n q = self.q[lvl]\r\n\r\n R = self._build_R(lvl, theta)\r\n\r\n try:\r\n C = linalg.cholesky(R, lower=True)\r\n except linalg.LinAlgError:\r\n _logger.warning(('Cholesky decomposition of R at level %i failed' % lvl) +\r\n ' with theta=' + str(theta))\r\n return rlf_value\r\n\r\n # Get generalized least squares solution\r\n Ft = solve_triangular(C, F, lower=True)\r\n Yt = solve_triangular(C, y, lower=True)\r\n try:\r\n Q, G = linalg.qr(Ft, econ=True)\r\n except TypeError: # qr() got an unexpected keyword argument 'econ'\r\n # DeprecationWarning: qr econ argument will be removed after scipy\r\n # 0.7. 
The economy transform will then be available through the\r\n # mode='economic' argument.\r\n Q, G = linalg.qr(Ft, mode='economic')\r\n pass\r\n\r\n # Universal Kriging\r\n beta = solve_triangular(G, np.dot(Q.T, Yt))\r\n\r\n err = Yt - np.dot(Ft, beta)\r\n err2 = np.dot(err.T, err)[0, 0]\r\n self._err = err\r\n sigma2 = err2 / (n_samples - p - q)\r\n detR = ((np.diag(C))**(2. / n_samples)).prod()\r\n\r\n rlf_value = (n_samples - p - q) * np.log10(sigma2) \\\r\n + n_samples * np.log10(detR)\r\n\r\n self.beta_rho[lvl] = beta[:q]\r\n self.beta_regr[lvl] = beta[q:]\r\n self.beta[lvl] = beta\r\n self.sigma2[lvl] = sigma2\r\n self.C[lvl] = C\r\n self.G[lvl] = G\r\n\r\n return rlf_value\r\n\r\n\r\n def _max_rlf(self, lvl, initial_range, tol):\r\n \"\"\"\r\n Estimate autocorrelation parameter theta as maximizer of the reduced likelihood function.\r\n\r\n (Minimization of the negative reduced likelihood function is used for convenience.)\r\n\r\n Parameters\r\n ----------\r\n self: Most parameters are stored in the Gaussian Process model object.\r\n\r\n lvl: integer\r\n Level of fidelity\r\n\r\n initial_range: float\r\n Initial range of the optimizer\r\n\r\n tol: float\r\n Optimizer terminates when the tolerance tol is reached.\r\n\r\n Returns\r\n -------\r\n optimal_theta: array_like\r\n optimal_rlf_value: double\r\n The optimal negative reduced likelihood function value.\r\n\r\n res: dict\r\n res['theta']: optimal theta\r\n res['rlf_value']: optimal value for likelihood\r\n \"\"\"\r\n # Initialize input\r\n thetaL = self.thetaL[lvl]\r\n thetaU = self.thetaU[lvl]\r\n\r\n def rlf_transform(x):\r\n return self.rlf(theta=10.**x, lvl=lvl)\r\n\r\n # Use specified starting point as first guess\r\n theta0 = self.theta0[lvl]\r\n x0 = np.log10(theta0[0])\r\n\r\n constraints = []\r\n for i in range(theta0.size):\r\n constraints.append({'type': 'ineq', 'fun': lambda log10t, i=i:\r\n log10t[i] - np.log10(thetaL[0][i])})\r\n constraints.append({'type': 'ineq', 'fun': lambda log10t, i=i:\r\n np.log10(thetaU[0][i]) - log10t[i]})\r\n\r\n constraints = tuple(constraints)\r\n sol = minimize(rlf_transform, x0, method='COBYLA',\r\n constraints=constraints,\r\n options={'rhobeg': initial_range,\r\n 'tol': tol, 'disp': 0})\r\n\r\n log10_optimal_x = sol['x']\r\n optimal_rlf_value = sol['fun']\r\n self._nfev += sol['nfev']\r\n\r\n optimal_theta = 10. ** log10_optimal_x\r\n\r\n res = {}\r\n res['theta'] = optimal_theta\r\n res['rlf_value'] = optimal_rlf_value\r\n\r\n return res\r\n\r\n def predict(self, X, eval_MSE=True):\r\n \"\"\"\r\n Perform the predictions of the kriging model on X.\r\n\r\n Parameters\r\n ----------\r\n X: array_like\r\n An array with shape (n_eval, n_features) giving the point(s) at\r\n which the prediction(s) should be made.\r\n\r\n eval_MSE: boolean, optional\r\n A boolean specifying whether the Mean Squared Error should be\r\n evaluated or not. Default assumes evalMSE is True.\r\n\r\n Returns\r\n -------\r\n y: array_like\r\n An array with shape (n_eval, ) with the Best Linear Unbiased\r\n Prediction at X. 
If all_levels is set to True, an array\r\n with shape (n_eval, nlevel) giving the BLUP for all levels.\r\n\r\n MSE: array_like, optional (if eval_MSE is True)\r\n An array with shape (n_eval, ) with the Mean Squared Error at X.\r\n If all_levels is set to True, an array with shape (n_eval, nlevel)\r\n giving the MSE for all levels.\r\n \"\"\"\r\n X = array2d(X)\r\n nlevel = self.nlevel\r\n n_eval, n_features_X = X.shape\r\n\r\n # Calculate kriging mean and variance at level 0\r\n mu = np.zeros((n_eval, nlevel))\r\n\r\n f = self.regr(X)\r\n f0 = self.regr(X)\r\n# dx = l1_cross_distances(X, Y=self.X[0])\r\n \r\n # Get regression function and correlation\r\n F = self.F[0]\r\n C = self.C[0]\r\n\r\n beta = self.beta[0]\r\n Ft = solve_triangular(C, F, lower=True)\r\n yt = solve_triangular(C, self.y[0], lower=True)\r\n r_ = self.corr(self.theta[0], X, nu = self.nu, Y=self.X[0]).reshape(n_eval, self.n_samples[0])\r\n gamma = solve_triangular(C.T, yt - np.dot(Ft, beta), lower=False)\r\n\r\n # Scaled predictor\r\n mu[:, 0] = (np.dot(f, beta) + np.dot(r_, gamma)).ravel()\r\n\r\n if eval_MSE:\r\n self.sigma2_rho = nlevel * [None]\r\n MSE = np.zeros((n_eval, nlevel))\r\n r_t = solve_triangular(C, r_.T, lower=True)\r\n G = self.G[0]\r\n\r\n u_ = solve_triangular(G.T, f.T - np.dot(Ft.T, r_t), lower=True)\r\n MSE[:, 0] = self.sigma2[0] * \\\r\n (1 - (r_t**2).sum(axis=0) + (u_**2).sum(axis=0))\r\n\r\n # Calculate recursively kriging mean and variance at level i\r\n for i in range(1, nlevel):\r\n C = self.C[i]\r\n F = self.F[i]\r\n g = self.rho_regr(X)\r\n# dx = l1_cross_distances(X, Y=self.X[i])\r\n# r_ = self.corr(self.theta[i], dx).reshape(\r\n# n_eval, self.n_samples[i])\r\n r_ = self.corr(self.theta[i], X, nu = self.nu, Y=self.X[i]).reshape(\r\n n_eval, self.n_samples[i])\r\n f = np.vstack((g.T * mu[:, i - 1], f0.T))\r\n\r\n Ft = solve_triangular(C, F, lower=True)\r\n yt = solve_triangular(C, self.y[i], lower=True)\r\n r_t = solve_triangular(C, r_.T, lower=True)\r\n G = self.G[i]\r\n beta = self.beta[i]\r\n\r\n # scaled predictor\r\n mu[:, i] = (np.dot(f.T, beta)\r\n + np.dot(r_t.T, yt - np.dot(Ft, beta))).ravel()\r\n\r\n if eval_MSE:\r\n Q_ = (np.dot((yt - np.dot(Ft, beta)).T,\r\n yt - np.dot(Ft, beta)))[0, 0]\r\n u_ = solve_triangular(G.T, f - np.dot(Ft.T, r_t), lower=True)\r\n sigma2_rho = np.dot(g,\r\n self.sigma2[\r\n i] * linalg.inv(np.dot(G.T, G))[:self.q[i], :self.q[i]]\r\n + np.dot(beta[:self.q[i]], beta[:self.q[i]].T))\r\n sigma2_rho = (sigma2_rho * g).sum(axis=1)\r\n\r\n MSE[:, i] = sigma2_rho * MSE[:, i - 1] \\\r\n + Q_ / (2 * (self.n_samples[i] - self.p[i] - self.q[i])) \\\r\n * (1 - (r_t**2).sum(axis=0)) \\\r\n + self.sigma2[i] * (u_**2).sum(axis=0)\r\n\r\n # scaled predictor\r\n for i in range(nlevel): # Predictor\r\n mu[:, i] = self.y_mean + self.y_std * mu[:, i]\r\n if eval_MSE:\r\n MSE[:, i] = np.sqrt(self.y_std**2 * MSE[:, i])\r\n if eval_MSE:\r\n return mu[:, -1].reshape((n_eval, 1)), MSE[:, -1].reshape((n_eval, 1))\r\n else:\r\n return mu[:, -1].reshape((n_eval, 1))\r\n\r\n\r\n def _check_list_structure(self, X, y):\r\n \"\"\"\r\n Check list structure.\r\n\r\n Parameters\r\n ----------\r\n x : list\r\n\r\n y : list\r\n \"\"\"\r\n if type(X) is not list:\r\n nlevel = 1\r\n X = [X]\r\n else:\r\n nlevel = len(X)\r\n\r\n if type(y) is not list:\r\n y = [y]\r\n\r\n if len(X) != len(y):\r\n raise ValueError(\"X and y must have the same length.\")\r\n\r\n n_samples = np.zeros(nlevel, dtype=int)\r\n n_features = np.zeros(nlevel, dtype=int)\r\n n_samples_y = np.zeros(nlevel, dtype=int)\r\n 
for i in range(nlevel):\r\n n_samples[i], n_features[i] = X[i].shape\r\n if i > 1 and n_features[i] != n_features[i - 1]:\r\n raise ValueError(\"All X must have the same number of columns.\")\r\n y[i] = np.asarray(y[i]).ravel()[:, np.newaxis]\r\n n_samples_y[i] = y[i].shape[0]\r\n if n_samples[i] != n_samples_y[i]:\r\n raise ValueError(\"X and y must have the same number of rows.\")\r\n\r\n self.n_features = n_features[0]\r\n\r\n if type(self.theta) is not list:\r\n self.theta = nlevel * [self.theta]\r\n elif len(self.theta) != nlevel:\r\n raise ValueError(\"theta must be a list of %d element(s).\" % nlevel)\r\n\r\n if type(self.theta0) is not list:\r\n self.theta0 = nlevel * [self.theta0]\r\n elif len(self.theta0) != nlevel:\r\n raise ValueError(\"theta0 must be a list of %d elements.\" % nlevel)\r\n\r\n if type(self.thetaL) is not list:\r\n self.thetaL = nlevel * [self.thetaL]\r\n elif len(self.thetaL) != nlevel:\r\n raise ValueError(\"thetaL must be a list of %d elements.\" % nlevel)\r\n\r\n if type(self.thetaU) is not list:\r\n self.thetaU = nlevel * [self.thetaU]\r\n elif len(self.thetaU) != nlevel:\r\n raise ValueError(\"thetaU must be a list of %d elements.\" % nlevel)\r\n\r\n self.nlevel = nlevel\r\n self.X = X[:]\r\n self.y = y[:]\r\n self.n_samples = n_samples\r\n\r\n return\r\n\r\n def _check_params(self):\r\n\r\n # Check regression model\r\n if not callable(self.regr):\r\n if self.regr in self._regression_types:\r\n self.regr = self._regression_types[self.regr]\r\n else:\r\n raise ValueError(\"regr should be one of %s or callable, \"\r\n \"%s was given.\"\r\n % (self._regression_types.keys(), self.regr))\r\n\r\n # Check rho regression model\r\n if not callable(self.rho_regr):\r\n if self.rho_regr in self._regression_types:\r\n self.rho_regr = self._regression_types[self.rho_regr]\r\n else:\r\n raise ValueError(\"rho_regr should be one of %s or callable, \"\r\n \"%s was given.\"\r\n % (self._regression_types.keys(), self.rho_regr))\r\n\r\n for i in range(self.nlevel):\r\n # Check correlation parameters\r\n if self.theta[i] is not None:\r\n self.theta[i] = array2d(self.theta[i])\r\n if np.any(self.theta[i] <= 0):\r\n raise ValueError(\"theta0 must be strictly positive.\")\r\n\r\n if self.theta0[i] is not None:\r\n self.theta0[i] = array2d(self.theta0[i])\r\n if np.any(self.theta0[i] <= 0):\r\n raise ValueError(\"theta0 must be strictly positive.\")\r\n else:\r\n self.theta0[i] = array2d(self.n_features * [THETA0_DEFAULT])\r\n\r\n lth = self.theta0[i].size\r\n\r\n if self.thetaL[i] is not None:\r\n self.thetaL[i] = array2d(self.thetaL[i])\r\n if self.thetaL[i].size != lth:\r\n raise ValueError(\"theta0 and thetaL must have the \"\r\n \"same length.\")\r\n else:\r\n self.thetaL[i] = array2d(self.n_features * [THETAL_DEFAULT])\r\n\r\n if self.thetaU[i] is not None:\r\n self.thetaU[i] = array2d(self.thetaU[i])\r\n if self.thetaU[i].size != lth:\r\n raise ValueError(\"theta0 and thetaU must have the \"\r\n \"same length.\")\r\n else:\r\n self.thetaU[i] = array2d(self.n_features * [THETAU_DEFAULT])\r\n\r\n if np.any(self.thetaL[i] <= 0) or np.any(self.thetaU[i] < self.thetaL[i]):\r\n raise ValueError(\"The bounds must satisfy O < thetaL <= \"\r\n \"thetaU.\")\r\n\r\n return\r\n\r\n\r\n#class MultiFiCoKrigingSurrogate(MultiFiSurrogateModel):\r\n# \"\"\"\r\n# OpenMDAO adapter of multi-fidelity recursive cokriging method described in [LeGratiet2013].\r\n#\r\n# See MultiFiCoKriging class.\r\n# \"\"\"\r\n#\r\n#\r\n# def __init__(self, regr='constant', rho_regr='constant',\r\n# theta=None, 
theta0=None, thetaL=None, thetaU=None,\r\n# tolerance=TOLERANCE_DEFAULT, initial_range=INITIAL_RANGE_DEFAULT):\r\n# \"\"\"\r\n# Initialize all attributes.\r\n# \"\"\"\r\n# super(MultiFiCoKrigingSurrogate, self).__init__()\r\n#\r\n# self.tolerance = tolerance\r\n# self.initial_range = initial_range\r\n# self.model = MultiFiCoKriging(regr=regr, rho_regr=rho_regr, theta=theta,\r\n# theta0=theta0, thetaL=thetaL, thetaU=thetaU)\r\n#\r\n# def predict(self, new_x):\r\n# \"\"\"\r\n# Calculate a predicted value of the response based on the current trained model.\r\n# \"\"\"\r\n# Y_pred, MSE = self.model.predict([new_x])\r\n# return Y_pred, np.sqrt(np.abs(MSE))\r\n#\r\n# def train_multifi(self, X, Y):\r\n# \"\"\"\r\n# Train the surrogate model with the given set of inputs and outputs.\r\n# \"\"\"\r\n# X, Y = self._fit_adapter(X, Y)\r\n# self.model.fit(X, Y, tol=self.tolerance,\r\n# initial_range=self.initial_range)\r\n#\r\n#\r\n# def _fit_adapter(self, X, Y):\r\n# \"\"\"\r\n# Manage special case with one fidelity.\r\n#\r\n# where can be called as [[xval1],[xval2]] instead of [[[xval1],[xval2]]]\r\n# we detect if shape(X[0]) is like (m,) instead of (m, n)\r\n# \"\"\"\r\n# if len(np.shape(np.array(X[0]))) == 1:\r\n# X = [X]\r\n# Y = [Y]\r\n#\r\n# X = [np.array(x) for x in reversed(X)]\r\n# Y = [np.array(y) for y in reversed(Y)]\r\n# return (X, Y)\r\n#\r\n#\r\n#class FloatMultiFiCoKrigingSurrogate(MultiFiCoKrigingSurrogate):\r\n# \"\"\"\r\n# Predictions are returned as the mean of the NormalDistribution predicted by base class model.\r\n# \"\"\"\r\n# def predict(self, new_x):\r\n# \"\"\"\r\n# Calculate a predicted value of the response based on the current trained model.\r\n#\r\n# Parameters\r\n# ----------\r\n# new_x : array-like\r\n# Point(s) at which the surrogate is evaluated.\r\n# \"\"\"\r\n# dist = super(FloatMultiFiCoKrigingSurrogate, self).predict(new_x)\r\n# return dist.mu\r\n#\r\n#\r\n#\r\n#if __name__ == \"__main__\":\r\n# import doctest\r\n# doctest.testmod()" }, { "alpha_fraction": 0.6897424459457397, "alphanum_fraction": 0.6971862316131592, "avg_line_length": 60.063636779785156, "blob_id": "12974e80e2f227401daa6358e975127d4c2d2368", "content_id": "94922d75b93078a17a2a0a3f559b08d7745758b4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6717, "license_type": "permissive", "max_line_length": 533, "num_lines": 110, "path": "/demo.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "import HighThroughput.manage\nfrom HighThroughput.communication.mysql import mysql_query, owner\nfrom time import sleep\nimport sys\ndef run():\n input('Welcome to the alpha test demo. This script will show you the main functionalities of the Queue Manager, once done please look inside the script to understand the various functions used. It is recommended to look through the case study in the manual beforehand. This script will reproduce what was shown there. \\n\\n To start please login to http://physics.epotentia.com/queue/ (only for the demo) then press any key to continue.')\n print('\\n\\nLogging in'),\n\n #Yes this is cheap, I know\n for i in range(1,4):\n sleep(1)\n sys.stdout.flush()\n print('.',)\n\n #These are deep level MySQL functions, you are discouraged from using them, but if you need to you can (ask me). All MySQL functions are supported, including statistical functions. The underlying library here tries to automatically limit your access to only your data. 
For this it does pattern recognition on your queries, very advanced queries may conflict with this. The owner variable is automatically set and contains your user id.\n name = mysql_query('SELECT `name` FROM `accounts` WHERE `id` = ' + owner)\n print('\\n\\n Welcome ' + name['name'] + ', are you ready to begin?')\n input()\n\n print('Great! We will start by setting up a settings and results template.')\n \n settings = {'Temperature' : 180, 'time' : 20}\n print('\\n\\nThe settings dictionary: ')\n print(settings)\n settingsid = HighThroughput.manage.template.add('Baking settings',settings,'VASP','settings')\n \n results = {'Crispiness' : '', 'Weight' : 0}\n print('\\n\\nThe results dictionary: ')\n print(results)\n resultsid = HighThroughput.manage.template.add('Baking results',results,'VASP','results')\n \n input('\\n\\nWe now have two templates ready to use in our workflow. Ready to add workflow?')\n \n name = 'Baking'\n button = ['Ingr. ready', 'Mixing', 'Mixed', 'Kneading', 'Kneaded', 'Rising', 'Risen', 'Baking', 'Finished']\n description = ['Ingredients ready', 'Mixing [server]', 'Mixed', 'Kneading [server]', 'Kneaded', 'Rising [server]', 'Risen', 'Baking [settings:Temperature]', 'Finished [results:weight]']\n buttonclass = ['warning','info','warning','info','warning','info','warning','info','success']\n entries = [{'stat' : i, 'stemplate' : settingsid, 'rtemplate' : resultsid, 'description' : description[i], 'buttonname' : button[i], 'buttonclass' : buttonclass[i], 'priority' : 1} for i in range(0,9)]\n print('')\n print(entries)\n print('')\n wid = HighThroughput.manage.workflow.add(name,entries)\n\n input('\\n\\nThe Baking workflow is ready. Ready to add queue?\\n')\n \n #Doesn't conserve order, I should fix that... Can use string instead a:b,c:d,...\n #fields = {'ID' : 'id', 'Info' : 'text', 'Status' : 'stat', 'Start' : 'start', 'End' : 'end'}\n fields = 'ID:id,Info:text,Status:stat,Start:start,End:end'\n qid = HighThroughput.manage.queue.add('Jan de Bakker, Inc.', wid, fields)\n\n input('\\n\\nPlease go to the website and click Queues => View queues. Currently the website does not refresh automatically when new calculations are added, but you can just click the tab again. (Updates are autorefreshed (for quick updates you can set the refresh rate to 3000ms, going lower may be overkill.)\\n\\nIngredients would be added as materials normally, but to avoid unnecessary fake file creation we\\'ll base our ingredients off a set of COD entries starting with FCC Al (CIF ID: 9012002). Ready to add calculations?\\n')\n print('Adding calculations.\\n')\n #add(material,queue,priority = 0,settings = None,results = None), templates are gotten from workflow by default\n calcs = []\n #I was going to edit this but then realized it's a bad idea to start renaming COD's materials for future usage yummy = ['Bread','Muffin','Cherry Pie','Pancake','Choccie Biscuit', 'Banana eclaire', 'Michel\\'s cake','Cookies!','Sinterklaasventje','You get the point by now.']\n for i in range(0,10):\n calcs.append(HighThroughput.manage.calculation.add(9012002+i,qid,0))\n #fix the names\n #HighThroughput.manage.material.modify({'id' : , 'text' : yummy[i]})\n\n input('\\n\\nYou can now see the calculations on the website. Next is a demonstration of how calculations are managed. You should be able to follow this live on the website. Set your refresh rate to 3000 ms. 
Ready?')\n \n print('\\n\\nFetching a waiting calculation from the queue',)\n \n #Add a bit of drama, the real system is of course instant!\n for i in range(1,4):\n sleep(1)\n sys.stdout.flush()\n print('.',)\n\n fid = HighThroughput.manage.calculation.fetch(qid)\n print('\\n\\n Found calculation id ' + str(fid) + '. Get full calc info?\\n\\n')\n\n calculation = HighThroughput.manage.calculation.get(fid)\n\n print(calculation)\n\n input('\\n\\n Start calculation?')\n \n HighThroughput.manage.calculation.start()\n\n input('\\n\\n End calculation?')\n \n HighThroughput.manage.calculation.end()\n\n #We're moving the the calculation back to before the last active status so 1 or 2 steps, if you want to rollback further please use the rollback function (calculations beyond that status will be deleted!)\n input('\\n\\n Now if something went wrong... Restart calculation?')\n\n HighThroughput.manage.calculation.restart()\n \n input('\\n\\n Up the pace? Keep in mind each blue workflow is a new \\'calculation\\' and you\\'ll have to manually click the queue currently (will fix sometime).')\n for i in range(0,20):\n fid = HighThroughput.manage.calculation.fetch(qid)\n calculation = HighThroughput.manage.calculation.get(fid)\n HighThroughput.manage.calculation.start()\n print('Starting calculation ' + str(HighThroughput.manage.calculation.calcid) + ' (status ' + str(HighThroughput.manage.calculation.stat) + ').')\n HighThroughput.manage.calculation.end()\n \n #Store the results, based on the template, can be put in the workflow description with [results:weight] etc, settings is similarly accessible.\n HighThroughput.manage.calculation.updateResults({'weight': (i+1)*50, 'Crispiness' : 'Just right'})\n \n print('Ending calculation ' + str(HighThroughput.manage.calculation.calcid) + ' (status ' + str(HighThroughput.manage.calculation.stat) + ').')\n \n input('\\n\\nContinuing will clean up everything added during this demo.')\n \n \n HighThroughput.manage.queue.remove(qid)\n HighThroughput.manage.workflow.removeAll(wid)\n HighThroughput.manage.template.remove(settingsid)\n HighThroughput.manage.template.remove(resultsid)\n" }, { "alpha_fraction": 0.5960264801979065, "alphanum_fraction": 0.5986754894256592, "avg_line_length": 34.71428680419922, "blob_id": "2bfdc6e62f3e44e32bdbae48f44813ca3774d5af", "content_id": "a428399b2074630ba8ac8cb7dbcdfa1cd462666c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 755, "license_type": "permissive", "max_line_length": 169, "num_lines": 21, "path": "/HTtools/HTdupes", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport HighThroughput as HT\nimport sys\nfrom HighThroughput.communication.mysql import *\n\nqueue = HT.manage.queue.get(sys.argv[1])\nworkflow = HT.manage.workflow.get(queue['workflow'])\ndupelist = []\nfor wf in workflow:\n dupes = mysql_query('SELECT `file` FROM `calculations` WHERE `queue` = ' + queue['id'] + ' AND `stat` = ' + wf['stat'] + ' GROUP BY `file` HAVING COUNT(`file`) > 1')\n \n if not isinstance(dupes, str):\n if isinstance(dupes, list):\n for dupe in dupes:\n if dupe['file'] not in dupelist:\n dupelist.append(dupe['file'])\n else:\n if dupes['file'] not in dupelist:\n dupelist.append(dupes['file'])\n\nprint('\\n'.join(dupelist))\n \n" }, { "alpha_fraction": 0.5493603944778442, "alphanum_fraction": 0.5599479675292969, "avg_line_length": 36.49187088012695, "blob_id": "fefa9e714985309d1f1e07d225ea458284d66453", 
"content_id": "864be09a5ab7ddd3ce6a5171782f45e7afd3a6e1", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 27674, "license_type": "permissive", "max_line_length": 163, "num_lines": 738, "path": "/modules/VASP.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "from HighThroughput.utils.generic import mkdir, execute, getNodeInfo, isfloat,resubmit\nfrom HighThroughput.utils.eos import EquationOfState\nfrom HighThroughput.io.VASP import rescalePOSCAR, writeINCAR, writeKPOINTS, readINCAR, readKPOINTS\nimport os, time, shutil, subprocess, threading, sys, ase.io,json\nfrom HighThroughput.config import vasp\nimport HighThroughput.manage.calculation as manage\nfrom HighThroughput.communication.mysql import mysql_query\nfrom numpy.linalg import norm\nimport numpy as np\nfrom pymatgen.io.vasp.outputs import Vasprun\nimport HighThroughput.manage.calculation as manage\nfrom retry import retry\n\nimport warnings\nwarnings.filterwarnings(\"ignore\")\nimport pymatgen\nfrom pymatgen.ext.matproj import MPRester\nfrom pymatgen import Composition\nfrom pymatgen.entries.computed_entries import ComputedEntry\nfrom pymatgen.core.units import FloatWithUnit\nfrom pymatgen.analysis.reaction_calculator import ComputedReaction\nfrom pymatgen.apps.borg.hive import VaspToComputedEntryDrone\nfrom pymatgen.apps.borg.queen import BorgQueen\nfrom pymatgen.analysis.phase_diagram import *\nfrom pymatgen.entries.compatibility import MaterialsProjectCompatibility\n\n#cleanup function\n\ndef inherit(calc,path,contcar=True,chgcar=True,wavecar=True,settingsmod=None,grid=False,rescale=1.0):\n #pstep = int(math.ceil(float(stat)/2.)) -1\n if path is None:\n return True\n #inputfile = os.path.join(qdir, 'import', str(cfile) + '.vasp')\n \n #qdir, 'CALCULATIONS/' + cfile + '/STEP' + str(pstep)\n if contcar:\n contcarnames = ['CONTCAR','POSCAR' + calc['file'] + '.vasp','POSCAR' + calc['file'], calc['file'], calc['file'] + '.vasp']\n for name in contcarnames: \n temp = os.path.join(path, name)\n if os.path.isfile(temp):\n inputfile = temp\n print('Inheriting geometry from ' + inputfile + '.')\n shutil.copy(inputfile, './POSCAR')\n rescalePOSCAR('POSCAR',rescale)\n break;\n\n if chgcar:\n chgcarnames = ['CHGCAR.gz','CHGCAR','CHGCAR' + calc['file'] + '.gz','CHGCAR' + calc['file']]\n for name in chgcarnames: \n temp = os.path.join(path, name)\n if os.path.isfile(temp):\n density = temp\n out = 'CHGCAR'\n if density[-3:] == '.gz':\n out += '.gz'\n print('Inheriting charge density from ' + density + '.')\n shutil.copy(density, out)\n if calc['settings']['INCAR'].get('ICHARG') is None:\n calc['settings']['INCAR']['ICHARG'] = 1\n break;\n \n if wavecar:\n wavecarnames = ['WAVECAR.gz','WAVECAR','WAVECAR' + calc['file']+ '.gz','WAVECAR' + calc['file']]\n for name in wavecarnames: \n temp = os.path.join(path, name)\n if os.path.isfile(temp):\n wavecar = temp\n out = 'WAVECAR'\n if wavecar[-3:] == '.gz':\n out += '.gz'\n print('Inheriting wave functions from ' + wavecar + '.')\n shutil.copy(wavecar, out)\n break;\n if grid:\n outcar = os.path.join(path, 'OUTCAR')\n ng = execute('grep \"dimension x,y,z NGX\" ' + outcar + ' | head -n 1').strip().split()\n calc['settings']['INCAR']['NGX'] = int(ng[4])\n calc['settings']['INCAR']['NGY'] = int(ng[7])\n calc['settings']['INCAR']['NGZ'] = int(ng[10])\n \n if settingsmod:\n presults = manage.getResults(calc['parent'])\n presults['settingsmod'] = settingsmod\n 
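# Note (comment added): the merged settings modifications are written back to the parent record below, so a restarted child inherits the same calibration.\n 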
manage.updateResults(presults,calc['parent'])\n print('These setting mods are inherited:')\n print(presults['settingsmod'])\n if settingsmod.get('KPOINTS') is not None and calc['settings'].get('KPOINTS').get('K') is not None:\n curkp = [int(x) for x in calc['settings']['KPOINTS']['K'].split(' ')]\n curmod = [int(x) for x in settingsmod['KPOINTS']['K'].split(' ')]\n calc['settings']['KPOINTS']['K'] = ' '.join([str(curkp[x] + curmod[x]) for x in range(3)])\n print(curkp,curmod)\n print('Calibration update to kpoints executed.')\n if settingsmod.get('INCAR') is not None:\n if settingsmod.get('INCAR').get('ENCUT') is not None:\n calc['settings']['INCAR']['ENCUT'] = int(calc['settings']['INCAR']['ENCUT']) + settingsmod['INCAR']['ENCUT']\n\n if settingsmod.get('INCAR').get('SIGMA') is not None:\n calc['settings']['INCAR']['SIGMA'] = settingsmod['INCAR']['SIGMA']\n\n if settingsmod.get('INCAR').get('ISMEAR') is not None:\n if int(calc['settings']['INCAR']['ISMEAR']) != -5:\n calc['settings']['INCAR']['ISMEAR'] = settingsmod['INCAR']['ISMEAR']\n\n if settingsmod.get('INCAR').get('ISPIN') is not None:\n calc['settings']['INCAR']['ISPIN'] = settingsmod['INCAR']['ISPIN']\n\n #if os.path.isfile('CHGCAR'):\n # os.rename('CHGCAR','CHGCAR.prec')\n return calc\n\ndef abort(cinfo,delay=0,mode = 0):\n # either switch between electronic and ionic or auto based on ibrion is possible\n #for now 0 is electronic stop, 1 ionic\n print('Aborting calculation with delay of ' + str(delay) + ' in dir ' + os.getcwd())\n time.sleep(delay)\n f = open('STOPCAR','w')\n if mode ==0:\n f.write('LABORT=.TRUE.')\n else:\n f.write('LSTOP=.TRUE.')\n f.close()\n open('aborted', 'a').close()\n manage.restart(cinfo['id'])\n psettings = manage.getSettings(manage.calcid)\n if 'continue' in psettings.keys():\n psettings['continue'] = str(int(psettings['continue']) + 1)\n else:\n psettings['continue'] = '1'\n manage.modify({'settings' : psettings, 'id' : manage.calcid})\n return 0\n\n\ndef checkpointStart(cinfo,early=4400):\n walltime = int(os.getenv('PBS_WALLTIME'))\n thread = threading.Thread(target=abort,args=(cinfo,walltime-early,0))\n thread.daemon = True\n thread.start()\n return 0\n\ndef cont(calc):\n print('DEBUG: continue')\n baks = 0\n bako = 0\n bakx = 0\n bakt = 0\n bakv = 0\n\n for file in os.listdir(os.curdir):\n if os.path.isfile(file) and file[0:10] == 'POSCAR.bak':\n baks += 1\n if os.path.isfile(file) and file[0:10] == 'OUTCAR.bak':\n bako += 1\n if os.path.isfile(file) and file[0:11] == 'XDATCAR.bak':\n bakx += 1\n if os.path.isfile(file) and file[0:11] == 'tempout.bak':\n bakt += 1\n if os.path.isfile(file) and file[0:11] == 'vasprun.bak':\n bakv += 1\n\n if os.path.isfile('CONTCAR') and os.stat('CONTCAR').st_size > 0:\n os.rename('POSCAR','POSCAR.bak' + str(baks))\n os.rename('CONTCAR','POSCAR')\n if os.path.isfile('OUTCAR') and os.stat('OUTCAR').st_size > 0:\n os.rename('OUTCAR','OUTCAR.bak' + str(bako))\n if os.path.isfile('XDATCAR') and os.stat('XDATCAR').st_size > 0:\n os.rename('XDATCAR','XDATCAR.bak' + str(bakx))\n if os.path.isfile('tempout') and os.stat('tempout').st_size > 0:\n os.rename('tempout','tempout.bak' + str(bakt))\n if os.path.isfile('vasprun.xml') and os.stat('vasprun.xml').st_size > 0:\n os.rename('vasprun.xml','vasprun.bak' + str(bakv))\n \n psettings = manage.getSettings(calc['parent'])\n presults = manage.getResults(calc['parent'])\n \n if 'continued' not in psettings.keys():\n psettings['continued'] = 1\n else:\n psettings['continued'] += 1\n\n\n if presults.get('settingsmod') is not 
None:\n if presults['settingsmod'].get('KPOINTS') is not None and calc['settings'].get('KPOINTS') is not None:\n if presults['settingsmod'].get('KPOINTS').get('K') is not None and calc['settings']['KPOINTS'].get('K') is not None:\n curkp = [int(x) for x in calc['settings']['KPOINTS']['K'].split(' ')]\n curmod = [int(x) for x in presults['settingsmod']['KPOINTS']['K'].split(' ')]\n calc['settings']['KPOINTS']['K'] = ' '.join([str(curkp[x] + curmod[x]) for x in range(3)])\n \n\n if presults.get('settingsmod').get('INCAR') is not None:\n if presults.get('settingsmod').get('INCAR').get('ENCUT') is not None:\n calc['settings']['INCAR']['ENCUT'] = int(calc['settings']['INCAR']['ENCUT']) + \\\n presults['settingsmod']['INCAR']['ENCUT']\n if presults.get('settingsmod').get('INCAR').get('SIGMA') is not None:\n calc['settings']['INCAR']['SIGMA'] = presults['settingsmod']['INCAR']['SIGMA']\n\n if presults.get('settingsmod').get('INCAR').get('ISMEAR') is not None:\n if int(calc['settings']['INCAR']['ISMEAR']) != -5:\n calc['settings']['INCAR']['ISMEAR'] = presults['settingsmod']['INCAR']['ISMEAR']\n\n if presults.get('settingsmod').get('INCAR').get('ISPIN') is not None:\n calc['settings']['INCAR']['ISPIN'] = presults['settingsmod']['INCAR']['ISPIN']\n\n manage.updateSettings(psettings, calc['parent'])\n manage.updateResults(presults, calc['parent'])\n\n return calc\n\ndef finish():\n #end and readresults, readsettings too possibly, makecif populateresults and convert E/atom etc possibilities of using tables for chemical potentials\n #DOS and bandstructure options here too or in seperate postprocess func\n #Incorporate HTfinish, other httools should go somewhere too\n print('')\n return 0\n\ndef initialize(settings,hard = ''):\n #print 'write incar kpoints potcar, make directory?'\n #inherit()\n writeSettings(settings)\n poscar = open('./POSCAR','r')\n lines = poscar.readlines()\n elements = lines[5][:-1].strip()\n execute('POTgen' + str(hard) + ' ' + str(elements))\n return 0\n\ndef prepare(settings):\n #preparing any configs, can turn on SP and SO here too\n parallelSetup(settings)\n #print 'settings should be modified anyways'\n return settings\n\n\ndef getIBZKPT(symmetry=True):\n curdir = os.getcwd()\n if os.path.isdir(os.path.join(curdir, 'genIBZKPT')):\n os.system('rm -rf ' + os.path.join(curdir, 'genIBZKPT'))\n\n mkdir('genIBZKPT')\n os.chdir(os.path.join(curdir,'genIBZKPT'))\n shutil.copy('../POSCAR','./POSCAR')\n shutil.copy('../POTCAR','./POTCAR')\n shutil.copy('../INCAR','./INCAR')\n\n if symmetry == False:\n f = open('./INCAR', 'a+')\n f.write('\\nISYM=0')\n f.close()\n\n shutil.copy('../KPOINTS','./KPOINTS')\n genIBZKPT = subprocess.Popen(vasp,shell=True,stdout=subprocess.PIPE,stderr=subprocess.PIPE)\n i = 0\n while(not os.path.isfile(os.path.join(curdir,'genIBZKPT','IBZKPT'))):\n time.sleep(1)\n i+=1\n if i == 600:\n print('IBZKPT NOT GENERATED ERROR')\n os.chdir(curdir)\n os.system('rm -rf ' + os.path.join(curdir, 'genIBZKPT'))\n raise ValueError\n genIBZKPT.terminate()\n f = open('IBZKPT', 'r')\n lines = f.readlines()\n f.close()\n os.chdir(curdir)\n os.system('rm -rf ' + os.path.join(curdir,'genIBZKPT'))\n\n return int(lines[1].strip())\n\ndef detectU(poscar):\n poscar = open(os.path.join(poscar),'r')\n lines = poscar.readlines()\n elements = list(filter(None,lines[5][:-1].lstrip().split(' ')))\n if 'O' in elements or 'F' in elements:\n condition1 = True\n else:\n condition1 = False\n Uel = set(['Co', 'Cr', 'Fe', 'Mn', 'Mo', 'Ni', 'V', 'W'])\n Ldict = {'Co': 2, 'Cr': 2, 'Fe': 2, 'Mn': 
2, 'Mo': 2, 'Ni': 2, 'V': 2, 'W': 2}\n Udict = {'Co': 3.32, 'Cr': 3.7, 'Fe': 5.3, 'Mn': 3.9, 'Mo': 4.38, 'Ni': 6.2, 'V': 3.25, 'W': 6.2}\n L = []\n U = []\n J = []\n\n detels = []\n for el in elements:\n J.append('0.00')\n if el in Ldict.keys() and condition1:\n L.append(str(Ldict[el]))\n U.append(str(Udict[el]))\n else:\n L.append('-1')\n U.append('0.00')\n \n return (' '.join(L), ' '.join(U), ' '.join(J)) \n\n\ndef detectSP(poscar):\n poscar = open(os.path.join(poscar),'r')\n lines = poscar.readlines()\n elements = lines[5][:-1].lstrip()\n magel = set(['O','Ni','Cr','Co','Fe','Mn','Ce','Nd','Sm','Eu','Gd','Tb','Dy','Ho','Er','Tm']);\n magnetic = False;\n\n for magn in magel:\n if magn in elements.split(' '):\n magnetic = True;\n\n return magnetic\n\ndef detectSO(poscar):\n poscar = open(os.path.join(poscar),'r')\n lines = poscar.readlines()\n elements = lines[5][:-1].lstrip()\n relel = set(['Cs','Ba','La','Lu','Hf','Ta','W','Re','Os','Ir','Pt','Au','Hg','Tl','Pb','Bi','Po','At','Rn']);\n\n relativistic = False;\n\n for rel in relel:\n if rel in elements.split(' '):\n relativistic = True;\n\n return relativistic\n\ndef run(ratio = 1,cwd = None):\n global vasp\n #could move hybrid to parallel setup\n if cwd is None:\n cwd = os.getcwd();\n nodes = getNodeInfo()\n\n #cores = mysql_query('SELECT `cores` FROM `clusters` WHERE `name` = ' + str(os.getenv('VSC_INSTITUTE_CLUSTER')))\n hybrid = str(int(min(nodes.values())/int(ratio)))\n total = sum(nodes.values())\n #return execute('mympirun -h ' + hybrid + ' --output ' + cwd + '/tempout ' + vasp)\n print('mpirun -np ' + str(total) + ' ' + vasp + ' > tempout')\n return execute('mpirun -np ' + str(total) + ' ' + vasp + ' > tempout')\n\ndef readSettings(settings):\n settings['INCAR'] = readINCAR()\n settings['KPOINTS'] = readKPOINTS()\n POTCAR_version = execute('grep -a \\'TITEL\\' POTCAR | awk \\'{ print $4 }\\'')\n settings['POTCAR'] = POTCAR_version.strip().replace('\\n',', ')\n #read/writePOTCAR would be useful\n return settings\n\ndef parallelSetup(settings):\n try:\n numKP = getIBZKPT()\n except:\n numKP = getIBZKPT(symmetry=False)\n nodes = getNodeInfo()\n ncore = min(nodes.values())\n kpar = min(len(nodes),numKP)\n\n settings['INCAR']['NCORE'] = ncore\n if 'NPAR' in settings['INCAR'].keys():\n settings['INCAR'].pop('NPAR', None)\n if 'LHFCALC' in settings['INCAR'].keys():\n if settings['INCAR']['LHFCALC'] == '.TRUE.':\n settings['INCAR']['NCORE'] = 1\n\n #unsure about HFc\n if 'ALGO' in settings['INCAR'].keys():\n if settings['INCAR']['ALGO'][0:2] == 'GW' or settings['INCAR']['ALGO'] == 'ACFDT' or settings['INCAR']['ALGO'] == 'HFc':\n settings['INCAR']['NCORE'] = 1\n\n settings['INCAR']['KPAR'] = kpar\n return settings\n\ndef setupDir(settings):\n #print 'can make potcar and writesettings'\n #inherit too\n writeSettings(settings)\n return 0\n\ndef writeSettings(settings):\n writeKPOINTS(settings['KPOINTS'])\n writeINCAR(settings['INCAR'])\n return 0\n\ndef eosPrepare(directory = None, evname = 'EOS'):\n if directory is None:\n directory = os.getcwd()\n currentdir = os.getcwd()\n os.chdir(directory)\n\n if not os.path.isfile('./1.0/POSCAR'):\n return False\n\n poscar = open('./1.0/POSCAR', 'r')\n lines = poscar.readlines()\n\n numberofatoms = lines[6][:-1].lstrip()\n numberofatoms = \" \".join(numberofatoms.split())\n numberofatoms = sum(map(int, numberofatoms.split(' ')))\n\n #Setup e-v.dat\n eos = {}\n with open(evname,'w') as eosfile:\n for i in sorted(os.listdir()):\n if os.path.isdir(i) and i.replace('.','',1).isdigit():\n E = 
execute('grep \\'energy without entropy\\' ' + i + '/OUTCAR | tail -1 | awk \\'{ print $7 }\\'').strip()\n V = execute('grep vol ' + i + '/OUTCAR | tail -n 1 | awk \\'{print $5}\\'').strip()\n eos[i] = (float(E)/numberofatoms,V)\n eosfile.write(V + ' ' + str(E) + '\\n')\n \n os.chdir(currentdir)\n return eos\n\ndef eosFit(directory = None, evname = 'EOS'):\n if directory is None:\n directory = os.getcwd()\n currentdir = os.getcwd()\n os.chdir(directory)\n os.chdir('../')\n \n data = np.loadtxt(evname)\n\n eos = EquationOfState(data[:,0], data[:,1])\n v0, e0, B, BP, residuals = eos.fit()\n B *= eos._e * 1.0e21\n\n outfile = open(evname.replace('.eos','') + '.eosout', 'w')\n if not isinstance(v0,np.float64):\n outfile.write('Could not fit an EOS.')\n else:\n outfile.write('Equation Of State parameters - least square fit of a real Birch Murnaghan curve' + '\\n' + '\\n')\n outfile.write('V0 \\t %.5f \\t A^3 \\t \\t %.4f \\t b^3 \\n' % (v0,v0/eos.b3))\n outfile.write('E0 \\t %.6f \\t eV \\t \\t %.6f \\t Ry \\n' % (e0,e0/eos.Ry))\n outfile.write('B \\t %.3f \\t GPa \\n' % (B))\n outfile.write('BP \\t %.3f \\n' % BP)\n outfile.write('\\n')\n outfile.write('1-R^2: '+str(residuals)+'\\n')\n eos.plot(filename=evname + '.png', show=None)\n outfile.close()\n\n\n\n os.chdir(currentdir)\n return v0, e0, B, BP, residuals\n\ndef eosRollback(calc, evname = 'EOS'):\n print('Commencing EOS rollback due to detected errors. ')\n step = int(np.ceil(float(calc['stat'])/2))\n crit = calc['results']['eoscheck']\n\n edir = crit['dirs'][step].split('/')[-2]\n print('The EOS dir is ' + edir)\n\n vols = sorted([x.split('/')[-1] for x in crit['dirs'] if edir in x])\n\n f = open(os.path.join('../',evname), 'r')\n enmin = 999999999.999999999\n i = 0\n for line in f.readlines():\n en = float(line.split()[1])\n\n if en < enmin:\n enmin = en\n volmin = vols[i]\n i += 1\n\n os.chdir('../')\n odir = 'old' + str(time.time())\n print('Backing up old EOS in ' + str(odir))\n\n if not os.path.isdir(odir):\n mkdir(odir)\n\n for v in vols:\n if os.path.isdir(v):\n shutil.copytree(v, os.path.join(odir,v))\n for i in ['CHGCAR', 'CHGCAR.gz', 'WAVECAR', 'WAVECAR.gz', 'CHG']:\n if os.path.isfile(os.path.join(odir, v, i)):\n os.remove(os.path.join(odir, v, i))\n\n for v in [evname, evname + '.eosout',evname + '.png']:\n if os.path.isfile(v):\n shutil.copy(v,os.path.join(odir, v))\n os.remove(v)\n print('DEBUG: volmin', volmin)\n if volmin != '1.0':\n for i in ['CHGCAR','CHGCAR.gz','WAVECAR','WAVECAR.gz','CONTCAR']:\n if os.path.isfile(os.path.join(volmin, i)):\n shutil.copy(os.path.join(volmin, i),os.path.join('1.0', i))\n\n vols.remove('1.0')\n\n for v in vols:\n shutil.rmtree(v)\n\n tstat = int(calc['stat']) - 2*(len(vols) + 1)+1\n print('Rolling back to stat ' + str(tstat))\n parent = mysql_query('SELECT `id` FROM `calculations` WHERE `queue` = ' + str(calc['queue']) + ' AND `file` = ' + calc['file'] + ' AND `stat` = ' + str(tstat))\n\n psettings = manage.getSettings(parent['id'])\n if 'continue' in psettings.keys():\n psettings['continue'] = str(int(psettings['continue']) + 1)\n else:\n psettings['continue'] = '1'\n manage.modify({'settings': psettings, 'id': parent['id']})\n\n manage.rollback(tstat, calc['id'])\n resubmit()\n exit()\n return True\n\n\ndef name(potcar):\n name = ''\n for p in potcar:\n temp = (p.split(' ')[-2].split('_')[0])\n name += temp\n return name\ndef getPotCorr():\n potcorr = json.load(open(os.path.join(os.path.dirname(__file__), '../ML/data/potcarcorr.json')))\n\n if os.path.isfile('CONTCAR'):\n poscar = 
open( 'CONTCAR', 'r')\n else:\n poscar = open( 'POSCAR', 'r')\n lines = poscar.readlines()\n\n species = list(filter(None, subprocess.Popen('grep TITEL POTCAR | awk \\'{print $4}\\'', stdout=subprocess.PIPE,\n shell=True).communicate()[0].decode().split('\\n')))\n numberofatoms = lines[6][:-1].lstrip()\n numberofatoms = \" \".join(numberofatoms.split())\n natoms = numberofatoms.split(' ')\n numberofatoms = sum(map(int, natoms))\n\n corr = 0\n for i in range(len(natoms)):\n corr += float(potcorr[species[i]]) * float(natoms[i])\n\n corr /= numberofatoms\n\n return corr\n\n\n@retry((ValueError, TypeError), tries=3, delay=30, backoff=2)\ndef getEhull(new=''):\n drone = VaspToComputedEntryDrone()\n queen = BorgQueen(drone, './', 4)\n entriesorig = queen.get_data()\n queen.load_data(os.path.join(os.path.dirname(__file__), '../ML/data/missingels.json'))\n entriesextra = queen.get_data()\n\n\n if new != '':\n compat = MaterialsProjectCompatibility(check_potcar=False)\n entriesorig = compat.process_entries(entriesorig)\n\n for entry in entriesorig:\n name = entry.name\n line = re.findall('[A-Z][^A-Z]*', name.replace('(', '').replace(')', ''))\n\n searchset = set(re.sub('\\d', ' ', ' '.join(line)).split())\n entries = filter(lambda e: set(re.sub('\\d', ' ', str(e.composition).replace(' ', '')).split()) == searchset,\n entriesorig)\n\n entriesextra = filter(lambda e: set(re.sub('\\d', ' ', str(e.composition).replace(' ', '')).split()) & searchset,\n entriesextra)\n\n a = MPRester(\"s2vUo6mzETOHLdbu\")\n \n all_entries = a.get_entries_in_chemsys(set(searchset)) + list(entries) + list(entriesextra)\n\n pd = PhaseDiagram(all_entries)#,None\n\n\n\n for e in pd.stable_entries:\n if e.entry_id == None:\n reaction = pd.get_equilibrium_reaction_energy(e)\n return str(reaction) + ' None'\n\n for e in pd.unstable_entries:\n decomp, e_above_hull = pd.get_decomp_and_e_above_hull(e)\n pretty_decomp = [(\"{}:{}\".format(k.composition.reduced_formula, k.entry_id), round(v, 2)) for k, v in\n decomp.items()]\n if e.entry_id == None:\n return str(e_above_hull) + ' ' + str(pretty_decomp)\n #return execute('bash -c \\'HTehull ./ ' + new + '\\' | tail -n 1')\n\ndef gather(results):\n if 'Ehull' in results.keys():\n results['Ehullold'] = 0\n results['potcorr'] = 0\n results['path'] = ''\n resultkeys = list(results.keys()).copy()\n results['Edisp'] = 0\n\n for key in resultkeys:\n print(key)\n if key[0:2] == 'E0' and 'disp' not in key:\n try:\n vdw = float(execute('grep \\'Edisp\\' OUTCAR | awk \\'{print $3}\\''))\n except ValueError:\n vdw = 0\n results['Edisp'] = vdw\n results[key + 'disp'] = float(execute('grep \\'energy without entropy\\' OUTCAR | tail -1 | awk \\'{ print $7 }\\''))\n results[key] = results[key + 'disp'] - vdw\n if 'atom' in key:\n poscar = open('POSCAR','r')\n lines = poscar.readlines()\n \n numberofatoms = lines[6][:-1].lstrip()\n numberofatoms = \" \".join(numberofatoms.split())\n numberofatoms = sum(map(int, numberofatoms.split(' ')))\n results[key] /= numberofatoms\n results[key + 'disp'] /= numberofatoms\n elif key == 'natoms':\n poscar = open('POSCAR','r')\n lines = poscar.readlines()\n \n numberofatoms = lines[6][:-1].lstrip()\n numberofatoms = \" \".join(numberofatoms.split())\n numberofatoms = sum(map(int, numberofatoms.split(' ')))\n results[key] = numberofatoms\n elif key == 'Ehull':\n results[key + 'old'] = float(getEhull().split(' ')[0])\n ehull,path = tuple([x.strip() for x in getEhull(new='1').split(' ',maxsplit=1)])\n potcorr = getPotCorr()#float(execute('HTpotcorr ./ 1 | tail -n 
1'))\n results[key] = float(ehull) + potcorr\n results['path'] = path\n results['potcorr'] = potcorr\n elif key == 'Eatom':\n #to be implemented\n results[key] = float(execute('grep \\'energy without entropy\\' OUTCAR | tail -1 | awk \\'{ print $7 }\\''))\n poscar = open('POSCAR', 'r')\n lines = poscar.readlines()\n\n numberofatoms = lines[6][:-1].lstrip()\n numberofatoms = \" \".join(numberofatoms.split())\n numberofatoms = sum(map(int, numberofatoms.split(' ')))\n results[key] /= numberofatoms\n results[key] -= float(execute('HTeatom ./ | tail -n 1'))\n elif key == 'Epure':\n #to be implemented\n results[key] = float(execute('grep \\'energy without entropy\\' OUTCAR | tail -1 | awk \\'{ print $7 }\\''))\n poscar = open('POSCAR', 'r')\n lines = poscar.readlines()\n\n numberofatoms = lines[6][:-1].lstrip()\n numberofatoms = \" \".join(numberofatoms.split())\n numberofatoms = sum(map(int, numberofatoms.split(' ')))\n results[key] /= numberofatoms\n results[key] -= float(execute('HTepure ./ | tail -n 1'))\n elif key == 'Eelectro':\n results[key] = execute('HTelectro ./')\n elif key == 'cellparams':\n crystal = ase.io.read('CONTCAR')\n results[key] = list(crystal.get_cell_lengths_and_angles())\n elif key == 'volume':\n results[key] = float(execute('grep vol OUTCAR | tail -n 1 | awk \\'{print $5}\\''))\n elif key == 'eos':\n test = eosPrepare(directory='../')\n if not test:\n continue\n v0, e0, B, BP, residuals = eosFit()\n results[key] = {'V0': v0, 'E0': e0, 'B0': B, 'BP': BP, 'res': residuals}\n elif key == 'BG':\n vr = Vasprun('vasprun.xml',occu_tol=0.1)\n results[key] = vr.eigenvalue_band_properties[0]\n elif key == 'smearerr':\n s0 = float(execute('grep \\'energy without entropy\\' OUTCAR | tail -1 | awk \\'{ print $7 }\\''))\n s = float(execute('grep \\'energy without entropy\\' OUTCAR | tail -1 | awk \\'{ print $4 }\\''))\n results[key] = s0 - s\n elif key == 'magmom':\n magmom = execute('grep number.*magnetization OUTCAR | tail -n 1 | awk \\'{print $6}\\'').strip()\n if isfloat(magmom):\n results[key] = np.abs(float(magmom))\n else:\n results[key] = 0\n\n return results\n\ndef compress():\n nodes = getNodeInfo()\n ncore = min(nodes.values())\n if os.path.isfile('CHGCAR'):\n print('Compressing CHGCAR in ' + os.getcwd() + '.')\n execute('pigz -f -6 -p' + str(ncore) + ' CHGCAR')\n\n if os.path.isfile('WAVECAR'):\n print('Compressing WAVECAR in ' + os.getcwd() + '.')\n execute('pigz -f -6 -p' + str(ncore) + ' WAVECAR')\n\ndef decompress():\n nodes = getNodeInfo()\n ncore = min(nodes.values())\n if os.path.isfile('CHGCAR.gz'):\n print('Decompressing CHGCAR.gz in ' + os.getcwd() + '.')\n execute('pigz -f -d -6 -p' + str(ncore) + ' CHGCAR.gz')\n\n if os.path.isfile('WAVECAR.gz'):\n print('Decompressing WAVECAR.gz in ' + os.getcwd() + '.')\n execute('pigz -f -d -6 -p' + str(ncore) + ' WAVECAR.gz')\n\n
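# Worked example for redivideKP below (comment added; values are illustrative):\n# for cell lengths lc = [4, 4, 8] the rounded inverse ratio is [2, 2, 1], and\n# starting from kp = [5, 5, 5] the loop returns [5, 5, 3] - odd subdivision\n# counts that sample the two short axes more densely than the long one.\n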
def redivideKP(kp,lc):\n lc = np.array(lc)\n kp = np.array(kp)\n ratio = 1/lc\n total = np.prod(kp)\n newkp = np.array([0,0,0])\n i=0\n ratio /= np.min(ratio)\n ratio = np.round(ratio,0)\n\n while(np.prod(ratio*newkp) < total):\n newkp = np.round(i*ratio,0)\n newkp += (newkp+1)%2\n i += 1\n\n return newkp\n\ndef setupKP(settings,minkp):\n crystal = ase.io.read('POSCAR')\n cell = crystal.get_cell()\n a = cell[0]\n b = cell[1]\n c = cell[2]\n na = round(norm(a),3)\n nb = round(norm(b),3)\n nc = round(norm(c),3)\n nat = crystal.get_number_of_atoms()\n minkp /= nat\n\n lc = [na,nb,nc]\n kp = [int(x) for x in settings['KPOINTS']['K'].split()]\n\n while(np.prod(kp) < minkp):\n kp[0] += 2\n kp[1] += 2\n kp[2] += 2\n kp = redivideKP(kp,lc)\n\n settings['KPOINTS']['K'] = ' '.join([str(int(x)) for x in kp])\n return settings\n \n" }, { "alpha_fraction": 0.5836626887321472, "alphanum_fraction": 0.5836626887321472, "avg_line_length": 24.299999237060547, "blob_id": "87fab3ffa13fbd42dc566155d952758c3b6877d1", "content_id": "2e6674d3b36d56274f68c4771c84e4271bf8a397", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 759, "license_type": "permissive", "max_line_length": 58, "num_lines": 30, "path": "/io/CIF.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "import ase.io\nimport os\n\ndef read(name,software='VASP',directory=None):\n if directory is None:\n directory = os.getcwd()\n olddir = ''\n if(directory != os.getcwd()):\n olddir = os.getcwd()\n os.chdir(directory)\n\n fileform = {'VASP' : 'vasp', 'Gaussian' : 'gaussian'}\n CIF = ase.io.read(name,format=fileform[software])\n if olddir:\n os.chdir(olddir)\n return CIF\n\ndef write(name,CIF,directory=None):\n olddir = ''\n if directory is None:\n directory = os.getcwd()\n if(directory != os.getcwd()):\n olddir = os.getcwd()\n os.chdir(directory)\n ase.io.write(name, CIF, format='cif')\n if olddir:\n os.chdir(olddir)\n\n with open (name, 'r') as CIFfile:\n return CIFfile.read()\n" }, { "alpha_fraction": 0.4493837058544159, "alphanum_fraction": 0.4984845519065857, "avg_line_length": 29.745342254638672, "blob_id": "b090e30495834092d7d4138f65f4d4a1bf97bde5", "content_id": "227c3921cba7781d672bd22c1f50a07eaea1c64d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4949, "license_type": "permissive", "max_line_length": 86, "num_lines": 161, "path": "/utils/eos.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport matplotlib\nmatplotlib.use('agg')\nimport seaborn # imported for its matplotlib styling side effect\nimport numpy as np\nimport pylab as pl\nfrom scipy.optimize import curve_fit\n\ndef BM3(x, a, b, c, d):\n return a + b*x + c*x**2 + d*x**3\n\n
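# Note (comment added): the class below fits E(V) as a cubic polynomial in\n# t = V**(-2/3), a Birch-Murnaghan-type form; B and BP then follow from the\n# analytic derivatives of that polynomial evaluated at the fitted minimum.\n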
class EquationOfState:\n \"\"\"Fit equation of state for bulk systems.\n\n The following equation is used::\n\n E(V) = c0 + c1*t + c2*t**2 + c3*t**3, t = V**(-2/3)\n\n Use::\n\n eos = EquationOfState(volumes, energies)\n v0, e0, B, BP = eos.fit()\n eos.plot()\n\n \"\"\"\n\n _e = 1.60217733e-19\n b3 = 0.52917720859**3\n Ry = 13.6056923\n\n def __init__(self, volumes, energies):\n\n self.v = np.array(volumes)\n self.e = np.array(energies)\n self.v0 = None\n\n def fit2(self):\n \"\"\"Calculate volume, energy, and bulk modulus.\n\n Returns the optimal volume, the minimum energy, and the bulk\n modulus. Notice that the ASE units for the bulk modulus is\n eV/Angstrom^3 - to get the value in GPa, do this::\n\n v0, e0, B = eos.fit()\n print(B * _e * 1.0e21, 'GPa')\n\n \"\"\"\n # fit2 mirrors fit() step for step; see fit() below.\n fitdata = np.polyfit(self.v ** -(2.0 / 3.), self.e, 3, full=True)\n ssr = fitdata[1]\n sst = np.sum((self.e - np.average(self.e)) ** 2.)\n self.residuals = float(ssr / sst)\n fit0 = np.poly1d(fitdata[0])\n fit1 = np.polyder(fit0, 1) # 1st derivative of a polynomial fit0\n fit2 = np.polyder(fit1, 1)\n fit3 = np.polyder(fit2, 1)\n\n self.v0 = None\n for t in np.roots(fit1):\n if t > 0 and fit2(t) > 0:\n self.v0 = t ** (-3. / 2.)\n break\n\n if self.v0 is None:\n nv = len(self.v)\n middle = int(np.round(nv / 2))\n minimumv = np.min(self.v)\n self.v0 = minimumv - (self.v - minimumv)\n print('Warning: no minimum found, extrapolating.')\n\n self.e0 = fit0(t)\n der2 = fit2(t)\n der3 = fit3(t)\n der2V = 4. / 9. * t ** 5 * der2\n der3V = -20. / 9. * t ** (13. / 2.) * der2 - 8. / 27. * t ** (15. / 2.) * der3\n self.B = der2V / t ** (3. / 2.)\n self.BP = -1 - t ** (-3. / 2.) * der3V / der2V\n self.fit0 = fit0\n\n return self.v0, self.e0, self.B, self.BP, self.residuals\n\n def fit(self):\n \"\"\"Calculate volume, energy, and bulk modulus.\n\n Returns the optimal volume, the minimum energy, and the bulk\n modulus. Notice that the ASE units for the bulk modulus is\n eV/Angstrom^3 - to get the value in GPa, do this::\n\n v0, e0, B = eos.fit()\n print(B * _e * 1.0e21, 'GPa')\n\n \"\"\"\n #print(self.v,self.e)\n fitdata = np.polyfit(self.v**-(2.0 / 3.), self.e, 3, full=True)\n ssr = fitdata[1]\n sst = np.sum((self.e - np.average(self.e))**2.)\n self.residuals = float(ssr/sst)\n #print(fitdata[0])\n fit0 = np.poly1d(fitdata[0])\n fit1 = np.polyder(fit0, 1) # 1st derivative of a polynomial fit0\n fit2 = np.polyder(fit1, 1)\n fit3 = np.polyder(fit2, 1)\n\n self.v0 = None\n for t in np.roots(fit1):\n if t > 0 and fit2(t) > 0:\n self.v0 = t**(-3./2.)\n break\n\n if self.v0 is None:\n nv = len(self.v)\n middle = int(np.round(nv/2))\n minimumv = np.min(self.v)\n self.v0 = minimumv - (self.v - minimumv)\n print('Warning: no minimum found, extrapolating.')\n\n self.e0 = fit0(t)\n der2 = fit2(t)\n der3 = fit3(t)\n der2V = 4./9. * t**5 * der2\n der3V = -20./9. * t**(13./2.) * der2 - 8./27. * t**(15./2.) * der3\n self.B = der2V / t**(3./2.)\n self.BP = -1 - t**(-3./2.) * der3V / der2V\n self.fit0 = fit0\n\n return self.v0, self.e0, self.B, self.BP, self.residuals\n\n def plot(self, filename=None, show=None):\n \"\"\"Plot fitted energy curve.\n\n Uses Matplotlib to plot the energy curve. Use *show=True* to
Use *show=True* to\n show the figure and *filename='abc.png'* or\n *filename='abc.eps'* to save the figure to a file.\"\"\"\n\n if self.v0 is None:\n self.fit()\n\n if filename is None and show is None:\n show = False\n\n# x = 3.95\n# f = pl.figure(figsize=(x * 2.5**0.5, x))\n f = pl.figure()\n f.subplots_adjust(left=0.12, right=0.9, top=0.9, bottom=0.12)\n pl.plot(self.v, self.e, 'o')\n x = np.linspace(min(self.v), max(self.v), 100)\n pl.plot(x, self.fit0(x**-(2.0 / 3)), '-r')\n pl.xlabel(u'volume [$\\AA^3$]')\n pl.ylabel(u'energy [eV]')\n pl.title(u'E: %.3f eV, V: %.3f $\\AA^3$, B: %.3f GPa, B$_P$: %.3f' %\n (self.e0, self.v0, self.B *self._e * 1.0e21, self.BP), y = 1.02)\n\n if show:\n pl.show()\n if filename is not None:\n f.savefig(filename)\n\n return f" }, { "alpha_fraction": 0.7174129486083984, "alphanum_fraction": 0.7213930487632751, "avg_line_length": 34.85714340209961, "blob_id": "337f6fe89aee04cd824252356bdbbd214c39e58f", "content_id": "01394c75ea410d2c13853d17ab9fae129a680cd8", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1005, "license_type": "permissive", "max_line_length": 144, "num_lines": 28, "path": "/HTtools/HTenergiesdb", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nfrom pymatgen.ext.matproj import MPRester\nfrom pymatgen import Composition\nfrom pymatgen.entries.computed_entries import ComputedEntry\nfrom pymatgen.core.units import FloatWithUnit\nfrom pymatgen.analysis.reaction_calculator import ComputedReaction\nfrom pymatgen.apps.borg.hive import VaspToComputedEntryDrone\nfrom pymatgen.apps.borg.queen import BorgQueen\nfrom pymatgen.analysis.phase_diagram import *\nimport re,sys\ndrone = VaspToComputedEntryDrone()\nqueen = BorgQueen(drone)\nqueen.load_data(sys.argv[1])\nentriesorig = queen.get_data()\nsql = open('allenergies.sql','w')\n\ndef name(potcar):\n name = ''\n for p in potcar:\n temp = (p.split(' ')[-2].split('_')[0])\n name += temp\n return name\n\nfor entry in entriesorig:\n text = name(entry.parameters['potcar_symbols'])\n ehull = entry.uncorrected_energy\n sql.write('UPDATE `zintlfinal` SET `E0` = ' + str(ehull) + ' WHERE `file` IN (SELECT `file` FROM `map` WHERE `text` = \\'' + text + '\\');\\n')\nsql.close()\n\n" }, { "alpha_fraction": 0.6414779424667358, "alphanum_fraction": 0.6514898538589478, "avg_line_length": 33.105690002441406, "blob_id": "c3ab8850a8d58d1764f321c33d781a0482b7c8ce", "content_id": "6d125d22d9e1e6c63a5a2a108ad6d8e67cbd191d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4195, "license_type": "permissive", "max_line_length": 153, "num_lines": 123, "path": "/modules/Gaussian.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "from HighThroughput.communication.mysql import *\nfrom HighThroughput.utils.generic import *\nfrom HighThroughput.io.Gaussian import *\nimport os,time,shutil,subprocess,linecache, threading\n#cleanup function\n\ndef inherit(stat,qdir,cfile):\n if stat < 2:\n inputfile = os.path.join(qdir, 'import', str(cfile) + '.com')\n else:\n inputfile = os.path.join(qdir, 'CALCULATIONS/' + cfile + '/STEP' + str(int(stat)-2), str(cfile) + '.com')\n shutil.copy(inputfile, './' + str(cfile) + '.com')\n\n return 0 \n\ndef abort(cinfo,delay=0,mode = 0):\n import HighThroughput.manage.calculation as manage\n # either switch between electronic and ionic or auto based on ibrion is possible\n #for now 
0 is electronic stop, 1 ionic\n #time.sleep(delay)\n #open('aborted', 'a').close()\n #manage.restart(cinfo['id'])\n #psettings = manage.getSettings(manage.calcid)\n #if 'continue' in psettings.keys():\n # psettings['continue'] = str(int(psettings['continue']) + 1)\n #else:\n # psettings['continue'] = '1'\n #manage.modify({'settings' : psettings, 'id' : manage.calcid})\n return 0\n\n\ndef checkpointStart(cinfo,early=4400):\n walltime = int(os.getenv('PBS_WALLTIME'))\n thread = threading.Thread(target=abort,args=(cinfo,walltime-early,0))\n thread.daemon = True\n thread.start()\n return 0\n\ndef cont(calc,settings):\n import HighThroughput.manage.calculation as manage\n baks = 0\n bako = 0\n for file in os.listdir(os.curdir):\n if os.path.isfile(file) and '.com.bak' in file:\n baks += 1\n if os.path.isfile(file) and '.log.bak' in file:\n bako += 1\n\n if os.path.isfile(settings['name'] + '.com') and os.stat(settings['name'] + '.com').st_size > 0:\n shutil.copy(settings['name'] + '.com', settings['name'] + '.com.bak' + str(baks))\n # if 'SCF=Restart' not in settings['route']:\n # settings['route'] += ' SCF=Restart'\n\n if os.path.isfile(settings['name'] + '.log') and os.stat(settings['name'] + '.log').st_size > 0:\n os.rename(settings['name'] + '.log',settings['name'] + '.log' + '.bak' + str(bako))\n psettings = manage.getSettings(calc['parent'])\n if 'continued' not in psettings.keys():\n psettings['continued'] = 1\n else:\n psettings['continued'] += 1\n manage.updateSettings(psettings, calc['parent'])\n return calc\n\ndef finish():\n #end and readresults, readsettings too possibly, makecif populateresults and convert E/atom etc possibilities of using tables for chemical potentials\n #DOS and bandstructure options here too or in separate postprocess func\n #Incorporate HTfinish, other httools should go somewhere too\n print('')\n return 0\n\ndef initialize(settings):\n #print 'write incar kpoints potcar, make directory?'\n #inherit()\n writeSettings(settings)\n return 0\n\ndef prepare(settings):\n #preparing any configs, can turn on SP and SO here too\n parallelSetup(settings)\n #print 'settings should be modified anyways'\n return settings\n\ndef detectSP(settings):\n if settings['multiplicity'] > 1:\n magnetic = True\n else:\n magnetic = False\n return magnetic\n\ndef detectSO(poscar):\n #we're doing organic chem here, let me know when needed\n relativistic = False\n return relativistic\n\ndef run(ratio = 1,cwd = None):\n #could move hybrid to parallel setup\n if cwd is None:\n cwd = os.getcwd()\n COMname = subprocess.Popen('ls -la | grep com | awk \\'{print $9}\\'',stdout=subprocess.PIPE,shell=True).communicate()[0].decode().strip()\n return execute('g09 < ' + COMname + ' > ' + os.path.splitext(COMname)[0] + '.log')\n \n\ndef readSettings(settings):\n settings = readCOM()\n return settings\n\ndef parallelSetup(settings):\n nodes = getNodeInfo()\n ncore = min(nodes.values())\n settings['ncore'] = ncore\n fmem = subprocess.Popen('free -m | fgrep cache: | awk \\'{print $4}\\'',stdout = subprocess.PIPE, shell=True).communicate()[0].decode().strip()\n settings['mem'] = fmem + 'mb'\n return settings\n\ndef setupDir(settings):\n #print 'can make potcar and writesettings'\n #inherit too\n #Currently not implemented\n writeSettings(settings)\n return 0\n\ndef writeSettings(settings):\n writeCOM(settings)\n return 0\n" }, { "alpha_fraction": 0.6052631735801697, "alphanum_fraction": 0.6052631735801697, "avg_line_length": 18, "blob_id": "01a285e679f3b2d33157665689b8ccdd4021f21b", "content_id": "029bc9da8cc1584a35cfa9b8e28cba1e2fb972b1", "detected_licenses": [ "MIT" ], 
"is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 38, "license_type": "permissive", "max_line_length": 19, "num_lines": 2, "path": "/communication/__init__.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "__all__=['mysql']\nfrom . import mysql\n" }, { "alpha_fraction": 0.6326530575752258, "alphanum_fraction": 0.6505101919174194, "avg_line_length": 18.600000381469727, "blob_id": "bd3c694036198484b0cfc5f626e8dc71e47ddff8", "content_id": "7b24c1992eb5218491eca031c4e164be87a4f54a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 392, "license_type": "permissive", "max_line_length": 50, "num_lines": 20, "path": "/HTtools/HTeoscp", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport sys,os\nimport shutil\n\neosf = sys.argv[1]\neost = sys.argv[2]\n\nshutil.copytree(eosf, eost + 'f' + eosf)\n\nos.chdir(eost + 'f' + eosf)\nos.system('cont')\n\nold = open(os.path.join('../',eost,'CONTCAR'),'r')\ncoords = old.readlines()[1:5]\nnew = open('POSCAR','r+')\nnewcoords = new.readlines()\nnewcoords[1:5] = coords\nnew.seek(0)\nnew.write(''.join(newcoords))\nnew.close()\n" }, { "alpha_fraction": 0.604182779788971, "alphanum_fraction": 0.6160598993301392, "avg_line_length": 36.6019401550293, "blob_id": "acc014396318ec0da06661b0a1fa41a098cec00d", "content_id": "c3cfae538be4340919cfbe894eb85ff0e308219d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 15492, "license_type": "permissive", "max_line_length": 179, "num_lines": 412, "path": "/ML/models/priority.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Thu Jul 26 15:28:56 2018\n\n@author: Michiel\n\"\"\"\n\n# -*- coding: utf-8 -*-\nimport pandas as pd\nfrom HighThroughput.manage.calculation import setPriority, getPriority, setMultiplePriorities\nfrom HighThroughput.ML.features.database import getFile,getResults,getComposition,getID\nfrom HighThroughput.ML.features.elements import addElemental\nfrom HighThroughput.utils.generic import getClass\nfrom HighThroughput.communication.mysql import mysql_query\nimport sklearn.gaussian_process as gp\nfrom scipy.stats import norm\nfrom sklearn.decomposition import PCA\nimport numpy as np\nfrom gpflowopt.domain import ContinuousParameter\nfrom gpflowopt.design import LatinHyperCube\nimport gpflowopt\nimport json\nfrom HighThroughput.ML.models.mf_kriging import MultiFiCoKriging\n\ndef calcInitialMLPriority(queue,stat, features=['mass','Ecoh','EN','IP'], N_init = 50, stable_limit=0.05):\n\n allmats = getComposition(queue,0)\n \n files_allmats = getFile(queue, 0)\n \n elref = set(['mass','Ecoh','EN','IP'])\n \n elfeatures = set(features).intersection(elref)\n \n allmats = addElemental(allmats,elfeatures)\n\n allfeats = [x for x in allmats.columns if ''.join([i for i in x if not i.isdigit()]) in features]\n \n #apply pca to all materials and to train set \n pca = PCA(n_components=8)\n train_means1 = np.mean(allmats[allfeats].values, axis = 0)\n train_stds1 = np.std(allmats[allfeats].values , axis = 0)\n \n throwinds = np.where(train_stds1 == 0)[0]\n \n transf = np.delete(allmats[allfeats].values, throwinds, axis=1)\n train_means1 = np.delete(train_means1, throwinds)\n train_stds1 = np.delete(train_stds1, throwinds)\n\n \n X_all = pca.fit_transform((transf-train_means1)/train_stds1) 
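# comment added: these PCA scores define the reduced design space used for candidate selection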
\n\n train_means = np.mean(X_all, axis = 0)\n train_stds = np.std(X_all, axis = 0)\n\n X = (X_all-train_means)/train_stds \n \n domain = gpflowopt.domain.ContinuousParameter('x1', min(X[:,0]), max(X[:,0]))\n\n for i in np.arange(1, X.shape[1]):\n domain += gpflowopt.domain.ContinuousParameter('x'+str(i+1), min(X[:,i]), max(X[:,i]))\n\n design = LatinHyperCube(N_init, domain)\n \n #X0 is the intial sampling plan in continuous space\n X0 = design.generate()\n \n indices = []\n \n for x0 in X0:\n for j in range(X.shape[0]):\n index_new = np.linalg.norm(X-x0,axis=1).argsort()[j]\n if index_new not in indices:\n indices.append(index_new)\n break \n \n priority = X.shape[0]*np.ones((len(indices)), dtype=int)\n \n priority = pd.DataFrame({'id' : allmats.id.iloc[indices], 'priority' : priority})\n \n print(\"priorities of initial sampling plan are set\")\n return priority\n \n\ndef calcMLPriority(queue,stat,modelClass= 'sklearn.gaussian_process.GaussianProcessRegressor',target='Ehull',features=['mass','Ecoh','EN','IP'],N_init=50,stable_limit = 0.05):\n \n materials = getComposition(queue,stat)\n\n if isinstance(materials, list):\n if isinstance(materials[0], str):\n print('Initializing ML priorities.')\n return calcInitialMLPriority(queue,stat, features, N_init = N_init)\n\n if len(materials)<N_init and (isinstance(materials, pd.DataFrame) or isinstance(materials[0], dict)):\n print(\"skipped\")\n return None \n\n allmats = getComposition(queue,0)\n \n files_allmats = getFile(queue, 0)\n \n elref = set(['mass','Ecoh','EN','IP'])\n \n elfeatures = set(features).intersection(elref)\n \n allmats = addElemental(allmats,elfeatures)\n\n allfeats = [x for x in allmats.columns if ''.join([i for i in x if not i.isdigit()]) in features]\n \n #apply pca to all materials and to train set\n# files_allmats.to_pickle(\"file_to_id\")\n# allmats[['id']+allfeats].to_pickle(\"allmats\")\n \n # ((allmats[allfeats]-allmats[allfeats].mean(axis = 0))/allmats[allfeats].std(axis = 0)).to_pickle(\"X_input_pca\")\n \n pca = PCA(n_components=8)\n \n train_means1 = np.mean(allmats[allfeats].values, axis = 0)\n train_stds1 = np.std(allmats[allfeats].values , axis = 0)\n \n# np.save(\"train_means1\", train_means1)\n# np.save(\"train_stds1\" , train_stds1)\n \n X_all = pca.fit_transform((allmats[allfeats].values-train_means1)/train_stds1) \n\n# np.save(\"X_pca\", X_all)\n\n train_means = np.mean(X_all, axis = 0)\n train_stds = np.std(X_all, axis = 0)\n\n X = (X_all-train_means)/train_stds \n \n# np.save(\"X_nrm\", X)\n \n# if not isinstance(materials, pd.DataFrame):\n# domain = gpflowopt.domain.ContinuousParameter('x1', min(X[:,0]), max(X[:,0]))\n# \n# for i in np.arange(1, X.shape[1]):\n# domain += gpflowopt.domain.ContinuousParameter('x'+str(i+1), min(X[:,i]), max(X[:,i]))\n# \n# design = LatinHyperCube(N_init, domain)\n# \n# #X0 is the intial sampling plan in continuous space\n# X0 = design.generate()\n# \n# #indices will contain the indices of the materials to sample initially\n# indices = []\n# \n# #look for the indices of the materials that lay closest to the sample points in continuous space\n# for x0 in X0:\n# for j in range(X.shape[0]):\n# index_new = np.linalg.norm(X-x0,axis=1).argsort()[j]\n# if index_new not in indices:\n# indices.append(index_new)\n# break \n# \n# priority = X.shape[0]*np.ones((len(indices)), dtype=int)\n# \n# # priority = {}\n# \n# # priority = np.zeros((X.shape[0]), dtype = int)\n# #priority is put equal to X.shape to assure preference over the updated priorities based on ML\n# # for index in 
indices:\n# # priority[index] = X.shape[0]\n# \n# #priority can be computed directly, without \"indices\", but \"indices\" might be useful for debugging\n# \n# priority = pd.DataFrame({'id' : allmats.id.iloc[indices], 'priority' : priority})\n# \n# print(\"priorities of initial sampling plan are set\")\n# \n# return priority \n \n if isinstance(materials, pd.DataFrame): \n materials = addElemental(materials,elfeatures)\n \n files_materials = getFile(queue, stat)\n \n if target in ['Ehull','E0','Eatom','Epure']:\n materialt = getResults(queue,stat,[target])\n\n materials = materials.join(materialt)\n\n X_train = pca.transform((materials[allfeats]-allmats[allfeats].mean(axis = 0))/allmats[allfeats].std(axis = 0))\n \n #initialize kernel and GP\n kernel = gp.kernels.ConstantKernel()*gp.kernels.Matern(nu=5/2)+gp.kernels.WhiteKernel()\n model = gp.GaussianProcessRegressor(kernel=kernel,\n alpha=1e-5,\n n_restarts_optimizer=10,\n normalize_y=True)\n #fit model\n# print((allmats[allfeats]-allmats[allfeats].mean(axis = 0))/allmats[allfeats].std(axis = 0))\n model.fit((X_train-train_means)/train_stds,materials[target])\n \n ids_done = set([id for ind, id in enumerate(allmats.id) if files_allmats['file'].iloc[ind] in set(files_materials['file'])])\n \n ids_TBD = list(set(allmats.id).difference(ids_done))\n\n indices_TBD = np.array([index for index, id in enumerate(allmats.id) if id in ids_TBD])\n \n #get predictions and uncertainties\n mu, sigma = model.predict(X[indices_TBD], return_std=True)\n\n prob_stab = norm.cdf((stable_limit-mu)/sigma)\n \n print(np.max(prob_stab))\n# print(files_allmats['file'].iloc[indices_TBD][prob_stab.argsort()[-1]])\n \n #get rank, the higher the better\n rank = prob_stab.argsort() \n \n #create priorities based on rank \n priority = np.zeros(X.shape[0], dtype=int)\n \n #The higher in the ranking the higher the priority should be \n for ind, rnk in enumerate(rank):\n priority[indices_TBD[rnk]] = ind\n\n priority = pd.DataFrame({'id' : allmats.id, 'priority' : priority})\n# print(allmats.id) \n# priority = pd.DataFrame({'id' : ids_TBD, 'priority' : priority})\n# output = priority[~priority.id.isin(materials['id'])]\n return priority\n\ndef calcMLPriority_mf(queue,stats,modelClass= 'sklearn.gaussian_process.GaussianProcessRegressor',target='Ehull',features=['mass','Ecoh','EN','IP'],N_init=50,stable_limit = 0.05):\n\n threshold = 0.1\n\n if not isinstance(stats, list):\n stats = [stats]\n else:\n stats = sorted(stats)\n\n materials = {stat:getComposition(queue,stat) for stat in stats}\n \n if isinstance(materials[stats[0]], list):\n if isinstance(materials[stats[0]][0], str):\n return calcInitialMLPriority(queue,stats[0], features, N_init = N_init)\n\n if len(materials[stats[0]])<N_init and (isinstance(materials[stats[0]], pd.DataFrame) or isinstance(materials[stats[0]][0], dict)):\n print(\"skipped\")\n return None \n \n allmats = getComposition(queue,0)\n \n files_allmats = getFile(queue, 0)\n \n allmats = allmats.join(files_allmats)\n \n allmats = allmats.set_index(\"file\") \n \n elref = set(['mass','Ecoh','EN','IP'])\n \n elfeatures = set(features).intersection(elref)\n \n elfeatures = list(elfeatures)\n allmats = addElemental(allmats,elfeatures)\n \n allfeats = [x for x in allmats.columns if ''.join([i for i in x if not i.isdigit()]) in features] \n \n n_feats = 8 \n pca = PCA(n_components=n_feats)\n \n train_means1 = np.mean(allmats[allfeats].values, axis = 0)\n train_stds1 = np.std(allmats[allfeats].values , axis = 0)\n \n throwinds = np.where(train_stds1 == 
0)[0]\n \n transf = np.delete(allmats[allfeats].values, throwinds, axis=1)\n train_means1 = np.delete(train_means1, throwinds)\n train_stds1 = np.delete(train_stds1, throwinds)\n\n X_all = pca.fit_transform((transf-train_means1)/train_stds1) \n\n# np.save(\"X_pca\", X_all)\n\n train_means = np.mean(X_all, axis = 0)\n train_stds = np.std(X_all, axis = 0)\n\n X = (X_all-train_means)/train_stds\n \n allmats = allmats.assign(x0 = X[:,0], x1 = X[:,1], x2 = X[:,2], x3 = X[:,3],\\\n x4 = X[:,4], x5 = X[:,5], x6 = X[:,6], x7 = X[:,7])\n\n feat_names = [\"x\"+str(i) for i in range(n_feats)] \n \n files_materials = {stat: getFile(queue, stat) for stat in stats}\n \n for stat in stats: \n# if isinstance(materials[stat], pd.DataFrame): \n# materials[stat] = addElemental(materials[stat],elfeatures)\n mat_files = getFile(queue, stat)\n \n materials[stat] = materials[stat].join(mat_files)\n \n materials[stat] = materials[stat].set_index(\"file\")\n if target in ['Ehull','E0','Eatom','Epure']:\n materialt = getResults(queue,stat,[target]).set_index(\"file\")\n\n materials[stat] = materials[stat].join(materialt)\n\n X_lvls = []\n Y_lvls = []\n inds_prev = set()\n #stats_skipped = 0\n\n for i, stat in enumerate(stats[::-1]):\n ids_done = set([id for ind, id in enumerate(allmats.index) if files_allmats['file'].iloc[ind] in set(files_materials[stat]['file'])]) \n inds = ids_done.difference(inds_prev)\n if len(inds)>0:\n# print(inds)\n# print(list(materials[stat].index))\n# print(list(allmats.index))\n# print(inds)\n# print(files_materials[stat]['file'].values)\n# print(files_allmats['file'].loc[inds].values)\n# inds_target = [np.where(files_materials[stat]['file'].values == files_allmats['file'].loc[ind])[0][0] for ind in inds]\n# print(inds_target)\n X_lvl = [allmats[feat_names].reindex(inds).values]\n# print(materials[stat][target].values)\n Y_lvl = [materials[stat][target].reindex(inds).values]\n if np.isfinite(Y_lvl).all() and np.isfinite(X_lvl).all():\n inds_prev = inds_prev.union(inds)\n #print('i',i,'stat',stat,'level',len(X_lvl),'levels',len(X_lvls))\n# print(\"indices updated\")\n #if i>0:\n # X_lvl.append(X_lvls[i-1-stats_skipped])\n # Y_lvl.append(Y_lvls[i-1-stats_skipped])\n if len(X_lvls)>0 and len(Y_lvls)>0:\n X_lvl.append(X_lvls[-1])\n Y_lvl.append(Y_lvls[-1])\n X_lvls.append(np.vstack(X_lvl))\n Y_lvls.append(np.hstack(Y_lvl))\n #print('i',i,'stat',stat,'level',len(X_lvl),'levels',len(X_lvls))\n #else:\n #print('Hurray, for I have skipped')\n #stats_skipped += 1 \n #else:\n #stats_skipped += 1\n# print(X_lvl)\n# print(Y_lvl)\n\n model = MultiFiCoKriging(regr = 'linear', rho_regr = 'linear')\n# print(\"preprocess time: \" + str(time.time()-start)) \n #fit model\n \n try:\n model.fit(X_lvls[::-1], Y_lvls[::-1])\n except (ValueError, IndexError) as e:\n print(e)\n print(\"priorities have not been updated, because of NaNs in data. 
Please check the calculation results.\")\n return None\n \n indices_TBD = list(set(allmats.index).difference(inds_prev))\n \n \n #get predictions and uncertainties\n mu, sigma = model.predict(allmats[feat_names].loc[indices_TBD].values)\n\n mu = mu.flatten()\n sigma = sigma.flatten()\n\n prob_stab = norm.cdf((stable_limit-mu)/sigma)\n max_prob_stab = np.max(prob_stab)\n print(max_prob_stab)\n# print(files_allmats['file'].iloc[indices_TBD][prob_stab.argsort()[-1]])\n \n #get rank, the higher the better\n if max_prob_stab>threshold:\n rank = prob_stab.argsort()\n else:\n print(\"uncertainty sampled\")\n rank = sigma.argsort()\n# print(rank) \n \n #create priorities based on rank\n priority = np.zeros(len(rank), dtype = int)\n ids = np.zeros(len(rank), dtype = int)\n# priority = pd.DataFrame(np.zeros((X.shape[0], 2), dtype=int),index=allmats.index, columns = [\"id\", \"priority\"])\n# \n# #The higher in the ranking the higher the priority should be \n# for ind, rnk in enumerate(rank):\n# priority[\"priority\"].loc[indices_TBD[rnk]] = ind\n# priority[\"id\"].loc[indices_TBD[rnk]] = allmats.id.loc[indices_TBD[rnk]]\n indices_all = pd.Index(allmats.index)\n\n for ind, rnk in enumerate(rank):\n loc = indices_all.get_loc(indices_TBD[rnk])\n priority[ind] = ind\n ids[ind] = allmats.id.iloc[loc]\n \n# print(priority)\n# priority = pd.DataFrame({'id' : allmats.id, 'priority' : priority.values}) \n# print(priority)\n priorities = pd.DataFrame({'id' : ids, 'priority' : priority})\n# output = priority[~priority.id.isin(materials['id'])]\n return priorities\n\ndef updateMLPriority(queue,stat,modelClass= 'sklearn.gaussian_process.GaussianProcessRegressor',target='Ehull',features=['mass','Ecoh','EN','IP'],maxParallel=30,N_init=50):\n priorities = calcMLPriority_mf(queue,stat,modelClass= 'sklearn.gaussian_process.GaussianProcessRegressor',target=target,features=features,N_init=N_init)\n if isinstance(priorities, pd.DataFrame):\n setMultiplePriorities(priorities)\n# priorities = priorities.sort_values(ascending=False,by='priority')\n# print(priorities)\n# for i,p in priorities.iterrows():\n# print(i)\n# setPriority(p['priority'], p['id'])\n\ndef setMLPriority(queue, stat, features=['mass','Ecoh','EN','IP'], N_init = 50,stable_limit=0.05):\n priorities = calcInitialMLPriority(queue, stat, features, N_init,stable_limit)\n setMultiplePriorities(priorities)\n" }, { "alpha_fraction": 0.6842105388641357, "alphanum_fraction": 0.6894736886024475, "avg_line_length": 20.11111068725586, "blob_id": "7523445d49efbbfbd11506d44870bf1b68020d58", "content_id": "c12747160fc9c22c4b80df2f3d245f2261630dd4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 190, "license_type": "permissive", "max_line_length": 41, "num_lines": 9, "path": "/examples/Backtesting/cleanup.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport HighThroughput.manage.queue as HTq\n\nimport pickle\n\nwith open('backtest.pkl','rb') as btfile:\n newq, materials = pickle.load(btfile)\n HTq.remove(btfile)\n" }, { "alpha_fraction": 0.6285714507102966, "alphanum_fraction": 0.6285714507102966, "avg_line_length": 17, "blob_id": "8fbedc3e30709612b82b48adc9f6ad0a31d55c56", "content_id": "1317e73337a417e69c2b8f38f0adb517e59310fb", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 35, "license_type": "permissive", "max_line_length": 17, "num_lines": 2, "path": 
"/utils/diagnostics.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "def memTracker():\n print('temp')" }, { "alpha_fraction": 0.6744438409805298, "alphanum_fraction": 0.6768854856491089, "avg_line_length": 27.353845596313477, "blob_id": "34760f0eaa8a74a561b589bdee1baa23763d9618", "content_id": "9c4aa069140be2a263f01ea64436a8beaf324a7c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3686, "license_type": "permissive", "max_line_length": 159, "num_lines": 130, "path": "/examples/HTvasp", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, random, re, shutil, subprocess, sys, time,math\nimport HighThroughput.manage.calculation as HT\nimport HighThroughput.manage.template as template\nimport HighThroughput.manage.material as material\nfrom HighThroughput.io.VASP import *\nfrom HighThroughput.utils.generic import *\nfrom HighThroughput.modules.VASP import *\nfrom HighThroughput.errors.generic import *\nfrom HighThroughput.communication.mysql import *\n\nimport argparse\n\nparser = argparse.ArgumentParser(description='VASP wrapper')\nparser.add_argument('-q','--queue', metavar='qid', type=int,\n help='The queue you wish to submit the calculation to, a default type queue or one optimized for the specific type of calculation.')\n\nargs = parser.parse_args()\n\nINCAR = readINCAR()\n\nif 'CALCID' in INCAR.keys():\n sid = INCAR['CALCID']\nelse:\n os.mkdir('import')\n os.chdir('import')\n shutil.copy('../POSCAR','POSCAR' + INCAR['SYSTEM'])\n#Could have a mat argument for COD entries\n matid = material.add('POSCAR' + INCAR['SYSTEM'])\n sid = HT.add(matid,args.queue)\n HT.modify({'id' : cid, 'file' : matid})\n os.chdir('../')\nprint sid\nHT.start(sid)\nprint sid\ncid = HT.calcid\nprint cid\ncalc = HT.get(cid)\nprint calc\ncalc['settings'] = readSettings(calc['settings'])\ncalc['settings']['continue'] = 1\ncalc['settings']['INCAR']['CALCID'] = sid\nwriteSettings(calc['settings'])\nHT.updateSettings(calc['settings'],cid)\n\n\ncfile = calc['file']\nstatus = int(calc['stat'])\n\nprint('Server: ' + calc['server'])\n\nif os.path.isfile('aborted'):\n os.remove('aborted')\n\ncheckpointStart(calc)\n\nparent = HT.getSettings(calc['parent'])\n\nif 'continue' not in parent.keys():\n parent['continue'] = 0\nif 'continued' not in parent.keys():\n parent['continued'] = 0\n\nif int(parent['continue']) > int(parent['continued']):\n print 'Continuing job'\n cont(calc)\n\n#Can reenable if you want\n#if detectSP('POSCAR'):\n# calc['settings']['INCAR']['ISPIN'] = 2\n\nparallelSetup(calc['settings'])\nperror = HT.getResults(calc['parent'])\nif perror.get('errors') != None:\n fixerrors(calc)\nwriteSettings(calc['settings'])\nrun()\nfinderrors(calc)\nif os.path.isfile('STOPCAR'):\n os.remove('STOPCAR')\n\n\n\nprint('END STATUS ' + calc['stat'])\n\n\n#UPDATE POTCAR INFO\n\nPOTCAR_version = execute('grep -a \\'TITEL\\' POTCAR | awk \\'{ print $4 }\\'')\ncalc['settings']['POTCAR'] = POTCAR_version.strip().replace('\\n',', ')\n\nif os.path.isfile('aborted'):\n print 'Calculation aborted'\n #execute('HTvasp ' + str(qid) + ' ' + str(submit_arg))\n sys.exit()\n#END CALCULATION AND FETCH RESULTS\n\nenergy = execute('grep \\'energy without entropy\\' OUTCAR | tail -1 | awk \\'{ print $4 }\\'')\n\nif 'error' in locals():\n HT.updateResults({'error':error}, cid)\nelif energy=='' or not 'energy' in locals():\n HT.updateResults({'error':'Energy missing'}, cid)\n 
print('Energy missing! Error...')\nelif not os.path.isfile('CHGCAR') and not os.path.isfile('CHG'):\n HT.updateResults({'error':'CHGCAR and CHG missing. VASP Error?'}, cid)\n print('CHGCAR/CHG missing. VASP Error?')\nelse:\n print('Energy OK. Ending calculation, deleting junk files and fetching results.')\n HT.end(cid)\n #cleanup function\n os.remove('CHG')\n \n print calc['results']\n results = calc['results']\n #could leave this out when working with QZP's\n\n energy = float(energy)\n\n results['E0'] = energy\n\n print('Updating results')\n# updateresults could be assumed from dictionary keys and automated.\n HT.updateResults(results, cid)\n\nprint('Updating settings')\nHT.updateSettings(calc['settings'], cid)\n\n#if int(HT.fetch(str(qid))) > 0:\n# execute('betaHSE ' + str(qid) + ' ' + str(submit_arg))\n" }, { "alpha_fraction": 0.5502228140830994, "alphanum_fraction": 0.5821049213409424, "avg_line_length": 43.181819915771484, "blob_id": "44e7198285353a62be4fc85531cc8f017198c532", "content_id": "8df77dc98bfb2b1a079a09d1997ada8d93a33edc", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2917, "license_type": "permissive", "max_line_length": 312, "num_lines": 66, "path": "/HTtools/HTFNV", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport warnings\nwarnings.filterwarnings(\"ignore\")\nimport os, sys, subprocess\nrefpot = sys.argv[1]\neV = float(subprocess.Popen('grep ENCUT OUTCAR | awk \\'{print $3}\\'',shell=True,stdout=subprocess.PIPE).communicate()[0].decode().strip())\necut = eV/13.605698065894 \n\neps = 16.2\noutcar = 'OUTCAR'\nencut = subprocess.Popen('grep ENCUT ' + outcar,stdout=subprocess.PIPE,shell=True).communicate()[0].decode().strip().split()[2]\nspecies = subprocess.Popen('grep POTCAR: ' + outcar + ' | gawk -- \\'{ if (!a[$0]++) b[c++] = $0; } END { for (i = 0; i < c; ++i) { k = b[i]; print a[k] \" \" k;} }\\' | awk \\'{print $4}\\'',stdout=subprocess.PIPE,shell=True).communicate()[0].decode().strip().split('\\n')\nnatom = subprocess.Popen('grep \"ions per type\" ' + outcar,stdout=subprocess.PIPE,shell=True).communicate()[0].decode().strip().split()\nnelect = subprocess.Popen(' grep \"NELECT\" ' + outcar + ' | awk \\'{print $3}\\' ',stdout=subprocess.PIPE,shell=True).communicate()[0].decode().strip()\n\nif species[0][0:2] == 'Ge':\n nge = natom[4]\n if len(species[0]) > 2:\n potge = 14\n else:\n potge = 4\n if len(natom) == 6:\n nga = natom[5]\n if len(species[1]) > 2:\n potga = 13\n else:\n potga = 3\n else:\n nga = 0\n potga = 0\nelse:\n nga = natom[4]\n if len(species[0]) > 2:\n potga = 13\n else:\n potga = 3\n if len(natom) == 6:\n nge = natom[5]\n if len(species[1]) > 2:\n potge = 14\n else:\n potge = 4\n else:\n nge = 0\n potge = 0\neps=16.2\nif float(sys.argv[2]) > 0:\n eps = float(sys.argv[2])\ncharge = int(nga)*int(potga) + int(nge)*int(potge) - int(float(nelect))\ncharge = -charge\nrun1 = subprocess.Popen('sxdefectalign --vasp --ecut ' + str(ecut) + ' --charge ' + str(charge) + ' --eps ' + str(eps) + ' --center 0,0,0 --relative --vdef LOCPOT --vref ' + str(refpot) + ' | grep mesh',shell=True,stdout=subprocess.PIPE).communicate()[0]\nngx = run1.decode().split()[2]\nimport pandas as pd\nfrom pandas.compat import StringIO\ntemp = subprocess.Popen('head -n ' + str(ngx) + ' vline-eV-a0.dat',shell=True,stdout=subprocess.PIPE).communicate()[0].decode('utf-8')\ndf = pd.read_table(StringIO(temp),header = None)\nif charge > 0:\n C = 
df[1].min()\nelse:\n C = df[1].max()\n\nrun2 = subprocess.Popen('sxdefectalign --vasp --ecut ' + str(ecut) + ' --charge ' + str(charge) + ' --eps ' + str(eps) + ' --center 0,0,0 --relative --vdef LOCPOT --vref ' + str(refpot) + ' -C ' + str(C) + ' | grep \\'Defect correction\\' | awk \\'{print $4}\\' ',shell=True,stdout=subprocess.PIPE).communicate()[0]\n\nprint('sxdefectalign --vasp --ecut ' + str(ecut) + ' --charge ' + str(charge) + ' --eps ' + str(eps) + ' --center 0,0,0 --relative --vdef LOCPOT --vref ' + str(refpot) + ' -C ' + str(C) + ' | grep \\'Defect correction\\' | awk \\'{print $4}\\' ')\n\nprint(run2.decode().strip())\n\n" }, { "alpha_fraction": 0.48728594183921814, "alphanum_fraction": 0.5028541684150696, "avg_line_length": 31.627119064331055, "blob_id": "d7375a15bbed59d9dab119ea22d2a06b9e6f79a1", "content_id": "6baa775944dbbb86d550b72025928d8d9f6d4ae3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1927, "license_type": "permissive", "max_line_length": 131, "num_lines": 59, "path": "/io/Gaussian.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "import os,subprocess,json\n\ndef readCOM(directory=None):\n if directory == None:\n directory = os.getcwd()\n template = dict()\n COMname = subprocess.Popen('ls -la | grep com | awk \\'{print $9}\\'',stdout=subprocess.PIPE,shell=True).communicate()[0].strip()\n COMfile = open(os.path.join(directory,COMname),'r')\n template['name'] = COMname.strip('.com') \n i = 0\n \n for line in COMfile:\n if i >= 1:\n i += 1\n \n if line[0] == '%':\n temp = line[1:-1].strip().split('=')\n template[str(temp[0])] = temp[1] \n \n if line[0] == '#':\n template['route']=line[1:-1].strip()\n i=1\n\n if i == 3:\n template['comment']=line.strip()\n\n if i == 5:\n cm = line.strip().split()\n template['charge'] = cm[0]\n template['multiplicity'] = cm[1]\n break\n COMfile.close()\n return template\n\ndef writeCOM(settings,directory=None):\n if directory == None:\n directory = os.getcwd()\n if not isinstance(settings,dict):\n settings = json.loads(settings)\n cfile = open(os.path.join(directory,settings['name'] + '.com'),'r')\n COMfile = cfile.readlines()\n routeindex = 0\n for i in range(0,len(COMfile)):\n if COMfile[i][0] == '%':\n temp = COMfile[i][1:-1].strip().split('=')\n COMfile[i] = '%' + temp[0] + '=' + str(settings[temp[0]]) + '\\n'\n if COMfile[i][0] == '#':\n COMfile[i] = '#' + settings['route'] + '\\n'\n routeindex = i\n\n if routeindex > 0:\n if routeindex == i - 2:\n COMfile[i] = settings['comment'] + '\\n'\n\n if routeindex == i - 4:\n COMfile[i] = settings['charge'] + ' ' + settings['multiplicity'] + '\\n'\n open(os.path.join(directory,settings['name'] + '.com'),'w').write(''.join(COMfile))\n cfile.close()\n return 0\n\n\n" }, { "alpha_fraction": 0.623707115650177, "alphanum_fraction": 0.6325726509094238, "avg_line_length": 37.55696105957031, "blob_id": "850b1ffe50e6d4d14d8dee4fc0347a6fc2b8b1eb", "content_id": "4fab32fa43a08079e52fe06f816b1af1a205ea1f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6091, "license_type": "permissive", "max_line_length": 119, "num_lines": 158, "path": "/ML/models/energy.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport pandas as pd\nfrom HighThroughput.manage.calculation import setPriority, getPriority, setMultiplePriorities\nfrom HighThroughput.ML.features.database import 
getFile,getResults,getComposition,getID\nfrom HighThroughput.ML.features.elements import addElemental\nfrom HighThroughput.utils.generic import getClass\nfrom HighThroughput.communication.mysql import mysql_query\nimport sklearn.gaussian_process as gp\nfrom scipy.stats import norm\nfrom sklearn.decomposition import PCA\nimport numpy as np\nfrom gpflowopt.domain import ContinuousParameter\nfrom gpflowopt.design import LatinHyperCube\nimport gpflowopt\nimport json\nfrom HighThroughput.ML.models.mf_kriging import MultiFiCoKriging\n\n\ndef calcML(queue, stat, modelClass='sklearn.gaussian_process.GaussianProcessRegressor', target='Ehull',\n features=['mass', 'Ecoh', 'EN', 'IP'], N_init=50, stable_limit=0.05):\n materials = getComposition(queue, stat)\n\n if isinstance(materials, list):\n if isinstance(materials[0], str):\n return calcInitialMLPriority(queue, stat, features, N_init=N_init)\n\n if len(materials) < N_init and (isinstance(materials, pd.DataFrame) or isinstance(materials[0], dict)):\n print(\"skipped\")\n return None\n\n allmats = getComposition(queue, 0)\n\n files_allmats = getFile(queue, 0)\n\n elref = set(['mass', 'Ecoh', 'EN', 'IP'])\n\n elfeatures = set(features).intersection(elref)\n\n allmats = addElemental(allmats, elfeatures)\n\n allfeats = [x for x in allmats.columns if ''.join([i for i in x if not i.isdigit()]) in features]\n\n # apply pca to all materials and to train set\n # files_allmats.to_pickle(\"file_to_id\")\n # allmats[['id']+allfeats].to_pickle(\"allmats\")\n\n # ((allmats[allfeats]-allmats[allfeats].mean(axis = 0))/allmats[allfeats].std(axis = 0)).to_pickle(\"X_input_pca\")\n\n pca = PCA(n_components=8)\n\n train_means1 = np.mean(allmats[allfeats].values, axis=0)\n train_stds1 = np.std(allmats[allfeats].values, axis=0)\n\n # np.save(\"train_means1\", train_means1)\n # np.save(\"train_stds1\" , train_stds1)\n\n X_all = pca.fit_transform((allmats[allfeats].values - train_means1) / train_stds1)\n\n # np.save(\"X_pca\", X_all)\n\n train_means = np.mean(X_all, axis=0)\n train_stds = np.std(X_all, axis=0)\n\n X = (X_all - train_means) / train_stds\n\n # np.save(\"X_nrm\", X)\n\n # if not isinstance(materials, pd.DataFrame):\n # domain = gpflowopt.domain.ContinuousParameter('x1', min(X[:,0]), max(X[:,0]))\n #\n # for i in np.arange(1, X.shape[1]):\n # domain += gpflowopt.domain.ContinuousParameter('x'+str(i+1), min(X[:,i]), max(X[:,i]))\n #\n # design = LatinHyperCube(N_init, domain)\n #\n # #X0 is the intial sampling plan in continuous space\n # X0 = design.generate()\n #\n # #indices will contain the indices of the materials to sample initially\n # indices = []\n #\n # #look for the indices of the materials that lay closest to the sample points in continuous space\n # for x0 in X0:\n # for j in range(X.shape[0]):\n # index_new = np.linalg.norm(X-x0,axis=1).argsort()[j]\n # if index_new not in indices:\n # indices.append(index_new)\n # break\n #\n # priority = X.shape[0]*np.ones((len(indices)), dtype=int)\n #\n # # priority = {}\n #\n # # priority = np.zeros((X.shape[0]), dtype = int)\n # #priority is put equal to X.shape to assure preference over the updated priorities based on ML\n # # for index in indices:\n # # priority[index] = X.shape[0]\n #\n # #priority can be computed directly, without \"indices\", but \"indices\" might be useful for debugging\n #\n # priority = pd.DataFrame({'id' : allmats.id.iloc[indices], 'priority' : priority})\n #\n # print(\"priorities of initial sampling plan are set\")\n #\n # return priority\n\n if isinstance(materials, 
pd.DataFrame):\n materials = addElemental(materials, elfeatures)\n\n files_materials = getFile(queue, stat)\n\n if target in ['Ehull', 'E0', 'Eatom', 'Epure']:\n materialt = getResults(queue, stat, [target])\n\n materials = materials.join(materialt)\n\n X_train = pca.transform((materials[allfeats] - allmats[allfeats].mean(axis=0)) / allmats[allfeats].std(axis=0))\n\n # initialize kernel and GP\n kernel = gp.kernels.ConstantKernel() * gp.kernels.Matern(nu=5 / 2) + gp.kernels.WhiteKernel()\n model = gp.GaussianProcessRegressor(kernel=kernel,\n alpha=1e-5,\n n_restarts_optimizer=10,\n normalize_y=True)\n # fit model\n # print((allmats[allfeats]-allmats[allfeats].mean(axis = 0))/allmats[allfeats].std(axis = 0))\n model.fit((X_train - train_means) / train_stds, materials[target])\n\n ids_done = set(\n [id for ind, id in enumerate(allmats.id) if files_allmats['file'].iloc[ind] in set(files_materials['file'])])\n\n ids_TBD = list(set(allmats.id).difference(ids_done))\n\n indices_TBD = np.array([index for index, id in enumerate(allmats.id) if id in ids_TBD])\n\n # get predictions and uncertainties\n mu, sigma = model.predict(X[indices_TBD], return_std=True)\n\n prob_stab = norm.cdf((stable_limit - mu) / sigma)\n\n print(np.max(prob_stab))\n # print(files_allmats['file'].iloc[indices_TBD][prob_stab.argsort()[-1]])\n\n # get rank, the higher the better\n rank = prob_stab.argsort()\n\n # create priorities based on rank\n priority = np.zeros(X.shape[0], dtype=int)\n\n # The higher in the ranking the higher the priority should be\n for ind, rnk in enumerate(rank):\n priority[indices_TBD[rnk]] = ind\n\n priority = pd.DataFrame({'id': allmats.id, 'priority': priority})\n # print(allmats.id)\n # priority = pd.DataFrame({'id' : ids_TBD, 'priority' : priority})\n # output = priority[~priority.id.isin(materials['id'])]\n return mu,sigma" }, { "alpha_fraction": 0.701154351234436, "alphanum_fraction": 0.7054296731948853, "avg_line_length": 35.546875, "blob_id": "a0ffed742a9229e166967955973ba069e6672a4d", "content_id": "9ef9ba4214fc6089b3d62325620434072045adbc", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2339, "license_type": "permissive", "max_line_length": 124, "num_lines": 64, "path": "/HTtools/HTehull", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport warnings\nwarnings.filterwarnings(\"ignore\")\nimport pymatgen\nprint(pymatgen.__file__)\nfrom pymatgen.ext.matproj import MPRester\nfrom pymatgen import Composition\nfrom pymatgen.entries.computed_entries import ComputedEntry\nfrom pymatgen.core.units import FloatWithUnit\nfrom pymatgen.analysis.reaction_calculator import ComputedReaction\nfrom pymatgen.apps.borg.hive import VaspToComputedEntryDrone\nfrom pymatgen.apps.borg.queen import BorgQueen\nfrom pymatgen.analysis.phase_diagram import *\nfrom pymatgen.entries.compatibility import MaterialsProjectCompatibility\nimport re, sys, json, os\ndrone = VaspToComputedEntryDrone()\nqueen = BorgQueen(drone, sys.argv[1], 4)\nentriesorig = queen.get_data()\nqueen.load_data(os.path.join(os.path.dirname(__file__), '../ML/data/missingels.json'))\nentriesextra = queen.get_data() \n\nprint(entriesorig)\n\nif len(sys.argv) > 2:\n compat = MaterialsProjectCompatibility(check_potcar=False)\n entriesorig = compat.process_entries(entriesorig) \n\nfor entry in entriesorig:\n name = entry.name\n line = re.findall('[A-Z][^A-Z]*',name.replace('(','').replace(')',''))\n\nsearchset= 
set(re.sub('\\d',' ',' '.join(line)).split())\nentries = filter(lambda e: set(re.sub('\\d',' ',str(e.composition).replace(' ','')).split())==searchset, entriesorig)\n\nentriesextra = filter(lambda e: set(re.sub('\\d',' ',str(e.composition).replace(' ','')).split()) & searchset, entriesextra)\n#This initializes the REST adaptor. Put your own API key in.\na = MPRester(\"s2vUo6mzETOHLdbu\")\n#unknownEls = set(['Po','Ra'])\n\n#if unknownEls & searchset:\n# print('99', None)\n# exit()\n\n#print(a.get_stability(entriesorig))\nall_entries = a.get_entries_in_chemsys(set(searchset)) + list(entries) + list(entriesextra)\n\npd = PhaseDiagram(all_entries)\n\ndef name(potcar):\n name = ''\n for p in potcar:\n temp = (p.split(' ')[-2].split('_')[0])\n name += temp\n return name\nfor e in pd.stable_entries:\n if e.entry_id == None:\n reaction = pd.get_equilibrium_reaction_energy(e)\n print(reaction,None)\n\nfor e in pd.unstable_entries:\n decomp, e_above_hull = pd.get_decomp_and_e_above_hull(e)\n pretty_decomp = [(\"{}:{}\".format(k.composition.reduced_formula, k.entry_id), round(v, 2)) for k, v in decomp.items()]\n if e.entry_id == None:\n print(e_above_hull,pretty_decomp)\n" }, { "alpha_fraction": 0.603383481502533, "alphanum_fraction": 0.6090225577354431, "avg_line_length": 34.46666717529297, "blob_id": "8c5e0db03be9ea6a58960f0a882aeb2f00344d46", "content_id": "bcb73f1ffbcf3384fd375b0f4ea7b5279f372cd2", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 532, "license_type": "permissive", "max_line_length": 139, "num_lines": 15, "path": "/HTtools/HToverview", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nfrom HighThroughput.communication.mysql import mysql_query\nimport sys\n\nif int(sys.argv[1]) > 0:\n overview = mysql_query('SELECT `stat`, COUNT(`stat`) AS `count` FROM `calculations` WHERE queue = ' + sys.argv[1] + ' GROUP BY `stat`')\n print('Stat\\t# jobs')\n print(overview)\n if(type(overview) == list):\n for o in overview:\n print(o['stat'] + '\\t' + o['count'])\n else:\n print(overview['stat'] + '\\t' + overview['count'])\nelse:\n print('Please enter a proper Queue ID.')\n" }, { "alpha_fraction": 0.630446195602417, "alphanum_fraction": 0.6540682315826416, "avg_line_length": 28.78125, "blob_id": "b22c6ca47fd3e06ad85919d99de64cbdcae1916b", "content_id": "b3e671d0b617d04a3df6c901288d03823424453b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1905, "license_type": "permissive", "max_line_length": 178, "num_lines": 64, "path": "/examples/Backtesting/backtest.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport HighThroughput.manage.queue as HTq\nimport HighThroughput.manage.calculation as HTc\nimport HighThroughput.ML.models.priority as HTML\nfrom HighThroughput.communication.mysql import mysql_query\nimport pickle, sys, json\nimport random\nimport numpy as np\nimport tensorflow as tf\n\ntf.set_random_seed(666)\nrandom.seed(666)\nnp.random.seed(666)\n\nnsample = 500\nstable = 0\n#target = sys.argv[1]\nrdict = {}\nrlist = []\nlimit = 0.05\nN_init = 50\nbatch_size = 1\n\nnewq = 251\n\ntarget = 'Ehull'\n\n#with open('backtest.pkl','rb') as btfile:\n# newq, materials = pickle.load(btfile)\n\nmaterials = mysql_query('SELECT `file`, `Ehull` FROM `zintlfinal` WHERE `queue` = 239')\n\nfor mat in materials:\n #result = json.loads(mat['results'])\n 
rdict[mat['file']] = float(mat[target])\n\ntstable = sum([1 for x in rdict.values() if float(x) < limit])\nprint('There are ' + str(tstable) + ' stable materials to find in this queue.')\n\n#HTML.setMLPriority(newq,stat=2)\n\n#HTML.setMLPriority(newq, 2, ['mass','Ecoh','EN','IP'], N_init)\n#\n#for i in range(N_init):\n# calc = HTc.fetchgetstart(newq)\n# HTc.updateResults({target: rdict[calc['file']]})\n#\n# if rdict[calc['file']] < limit:\n# stable += 1\n# print('Found ' + str(stable) + ' stable materials (' + str(int(round(100*stable/tstable,0))) + ' %) in ' + str(i+1) + ' samples.')\n# HTc.end()\n \nfor i in range(nsample):\n if i%batch_size==0:\n HTML.updateMLPriority(newq,stat=2,modelClass= 'sklearn.gaussian_process.GaussianProcessRegressor',target = 'Ehull',features = ['mass','Ecoh','EN','IP'] ,maxParallel=1)\n calc = HTc.fetchgetstart(newq)\n print(calc)\n HTc.updateResults({target: rdict[calc['file']]})\n\n if rdict[calc['file']] < limit:\n stable += 1\n print('Found ' + str(stable) + ' stable materials (' + str(int(round(100*stable/tstable,0))) + ' %) in ' + str(i+1) + ' samples.')\n HTc.end()" }, { "alpha_fraction": 0.7638036608695984, "alphanum_fraction": 0.7760736346244812, "avg_line_length": 29.5625, "blob_id": "0a10b57d942b82f623704885f2e412f08c960f2c", "content_id": "3a407872d0ef66995fdd3c4bb354d968c3f28a67", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 978, "license_type": "permissive", "max_line_length": 60, "num_lines": 32, "path": "/HTtools/HTdos", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# from pymatgen.electronic_structure.dos import CompleteDos\nfrom pymatgen.electronic_structure.core import Spin\nfrom pymatgen.electronic_structure.plotter import DosPlotter\nfrom pymatgen.electronic_structure.core import Spin, Orbital\nfrom pymatgen.io.vasp.outputs import Vasprun, Procar\nfrom pymatgen.core.ion import Ion\nimport sys\nimport matplotlib.pyplot as plt\nimport seaborn as sns\nimport numpy as np\nvasprun = Vasprun(sys.argv[1])\n\npdos = vasprun.pdos\ntdos = vasprun.tdos\nidos = vasprun.idos\nefermi = vasprun.efermi\nenergy = vasprun.eigenvalues\nstructure = vasprun.structures\n\n\nde = tdos.energies[1] - tdos.energies[0]\ndiff = []\ntotal = tdos.densities[Spin.up]\ndiffx = tdos.energies-efermi+de/2\nfor i in range(len(idos.densities[Spin.up])-1):\n diff.append((total[i+1] - total[i])/de)\n\nplt.plot(tdos.energies-efermi,tdos.densities[Spin.up])\nplt.plot(tdos.energies-efermi,idos.densities[Spin.up])\nplt.plot(diffx[0:-1],np.array(diff)/1000)\nplt.show()\n" }, { "alpha_fraction": 0.7089372277259827, "alphanum_fraction": 0.7185990214347839, "avg_line_length": 29.703702926635742, "blob_id": "3141d5d2e3048c7b6a6f1758b72beba905ce1be9", "content_id": "ec3f7749a6962285b6212e8328102aa2fb4b1293", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 828, "license_type": "permissive", "max_line_length": 134, "num_lines": 27, "path": "/examples/Backtesting/setup.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Setup a backtesting queue\nimport HighThroughput.manage.queue as HTq\nimport HighThroughput.manage.calculation as HTc\n\nfrom HighThroughput.communication.mysql import mysql_query\nimport pickle, sys, json\n\n# The target queue we will try predicting\ntqueue = sys.argv[1]\n\n# The target status of the final property 
calculation\ntstat = sys.argv[2]\n\n# We use a default 1 step, 3 stat workflow\nnewq = HTq.add('Backtesting ' + str(tqueue),workflow = 20)\n\n# get all the materials\nmaterials = mysql_query('SELECT `file`, `results` FROM `calculations` WHERE `queue` = ' + str(tqueue) + ' AND `stat` = ' + str(tstat))\n\n# Add them all to our new queue\nfor mat in materials:\n HTc.add(mat['file'], newq, priority = 1)\n\n# Save the info\nwith open('backtest.pkl','wb') as btfile:\n pickle.dump((newq,materials),btfile)" }, { "alpha_fraction": 0.5728453993797302, "alphanum_fraction": 0.5875512957572937, "avg_line_length": 36.96104049682617, "blob_id": "a29dcc4b562b899957775236e077297e85b99c5e", "content_id": "c4eff7d745aa1111ece35266cb1c46d107675edf", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2924, "license_type": "permissive", "max_line_length": 113, "num_lines": 77, "path": "/errors/Gaussianfixes.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "import os,math\n\ndef test(calc):\n #Dummy\n print('This is a bugfix.')\n return True\n\ndef rmWAVECAR(calc):\n #2: CHGCAR more reliable so clear WAVECAR\n open('WAVECAR', 'w').close() \n return True\n\ndef rmCHGCAR(calc):\n #7: In case of corrupted density\n open('CHGCAR', 'w').close()\n return True\n\ndef algoSwitch(calc):\n #3: Switch between All/Damped Normal/Fast\n if 'ALGO' not in calc['settings']['INCAR'].keys():\n calc['settings']['INCAR']['ALGO'] = 'Fast'\n elif calc['settings']['INCAR']['ALGO'][0] == 'N':\n calc['settings']['INCAR']['ALGO'] = 'Normal'\n elif calc['settings']['INCAR']['ALGO'][0] == 'D':\n calc['settings']['INCAR']['ALGO'] = 'A'\n elif calc['settings']['INCAR']['ALGO'][0] == 'A':\n calc['settings']['INCAR']['ALGO'] = 'D'\n return True\n\ndef halveStep(calc):\n #4: bit much for multiple times\n if 'TIME' in calc['settings']['INCAR']:\n calc['settings']['INCAR']['TIME'] = math.ceil(float(calc['settings']['INCAR']['TIME'])*100.0/2.0)/100.0\n elif 'POTIM' in calc['settings']['INCAR']:\n calc['settings']['INCAR']['POTIM'] = math.ceil(float(calc['settings']['INCAR']['POTIM'])*100.0/2.0)/100.0\n return True\n\ndef doubleStep(calc):\n #5: bit much for multiple times\n if 'TIME' in calc['settings']['INCAR']:\n calc['settings']['INCAR']['TIME'] = float(calc['settings']['INCAR']['TIME'])*2.0\n elif 'POTIM' in calc['settings']['INCAR']:\n calc['settings']['INCAR']['POTIM'] = float(calc['settings']['INCAR']['POTIM'])*2.0\n return True\n\ndef preconv(calc):\n #8: Preconverge calculation with another algorithm.\n preconvAlgo = {'A' : 'N', 'D' : 'N'}\n calc['settings']['INCAR']['ALGOb'] = calc['settings']['INCAR']['ALGO']\n calc['settings']['INCAR']['ALGO'] = preconvAlgo[calc['settings']['INCAR']['ALGO'][0]]\n calc['settings']['INCAR']['NELMb'] = calc['settings']['INCAR']['NELM'] \n calc['settings']['INCAR']['NELM'] = '8'\n return True\n\ndef restorePreconv(calc):\n #9: Restore the original settings before preconvergence.\n if os.path.isfile('CHGCAR.prec'):\n if os.stat('CHGCAR.prec').st_size > 0:\n os.rename('CHGCAR.prec','CHGCAR')\n if 'ALGOb' in calc['settings']['INCAR'].keys():\n calc['settings']['INCAR']['ALGO'] = calc['settings']['INCAR']['ALGOb'] \n del calc['settings']['INCAR']['ALGOb'] \n if 'NELMb' in calc['settings']['INCAR'].keys(): \n calc['settings']['INCAR']['NELM'] = calc['settings']['INCAR']['NELMb']\n del calc['settings']['INCAR']['NELMb']\n return True\n\ndef startWAVECAR(calc):\n #10 Ensure a preconverged WAVECAR is used for the 
new coefficients and the density.\n calc['settings']['INCAR']['ISTART'] = \"1\"\n calc['settings']['INCAR']['ICHARG'] = \"0\" \n return True\n\ndef startCHGCAR(calc):\n calc['settings']['INCAR']['ISTART'] = \"0\"\n calc['settings']['INCAR']['ICHARG'] = \"1\"\n return True\n\n" }, { "alpha_fraction": 0.5100113749504089, "alphanum_fraction": 0.5204297304153442, "avg_line_length": 34.304595947265625, "blob_id": "55edf5a2ddd388376006c9330fd9958aa10c7782", "content_id": "d587cb1b34843b1bde0bcc8afe7739523b2182c8", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6143, "license_type": "permissive", "max_line_length": 529, "num_lines": 174, "path": "/manage/material.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "from HighThroughput.communication.mysql import *\nimport HighThroughput.io.CIF as CIF\nimport os\nimport ase.io\nfrom numpy.linalg import norm\nfrom numpy import dot, arccos, degrees\n\ndef get(cifid):\n if int(cifid) < 10000000:\n table = 'data'\n else:\n table = 'newdata'\n return mysql_query('SELECT * FROM `' + table + '` WHERE `file` = ' + str(cifid))\n\ndef importdir(directory=os.getcwd()):\n i=0;\n os.chdir(directory)\n for filename in os.listdir(directory):\n if os.path.isdir(directory + '/' + filename):\n importdir(directory + '/' + filename);\n else:\n val = add(filename)\n if val != 0:\n i = i+1\n return i\n\ndef importdirbatch(directory=os.getcwd(),group=None,description='No description provided.'):\n owner = mysql_query('')\n\n if group is None:\n group = mysql_query('INSERT INTO mgroups (`owner`, `description`) VALUES (' + str(owner) + ',' + str(description) + ')')\n print('The materials group is ' + str(group))\n i=0;\n os.chdir(directory)\n filenames = []\n for filename in os.listdir(directory):\n if os.path.isdir(directory + '/' + filename):\n importdir(directory + '/' + filename);\n else:\n filenames.append(filename)\n return batchadd(filenames,group)\n\ndef modify(params):\n query = 'UPDATE `newdata` SET '\n for key in params.keys():\n if key != 'file':\n query += '`' + key + '` ='\n if not str(params[key]).isdigit():\n query += '\\'' + str(params[key]) + '\\''\n else: \n query += str(params[key])\n query += ', ' \n query = query[:-2] + ' WHERE `file` = ' + str(params['file'])\n result = mysql_query(query)\n \n if (result == '1'):\n print('The calculation has been modified. Please verify.')\n elif (result == '0'):\n print('Nothing to modify.')\n else: \n print('Help... 
Me...')\n return result\n\n\ndef batchadd(filenames,group):\n query = ''\n owner = mysql_query('')\n name = mysql_query('SELECT `name` FROM `accounts` WHERE `id` = ' + owner)\n i=0\n for filename in filenames:\n title = filename.rsplit('.', 2);\n print(filename)\n if not title[0].isdigit():\n crystal = ase.io.read(filename);\n else:\n continue\n\n elements = crystal.get_chemical_symbols();\n unique = set(elements);\n\n for el in unique:\n el = el + str(elements.count(el));\n\n unique = [el + str(elements.count(el)) for el in unique];\n unique = ' '.join(unique);\n\n cell = crystal.get_cell();\n a = cell[0];\n b = cell[1];\n c = cell[2];\n na = round(norm(a), 3);\n nb = round(norm(b), 3);\n nc = round(norm(c), 3);\n alpha = round(degrees(arccos(dot(b, c) / nb / nc)), 1);\n beta = round(degrees(arccos(dot(c, a) / nc / na)), 1);\n gamma = round(degrees(arccos(dot(a, b) / na / nb)), 1);\n volume = round(crystal.get_volume(), 3);\n\n CIFtext = CIF.write('temp.cif', crystal)\n os.remove('temp.cif')\n\n\n query += 'INSERT INTO newdata (`a`, `b`, `c`, `alpha`, `beta`, `gamma`, `vol`, `celltemp`, `formula`, `calcformula`, `authors`, `year`, `text`, `owner`, `cif`,`mgroup`) VALUES (' + str(na) + ', ' + str(nb) + ', ' + str(nc) + ',' + str(alpha) + ', ' + str(beta) + ', ' + str(gamma) + ',' + str(volume) + ', 0, \\'' + unique + '\\', \\'' + unique + '\\', \\'' + name['name'] + '\\', 2012,\\'' + title[0].replace('POSCAR', '').replace('.com', '') + '\\', ' + owner + ',\\'' + CIFtext.replace('\\'','\\\\\\'') + '\\', ' + str(group) + ');'\n\n i += 1\n if i % 100 == 0:\n result = mysql_query(query);\n print(result)\n query = ''\n\n cifid = result\n\n if int(result) > 0:\n print('Inserted batch ' + str(i) + ' into the materials database.')\n else:\n print('Insert failed on batch ' + str(i) + '.')\n result = mysql_query(query);\n print(result)\n query = ''\n\n cifid = result\n\n if int(result) > 0:\n print('Inserted batch ' + str(i) + ' into the materials database.')\n else:\n print('Insert failed on batch ' + str(i) + '.')\n return cifid\n\ndef add(filename):\n title = filename.rsplit('.',2);\n print(filename) \n if not title[0].isdigit():\n crystal = ase.io.read(filename);\n else:\n return 0\n\n elements = crystal.get_chemical_symbols();\n unique = set(elements);\n \n for el in unique:\n el = el + str(elements.count(el));\n \n unique = [el + str(elements.count(el)) for el in unique];\n unique= ' '.join(unique);\n\n cell = crystal.get_cell();\n a = cell[0];\n b = cell[1];\n c = cell[2];\n na = round(norm(a),3);\n nb = round(norm(b),3);\n nc = round(norm(c),3);\n alpha = round(degrees(arccos(dot(b,c)/nb/nc)),1);\n beta = round(degrees(arccos(dot(c,a)/nc/na)),1);\n gamma = round(degrees(arccos(dot(a,b)/na/nb)),1);\n volume = round(crystal.get_volume(),3); \n \n CIFtext = CIF.write('temp.cif',crystal)\n os.remove('temp.cif')\n\n owner = mysql_query('')\n name = mysql_query('SELECT `name` FROM `accounts` WHERE `id` = ' + owner)\n \n result = mysql_query('INSERT INTO newdata (`a`, `b`, `c`, `alpha`, `beta`, `gamma`, `vol`, `celltemp`, `formula`, `calcformula`, `authors`, `year`, `text`, `owner`, `cif`) VALUES (' + str(na) + ', ' + str(nb) + ', ' + str(nc) + ',' + str(alpha) + ', ' + str(beta) + ', ' + str(gamma) + ',' + str(volume) + ', 0, \\'' + unique + '\\', \\'' + unique + '\\', \\'' + name['name'] + '\\', 2012,\\'' + title[0].replace('POSCAR','').replace('.com','') + '\\', ' + owner + ',\\'' + CIFtext.replace('\\'','\\\\\\'') + '\\')');\n cifid = result\n if int(result) > 0:\n print('Inserted ' + 
title[0].replace('POSCAR','').replace('.com','') + ' into the materials database as cif ' + str(cifid) + '.')\n else:\n print('Insert failed.')\n if len(title) > 1:\n os.rename(filename,str(cifid) + '.' + title[1]);\n else:\n os.rename(filename,str(cifid));\n return cifid\n" }, { "alpha_fraction": 0.571465253829956, "alphanum_fraction": 0.5758275389671326, "avg_line_length": 39.17525863647461, "blob_id": "836114eb5a61b703c7e9dd37075750ed94bf1412", "content_id": "fbf1912a6913be1b1e1199e9e7e2bb559ba64a13", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3897, "license_type": "permissive", "max_line_length": 163, "num_lines": 97, "path": "/errors/generic.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "from ..manage.calculation import updateResults,getResults\nfrom ..communication.mysql import *\nimport importlib,json\n\ndef finderrors(calc):\n detectors = mysql_query('SELECT `id`,`detector`,`software` FROM `errors` WHERE `software` IN (\\'' + calc['software'] + '\\', \\'any\\') ORDER BY `priority` DESC')\n swdet = importlib.import_module('HighThroughput.errors.' + calc['software'] + 'detectors')\n generic = importlib.import_module('HighThroughput.errors.genericdetectors')\n errors = []\n calc['cerrors'] = []\n if not isinstance(detectors,list):\n detectors = [detectors]\n for det in detectors:\n #could join\n detector = mysql_query('SELECT `name` FROM `errorhandlers` WHERE `id` = ' + det['detector'])\n print('Checking for error ' + str(detector['name']))\n if det['software'] == 'any':\n func = getattr(generic,detector['name'])\n else:\n func = getattr(swdet,detector['name'])\n detected = func(calc)\n if detected:\n errors.append(det['id'])\n calc['cerrors'] = errors\n\n results = getResults(calc['parent'])\n if 'errors' not in results.keys():\n results['errors'] = 0\n\n print('DEBUG: number of errors' + str(len(errors)))\n if len(errors) > 0:\n err =','.join(errors)\n if 'errorfix' not in results.keys():\n results['errorfix'] = {}\n #else:\n # results['errorfix'] = json.loads(results['errorfix'])\n \n if results['errors'] != err:\n results['errors'] = err\n results['errorfix'][err] = 0\n #results['errorfix'] = json.dumps(results['errorfix'])\n updateResults(results,calc['parent'])\n mainfuncs = importlib.import_module('HighThroughput.modules.' + calc['software'])\n abort = getattr(mainfuncs,'abort')\n return abort(calc)\n else:\n if 'errors' in results.keys():\n del results['errors']\n if 'errorfix' in results.keys():\n del results['errorfix']\n updateResults(results,calc['parent'])\n del calc['cerrors']\n return 0\n\ndef fixerrors(calc):\n results = getResults(calc['parent'])\n if 'errors' not in results.keys():\n return 0\n if 'errorfix' not in results.keys():\n results['errorfix'] = {}\n #else:\n #results['errorfix'] = json.loads(results['errorfix'])\n\n errors = results['errors'].split(',')\n for err in errors:\n fixflow = mysql_query('SELECT `flow` FROM `errors` WHERE `id` = ' + err)\n steps = fixflow['flow'].split(';')\n expanded = []\n for step in steps:\n curflow=step.split(':')\n if len(curflow) == 1:\n curflow.append(1)\n for i in range(0,int(curflow[1])):\n expanded.append(curflow[0])\n if err not in results['errorfix'].keys():\n results['errorfix'][err] = 0\n \n current = expanded[results['errorfix'][err] % len(expanded)]\n actions = current.split(',')\n swdet = importlib.import_module('HighThroughput.errors.' 
+ calc['software'] + 'fixes')\n generic = importlib.import_module('HighThroughput.errors.genericfixes')\n for action in actions:\n fix = mysql_query('SELECT `name`,`description`, `software` FROM `errorhandlers` WHERE `id` = ' + action)\n if fix['software'] == 'any':\n func = getattr(generic,fix['name'])\n else:\n func = getattr(swdet,fix['name'])\n print('ERROR FIX: ' + fix['description'])\n func(calc)\n results['errorfix'][err] += 1\n #results['errorfix'] = json.dumps(results['errorfix'])\n results2 = getResults(calc['parent'])\n results2['errorfix'] = results['errorfix']\n print('This is the final errorfix update.')\n print(results2)\n updateResults(results2,calc['parent']) \n return 0\n" }, { "alpha_fraction": 0.5121693015098572, "alphanum_fraction": 0.5216931104660034, "avg_line_length": 35.346153259277344, "blob_id": "41bf4d9a70c5b3916fd9393800e98cd010ad809c", "content_id": "816a7d6b6d5b1532fd0d1071e13163091877d3c7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3780, "license_type": "permissive", "max_line_length": 302, "num_lines": 104, "path": "/ML/features/database.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom HighThroughput.communication.mysql import mysql_query\nimport pandas as pd\nimport json\nimport re\ncolumns_elements = {'Na': 1, 'K' : 1, 'Rb': 1, 'Cs': 1,\n 'Mg': 2, 'Ca': 2, 'Sr': 2, 'Ba': 2,\n 'Al': 3, 'Ga': 3, 'In': 3, 'Tl': 3,\n 'Si': 4, 'Ge': 4, 'Sn': 4, 'Pb': 4,\n 'P' : 5, 'As': 5, 'Sb': 5, 'Bi': 5,\n 'S' : 6, 'Se': 6, 'Te': 6}\n\ndef _gcd (a,b):\n if (b == 0):\n return a\n else:\n return _gcd (b, a % b)\n \ndef _get_gcd(array):\n res = array[0]\n for c in array[1::]:\n res = _gcd(res , c)\n return res\n \ndef getComposition(queue,stat):\n# rows = mysql_query('SELECT `calculations`.`file` AS `file`, `calculations`.`id` AS `id`, `newdata`.`formula` AS `formula` FROM `calculations` INNER JOIN `newdata` ON `calculations`.`file`=`newdata`.`file` WHERE `calculations`.`queue` = ' + str(queue) + ' AND `calculations`.`stat` = ' + str(stat))\n rows = mysql_query('SELECT `calculations`.`file` AS `file`, `calculations`.`id` AS `id`, `newdata`.`text` AS `text` FROM `calculations` INNER JOIN `newdata` ON `calculations`.`file`=`newdata`.`file` WHERE `calculations`.`queue` = ' + str(queue) + ' AND `calculations`.`stat` = ' + str(stat))\n \n if type(rows) != list:\n return [rows]\n compdict = {'id' : []}\n for row in rows:\n compdict['id'].append(row['id'])\n \n# formula = row['formula'].split()\n formula = re.findall('[A-Z][^A-Z]*', row['text'])\n# print(formula)\n stoich = [[int(i) for i in x if i.isdigit()] for x in formula]\n stoich = [x if len(x)>0 else [1] for x in stoich]\n stoich = [sum([i*10**(len(x)-ind-1) for ind, i in enumerate(x)]) for x in stoich]\n els = [ ''.join([i for i in x if not i.isdigit()]) for x in formula]\n# ids_reordered = sorted(range(len(els)), key=[columns_elements[el] for el in els].__getitem__)\n \n# gcd = _get_gcd(stoich)\n \n# stoich = [int(stoich[id]/gcd) for id in ids_reordered]\n# els = [els[id] for id in ids_reordered] \n if len(els) == 3:\n els.append('He')\n stoich.append(0)\n for i in range(len(els)):\n if 'el' + str(i) not in compdict:\n compdict['el' + str(i)] = []\n compdict['stoich' + str(i)] = []\n \n compdict['el' + str(i)].append(els[i])\n compdict['stoich' + str(i)].append(stoich[i])\n composition = pd.DataFrame(compdict)\n composition.fillna(0)\n return composition\n \ndef getFile(queue,stat):\n 
rows = mysql_query('SELECT `file` FROM `calculations` WHERE `queue` = ' + str(queue) + ' AND `stat` = ' + str(stat))\n \n file = []\n \n for row in rows:\n file.append(row['file'])\n \n file = pd.DataFrame({'file' : file})\n return file\n\n\ndef getID(queue,stat):\n rows = mysql_query('SELECT `id` FROM `calculations` WHERE `queue` = ' + str(queue) + ' AND `stat` = ' + str(stat))\n \n cid = []\n \n for row in rows:\n cid.append(row['id'])\n \n ID = pd.DataFrame({'id' : cid})\n return ID\n \ndef getResults(queue,stat,keys):\n rows = mysql_query('SELECT `file`, `results` FROM `calculations` WHERE `queue` = ' + str(queue) + ' AND `stat` = ' + str(stat))\n \n keys.insert(0,'file')\n \n resultsdict = { 'file' : []}\n \n for key in keys:\n resultsdict[key] = []\n \n for row in rows:\n resultsdb = json.loads(row['results'])\n for key in keys:\n if key == 'file':\n resultsdict[key].append(row[key])\n else: \n resultsdict[key].append(resultsdb[key])\n \n results = pd.DataFrame(resultsdict)\n return results\n" }, { "alpha_fraction": 0.605597972869873, "alphanum_fraction": 0.6183205842971802, "avg_line_length": 29.230770111083984, "blob_id": "40740865a64721493cb4ee8a9623ec7728653382", "content_id": "28ce2431525228c2641eea6fb009724d6b6ecf41", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 393, "license_type": "permissive", "max_line_length": 150, "num_lines": 13, "path": "/HTtools/HTjobid", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport sys\nfrom HighThroughput.communication.mysql import *\n\nif len(sys.argv) == 4:\n extra = ' `stat` = ' + str(sys.argv[3])\nelse:\n extra = ' `leaf` = 1'\n\nresult = mysql_query('SELECT `jobid` FROM `calculations` WHERE `queue` = ' + str(sys.argv[1]) + ' AND `file` = ' + str(sys.argv[2]) + ' AND ' + extra)\n\nif not isinstance(result,str):\n print(result['jobid'])\n" }, { "alpha_fraction": 0.49193549156188965, "alphanum_fraction": 0.5427419543266296, "avg_line_length": 37.734375, "blob_id": "dbccb9bffc606a6cf4d10cf4ee93b6b26175c0d9", "content_id": "cd47702fd384f79754b91fa9cb7eebef7836d409", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2480, "license_type": "permissive", "max_line_length": 151, "num_lines": 64, "path": "/HTtools/HTeoslimfix", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n#output to file, view with more\nimport os,subprocess,sys\n\ndef checkeos(eospath):\n #check r2\n if not os.path.isfile(eospath):\n return\n\n r = subprocess.Popen('cat ' + eospath + '.eosout | grep 1-R | awk \\'{print $2}\\'', stdout = subprocess.PIPE, shell = True).communicate()[0].strip()\n B = subprocess.Popen('cat ' + eospath + '.eosout | grep GPa | awk \\'{print $2}\\'', stdout = subprocess.PIPE, shell = True).communicate()[0].strip()\n BP = subprocess.Popen('cat ' + eospath + '.eosout | grep BP | awk \\'{print $2}\\'', stdout = subprocess.PIPE, shell = True).communicate()[0].strip()\n V0 = subprocess.Popen('cat ' + eospath + '.eosout | grep V0 | awk \\'{print $2}\\'', stdout = subprocess.PIPE, shell = True).communicate()[0].strip()\n E0 = subprocess.Popen('cat ' + eospath + '.eosout | grep E0 | awk \\'{print $2}\\'', stdout = subprocess.PIPE, shell = True).communicate()[0].strip()\n Vmin = subprocess.Popen('head -n 1 ' + eospath + ' | awk \\'{print $1}\\'', stdout = subprocess.PIPE, shell = True).communicate()[0].strip()\n 
Vmax = subprocess.Popen('tail -n 1 ' + eospath + ' | awk \\'{print $1}\\'', stdout = subprocess.PIPE, shell = True).communicate()[0].strip()\n color = ['\\033[1m\\033[32m','\\033[1m\\033[33m','\\033[1m\\033[91m']\n psplit = eospath.split('/')\n \n output = '\\033[0m' + psplit[-3] + ': '\n \n if float(r) < 0.0005:\n i = 0\n elif float(r) > 0.009:\n i = 2\n subprocess.Popen('HTeosreset ' + sys.argv[1] + ' ' + psplit[-3],shell=True)\n else:\n i = 1\n\n output += ' \\t\\033[0m1-R^2: ' + color[i] + r\n\n if float(B) < 10 or float(B) > 400:\n j = 2\n else:\n j = 0\n\n output += ' \\t\\033[0m B:' + color[j] + B\n\n if float(BP) < 3 or float(BP) > 5:\n k = 2\n else:\n k = 0\n\n output += ' \\t\\033[0m BP:' + color[k] + BP\n\n if float(V0) < float(Vmin) or float(V0) > float(Vmax):\n l = 2\n elif (float(V0) - float(Vmin))/(float(Vmax) - float(V0)) < 0.05 or (float(Vmax) - float(V0))/(float(Vmax) - float(V0)) < 0.05:\n l = 1\n else:\n l = 0\n\n output += color[l] + ' \\t\\033[0mV: ' + color[l] + Vmin + '<' + V0 + '<' + Vmax\n \n if i != 0 or j !=0 or k !=0 or l != 0:\n output += ' \\033[1m(X)'\n if i != 0 or l != 0:\n print(output)\n\nrootdir = os.curdir\nfor dirs in os.listdir(rootdir):\n if dirs[0] == '1':\n eospath = os.path.join(rootdir,dirs,'STEP4/EOS_data')\n checkeos(eospath)\n\n" }, { "alpha_fraction": 0.5990338325500488, "alphanum_fraction": 0.6287094354629517, "avg_line_length": 32.69767379760742, "blob_id": "f09e3e1a2c2e40fcf91b0cde93fa71a473b82350", "content_id": "4f6b2d3be058e2e6b1a7f4cfaa6cadc963cd5f26", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1449, "license_type": "permissive", "max_line_length": 102, "num_lines": 43, "path": "/HTtools/HTeosreset", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, shutil, sys , time\n\nvols = [str(float(vol)/100) for vol in range(94,108,2)]\n\nedir = sys.argv[2]\n\nf = open(os.path.join(edir,'STEP4/EOS_data'),'r')\nenmin = 999999999.999999999\ni=0\nfor line in f.readlines():\n en = float(line.split()[1])\n \n if en < enmin:\n enmin = en\n volmin = vols[i]\n i += 1\n\nshutil.copy2(os.path.join(edir,'STEP3',volmin,'CONTCAR'),os.path.join('../import',edir + '.vasp'))\nif os.path.isfile(os.path.join(edir,'STEP3',volmin,'CHGCAR')):\n shutil.copy2(os.path.join(edir,'STEP3',volmin,'CHGCAR'),os.path.join('../import','CHGCAR' + edir))\n\ndirlist = ['STEP3/' + vol for vol in vols if vol != volmin]\ndirlist.extend(['BANDS','DOS','STEP4'])\n\nfor dirs in dirlist:\n if os.path.isfile(os.path.join(edir,dirs,'CHGCAR')):\n os.remove(os.path.join(edir,dirs,'CHGCAR'))\n if os.path.isfile(os.path.join(edir,dirs,'CHG')):\n os.remove(os.path.join(edir,dirs,'CHG'))\n if os.path.isfile(os.path.join(edir,dirs,'WAVECAR')):\n os.remove(os.path.join(edir,dirs,'WAVECAR'))\n\n\nif not os.path.isdir(os.path.join(edir,'old' + str(time.time()))):\n os.mkdir(os.path.join(edir,'old' + str(time.time())))\n\ndirlist = ['BANDS','DOS','STEP4','STEP3']\nfor dirs in dirlist:\n if os.path.isdir(os.path.join(edir,dirs)):\n shutil.move(os.path.join(edir,dirs),os.path.join(edir,'old' + str(time.time())))\n\nos.system('HTrollback ' + sys.argv[1] + ' ' + edir + ' 0')\n" }, { "alpha_fraction": 0.6571428775787354, "alphanum_fraction": 0.6725274920463562, "avg_line_length": 36.91666793823242, "blob_id": "7e538c481a5bed6e5ea6884a2aba04255d22c052", "content_id": "e096275084499bd5eb39ff140641fc50b859420d", "detected_licenses": [ "MIT" ], "is_generated": false, 
"is_vendor": false, "language": "Python", "length_bytes": 455, "license_type": "permissive", "max_line_length": 90, "num_lines": 12, "path": "/examples/Backtesting/reset.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom HighThroughput.communication.mysql import mysql_query\nimport pickle\n\n#with open('backtest.pkl','rb') as btfile:\n# newq, materials = pickle.load(btfile)\n\nnewq = 251 \n\nmysql_query('DELETE FROM `calculations` WHERE `queue` = ' + str(newq) + ' AND `stat` > 0')\nmysql_query('UPDATE `calculations` SET `priority` = 0 WHERE `queue` = ' + str(newq))\nmysql_query('UPDATE `calculations` SET `leaf` = 1 WHERE `queue` = ' + str(newq))\n" }, { "alpha_fraction": 0.5233442187309265, "alphanum_fraction": 0.5255157351493835, "avg_line_length": 35.84000015258789, "blob_id": "92e92cfe2adf62205b5f9c8850d355be8b336e6c", "content_id": "083c95daf924ac4fb5676bc7c4b70c4222043670", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1842, "license_type": "permissive", "max_line_length": 234, "num_lines": 50, "path": "/manage/template.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "from ..communication.mysql import *\nimport json\n\n#Could add global or make class as well.\n\ndef add(name,value,software,ttype):\n result = mysql_query('INSERT INTO `templates` (`software`, `name`, `type`, `template`, `owner`) VALUES (\\'' + str(software) + '\\', \\'' + str(name) + '\\', \\'' + str(ttype) + '\\', \\'' + json.dumps(value) + '\\', ' + str(owner) + ')')\n tid = result \n if(int(result) > 0):\n print('Template ' + str(name) + ' (' + str(tid) + ') has been succesfully added.')\n else:\n print('Adding template ' + str(name) + ' has failed.')\n return tid\n\ndef get(tid):\n template = mysql_query('SELECT * FROM `templates` WHERE `id` = ' + str(tid))\n return json.loads(template['template'])\n\ndef modify(params):\n#should add check for id\n query = 'UPDATE `templates` SET '\n for key in params.keys():\n if key != 'id':\n query += '`' + key + '` ='\n if key == 'template':\n query += '\\'' + json.dumps(params[key]) + '\\''\n elif not str(params[key]).isdigit():\n query += '\\'' + params[key] + '\\''\n else:\n query += str(params[key])\n query += ', '\n query = query[:-2] + ' WHERE `id` = ' + str(params['id'])\n result = mysql_query(query)\n \n if (result == '1'):\n print('The template has been modified. Please verify.')\n else:\n print('Help... 
Me...')\n\n return int(result)\n\ndef remove(tid):\n name = mysql_query('SELECT `name` FROM `templates` WHERE `id` = ' + str(tid))\n result = mysql_query('DELETE FROM `templates` WHERE `id` = ' + str(tid))\n if (result == '1'):\n print('The ' + name['name'] + ' (' + str(tid) + ') template has been removed.')\n else:\n print('Removing the ' + name['name'] + ' (' + str(tid) + ') has failed.')\n\n return int(result)\n" }, { "alpha_fraction": 0.6661786437034607, "alphanum_fraction": 0.6661786437034607, "avg_line_length": 50.224998474121094, "blob_id": "ad4d5038c58ce19eeff2f9103affe76e0612dee5", "content_id": "6fa62faecadcaef41daf30923a460192fddf17be", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2049, "license_type": "permissive", "max_line_length": 80, "num_lines": 40, "path": "/interfaces/pymatgen.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "from pymatgen.entries.compatibility import *\n\nclass MaterialsProjectCompatibility(Compatibility):\n \"\"\"\n This class implements the GGA/GGA+U mixing scheme, which allows mixing of\n entries. Note that this should only be used for VASP calculations using the\n MaterialsProject parameters (see pymatgen.io.vaspio_set.MPVaspInputSet).\n Using this compatibility scheme on runs with different parameters is not\n valid.\n\n Args:\n compat_type: Two options, GGA or Advanced. GGA means all GGA+U\n entries are excluded. Advanced means mixing scheme is\n implemented to make entries compatible with each other,\n but entries which are supposed to be done in GGA+U will have the\n equivalent GGA entries excluded. For example, Fe oxides should\n have a U value under the Advanced scheme. A GGA Fe oxide run\n will therefore be excluded under the scheme.\n correct_peroxide: Specify whether peroxide/superoxide/ozonide\n corrections are to be applied or not.\n check_potcar_hash (bool): Use potcar hash to verify potcars are correct.\n \"\"\"\n\n def __init__(self, compat_type=\"Advanced\", correct_peroxide=True,\n check_potcar_hash=False,check_potcar=False):\n self.compat_type = compat_type\n self.correct_peroxide = correct_peroxide\n self.check_potcar_hash = check_potcar_hash\n fp = os.path.join(MODULE_DIR, \"MPCompatibility.yaml\")\n if check_potcar:\n super(MaterialsProjectCompatibility, self).__init__(\n [PotcarCorrection(MPRelaxSet, check_hash=check_potcar_hash),\n GasCorrection(fp),\n AnionCorrection(fp, correct_peroxide=correct_peroxide),\n UCorrection(fp, MPRelaxSet, compat_type)])\n else:\n super(MaterialsProjectCompatibility, self).__init__(\n [GasCorrection(fp),\n AnionCorrection(fp, correct_peroxide=correct_peroxide),\n UCorrection(fp, MPRelaxSet, compat_type)])\n" }, { "alpha_fraction": 0.5515994429588318, "alphanum_fraction": 0.5565878748893738, "avg_line_length": 39.599998474121094, "blob_id": "6ea90a7168a728e649198dafd628044769864355", "content_id": "1a77a61f96edf28662a26deaf369665073a01445", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 16037, "license_type": "permissive", "max_line_length": 327, "num_lines": 395, "path": "/manage/calculation.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "from ..communication.mysql import mysql_query\nimport json,os\nimport HighThroughput.io.CIF as CIF\nimport time,pdb\ncalcid = 0;\nsw = '';\nstat = 0;\nif os.getenv('PBS_JOBID') is not None:\n jobid = str(os.getenv('PBS_JOBID').split('.')[0]) \nelse:\n 
jobid = '0'\n\ndef fetchgetstart(qid):\n    global calcid, sw, stat\n    i=0\n    calc = ''\n    while i < 10 and isinstance(calc,str):\n        calc = mysql_query('FETCHGETSTART ' + str(qid) + ' ' + os.getenv('VSC_INSTITUTE_CLUSTER') + ' ' + str(os.getenv('PBS_JOBID')).split('.')[0] )\n        if i > 0:\n            time.sleep(60)\n        i += 1\n    \n    # mysql_query returns a plain string on failure, so bail out before indexing\n    if isinstance(calc, str) or calc.get('id') is None:\n        print('The database did not return any calculations from queue ' + str(qid) + ' at this time.')\n        return None\n\n    calcid = calc['id']\n    sw = calc['software']\n    stat = calc['stat']\n    if not type(calc['results']) == dict:\n        calc['results'] = json.loads(calc['results'])\n    if not type(calc['settings']) == dict:\n        calc['settings'] = json.loads(calc['settings'])\n    return calc\n\ndef fetch(qid):\n    return mysql_query('FETCHQ ' + str(qid))\n\ndef addgroup(group,queue,priority = '',settings = None,results = None, status = 0):\n    rows = mysql_query('SELECT `id` FROM `newdata` WHERE `mgroup` = ' + str(group))\n    materials = []\n    for row in rows:\n        materials.append(row['id'])\n    # forward the caller's arguments instead of resetting them to their defaults\n    batchadd(materials, queue, priority, settings, results, status)\n\ndef batchadd(materials,queue,priority = '',settings = None,results = None, status = 0):\n    global calcid, stat, sw\n    #API conflict\n    wftemplate = mysql_query('SELECT `priority`, `rtemplate`, `stemplate` FROM `workflows` WHERE `id` = (SELECT `workflow` FROM `queues` WHERE `id` = ' + str(queue) + ') AND `stat` = ' + str(status))\n    if settings == None:\n        settings = wftemplate['stemplate']\n\n    if results == None:\n        results = wftemplate['rtemplate']\n\n    software = ''\n\n    if isinstance(settings, dict):\n        settings = json.dumps(settings)\n        print('Be sure to update the software type.')\n    elif str(settings).isdigit():\n        template = mysql_query('SELECT * FROM `templates` WHERE `id` = ' + str(settings))\n        settings = template['template']\n        software = template['software']\n\n    if isinstance(results, dict):\n        results = json.dumps(results)\n        print('Be sure to update the software type.')\n    elif str(results).isdigit():\n        template = mysql_query('SELECT * FROM `templates` WHERE `id` = ' + str(results))\n        results = template['template']\n        software = template['software']\n\n    sw = software\n\n    if priority == '':\n        priority = wftemplate['priority']\n    owner = mysql_query('');\n    query = ''\n    i=0\n    for material in materials:\n        if '10' not in material:\n            print('skipped ' + material)\n            continue\n        query += 'INSERT INTO `calculations` (`queue`, `priority`, `owner`, `results`, `settings`, `software`, `file`, `stat`,`leaf`) VALUES (' + str(queue) + ', ' + str(priority) + ', ' + str(owner) + ', \\'' + results + '\\', \\'' + settings + '\\', \\'' + software + '\\', ' + str(material) + ', ' + str(status) + ',1);'\n        i+=1\n        if i % 100 == 0:\n            result = mysql_query(query);\n            query = ''\n    result = mysql_query(query);\n    cid = result\n    calcid = result\n    queue = mysql_query('SELECT `id`, `name` FROM `queues` WHERE `id` = ' + str(queue))\n\n    if(int(result) > 0):\n        print('Added calculations to the ' + queue['name'] + ' queue (' + str(queue['id']) + ') as calculation ' + str(cid) + '.')\n    else:\n        print('Adding calculations to the ' + queue['name'] + ' queue (' + str(queue['id']) + ') failed.')\n    return cid\n\ndef add(material,queue,priority = '',settings = None,results = None, status = 0):\n    global calcid, stat, sw\n    #API conflict\n    wftemplate = mysql_query('SELECT `priority`, `rtemplate`, `stemplate` FROM `workflows` WHERE `id` = (SELECT `workflow` FROM `queues` WHERE `id` = ' + str(queue) + ') AND `stat` = ' + str(status))\n    if settings == None:\n        settings = 
wftemplate['stemplate']\n\n if results == None:\n results = wftemplate['rtemplate']\n\n software = ''\n\n if isinstance(settings, dict):\n settings = json.dumps(settings)\n print('Be sure to update the software type.')\n elif str(settings).isdigit():\n template = mysql_query('SELECT * FROM `templates` WHERE `id` = ' + str(settings))\n settings = template['template']\n software = template['software']\n\n if isinstance(results, dict):\n results = json.dumps(results)\n print('Be sure to update the software type.')\n elif str(results).isdigit():\n template = mysql_query('SELECT * FROM `templates` WHERE `id` = ' + str(results))\n results = template['template']\n software = template['software']\n\n sw = software\n\n if priority == '':\n priority = wftemplate['priority']\n\n owner = mysql_query('');\n result = mysql_query('INSERT INTO `calculations` (`queue`, `priority`, `owner`, `results`, `settings`, `software`, `file`, `stat`,`leaf`) VALUES (' + str(queue) + ', ' + str(priority) + ', ' + str(owner) + ', \\'' + results + '\\', \\'' + settings + '\\', \\'' + software + '\\', ' + str(material) + ', ' + str(status) + ',1)');\n oldcid = calcid\n cid = result\n calcid = result\n queue = mysql_query('SELECT `id`, `name` FROM `queues` WHERE `id` = ' + str(queue))\n\n if(int(result) > 0):\n mysql_query('UPDATE `calculations` SET `leaf` = 0 WHERE `id` = ' + str(oldcid))\n print('Added calculation for material ' + str(material) + ' (' + str(cid) + ') to the ' + queue['name'] + ' queue (' + str(queue['id']) + ') as calculation ' + str(cid) + '.')\n else:\n print('Adding calculation for material ' + str(material) + ' to the ' + queue['name'] + ' queue (' + str(queue['id']) + ') failed.')\n return cid\n\ndef modify(params):\n query = 'UPDATE `calculations` SET '\n for key in params.keys():\n if key != 'id':\n query += '`' + key + '` ='\n if isinstance(params[key],dict):\n query += '\\'' + json.dumps(params[key]).translate(str.maketrans({\"'\": r\"\\'\"})) + '\\''\n elif not str(params[key]).isdigit():\n query += '\\'' + str(params[key]).translate(str.maketrans({\"'\": r\"\\'\"})) + '\\''\n else:\n query += str(params[key])\n query += ', '\n query = query[:-2] + ' WHERE `id` = ' + str(params['id'])\n #query = query.translate(str.maketrans({\"'\": r\"\\'\"}))\n result = int(bool(mysql_query(query)))\n if (result == 1):\n print('The calculation has been modified. Please verify.')\n elif (result == 0):\n print('Nothing to modify.')\n else:\n print('Help... 
Me...')\n return result\n\ndef getSettings(cid = None):\n if(cid == None):\n cid = calcid\n result = mysql_query('SELECT `settings` FROM `calculations` WHERE `id` = ' + str(cid))\n return json.loads(result['settings'])\n\ndef getResults(cid = None):\n if(cid == None):\n cid = calcid\n result = mysql_query('SELECT `results` FROM `calculations` WHERE `id` = ' + str(cid))\n return json.loads(result['results'])\n\n\ndef updateSettings(settings,cid = None):\n if(cid == None):\n cid = calcid\n\n if isinstance(settings, dict):\n settings = json.dumps(settings)\n elif str(settings).isdigit():\n template = mysql_query('SELECT * FROM `templates` WHERE `id` = ' + str(settings))\n settings = template['template']\n\n tempdict = {'id' : cid, 'settings': settings}\n return modify(tempdict)\n\ndef updateResults(results,cid = None):\n if(cid == None):\n cid = calcid\n\n if isinstance(results, dict):\n results = json.dumps(results)\n elif str(results).isdigit():\n template = mysql_query('SELECT * FROM `templates` WHERE `id` = ' + str(results))\n results = template['template']\n print('Updating results of calculation ' + str(cid) + '.')\n tempdict = {'id' : cid, 'results': results}\n return modify(tempdict)\n\ndef remove(cid):\n result = mysql_query('DELETE FROM `calculations` WHERE `id` = ' + str(cid))\n if (result == '1'):\n print('Calculation ' + str(cid) + ' has been removed.')\n else:\n print('Removing calculation ' + str(cid) + ' has failed.')\n\ndef get(cid):\n global calcid, sw, stat;\n\n material = mysql_query('SELECT `file` FROM `calculations` WHERE `id` = ' + str(cid))\n\n\n if isinstance(material, str):\n return material\n \n if(int(material['file']) < 10000000):\n table = 'data'\n else:\n table = 'newdata'\n\n result = mysql_query('SELECT * FROM `calculations` JOIN `' + table + '` ON (`calculations`.`file` = `' + table + '`.`file`) WHERE `calculations`.`id` = ' + str(cid))\n result['results'] = json.loads(result['results'])\n result['settings'] = json.loads(result['settings'])\n\n if not isinstance(result, str):\n calcid = cid\n sw = result['software']\n stat = result['stat']\n else:\n print('Retrieving calculation ' + str(cid) + ' failed.')\n return result\n\ndef start(cid = None):\n global stat,sw,calcid\n status = 0\n manual = True\n if cid == None:\n cid = calcid\n manual = False\n\n if(int(cid) > 0):\n calc = mysql_query('SELECT * FROM `calculations` WHERE `id` = ' + str(cid))\n if manual == False:\n status = stat\n else:\n status = calc['stat']\n #already = mysql_query('SELECT COUNT(`file`) AS `count` FROM `calculations` WHERE `queue` = ' + calc['queue'] + ' AND `file` = ' + calc['file'] + ' AND `stat` IN (' + str(int(calc['stat'])+1) + ', ' + str(int(calc['stat'])+2) + ' AND `start` > DATE_SUB(NOW(), INTERVAL 1 HOUR))')\n already = mysql_query('SELECT COUNT(`file`) AS `count` FROM `calculations` WHERE `parent` = ' + str(calc['id']))\n\n if int(status) % 2 != 0 or int(already['count']) > 0:\n return 0\n\n #restart = mysql_query('SELECT COUNT(`file`) AS `count` FROM `calculations` WHERE `queue` = ' + calc['queue'] + ' AND `file` = ' + calc['file'] + ' AND `stat` = ' + calc['stat'])\n # and restart['count'] == 1\n status = int(status) + 1\n#using stat here as global feels a bit dodgy\n wftemplate = mysql_query('SELECT `priority`, `rtemplate`, `stemplate` FROM `workflows` WHERE `id` = (SELECT `workflow` FROM `queues` WHERE `id` = ' + str(calc['queue']) + ') AND `stat` = ' + str(status))\n if(int(wftemplate['rtemplate']) > 0):\n results =wftemplate['rtemplate']\n else:\n results = 
calc['results']\n\n if(int(wftemplate['stemplate']) > 0):\n settings = wftemplate['stemplate']\n else:\n settings = calc['settings']\n\n if isinstance(wftemplate,str):\n priority = calc['priority']\n else:\n priority = wftemplate['priority']\n sw=calc['software']\n add(calc['file'],calc['queue'],priority, settings, results, status)\n mysql_query('UPDATE `calculations` SET `parent` = ' + str(cid) + ' WHERE `id` = ' + str(calcid))\n cid = calcid\n stat = status\n return int(mysql_query('UPDATE `calculations` SET `start` = NOW(), `server` = \\'' + str(os.getenv('VSC_INSTITUTE_CLUSTER')) + '\\', `jobid` = \\'' + jobid + '\\' WHERE `id` = ' + str(cid)));\n\ndef restart(cid = None, reset = False):\n global stat,calcid,jobid\n #problem with 0 case\n status = 0\n manual = True\n settings = ''\n results = ''\n if cid == None:\n cid = calcid\n calc = get(cid)\n stat = int(calc['stat'])\n if(int(cid) > 0):\n if stat % 2 != 0:\n stat = stat - 1\n rollback(stat,cid=cid)\n cid=calcid\n calc = get(cid)\n #return 1\n else:\n stat = stat - 2\n rollback(stat,cid=cid)\n cid = calcid\n calc = get(cid)\n #return 1\n #cid = calcid\n\n status = stat\n results = json.dumps(calc['results']).replace('\\'','\\\\\\'')\n settings = json.dumps(calc['settings'])\n if reset:\n wftemplate = mysql_query('SELECT `rtemplate`, `stemplate` FROM `workflows` WHERE `id` = (SELECT `workflow` FROM `queues` WHERE `id` = ' + str(calc['queue']) + ') AND `stat` = ' + str(status))\n if(int(wftemplate['rtemplate']) > 0):\n template = mysql_query('SELECT `template` FROM `templates` WHERE `id` = ' + str(wftemplate['rtemplate']))\n results = template['template']\n\n if(int(wftemplate['stemplate']) > 0):\n template = mysql_query('SELECT `template` FROM `templates` WHERE `id` = ' + str(wftemplate['stemplate']))\n settings = template['template']\n print('UPDATE `calculations` SET `stat` = ' + str(status) + ', `start` = 0, `end` = 0, `server` = \\'' + str(os.getenv('VSC_INSTITUTE_CLUSTER')) + '\\', `jobid` = \\'' + jobid + '\\', `results` = \\'' + results + '\\', `settings` = \\'' + settings + '\\', `leaf` = 1 WHERE `id` = ' + str(cid))\n return int(mysql_query('UPDATE `calculations` SET `stat` = ' + str(status) + ', `start` = 0, `end` = 0, `server` = \\'' + str(os.getenv('VSC_INSTITUTE_CLUSTER')) + '\\', `jobid` = \\'' + jobid + '\\', `results` = \\'' + results + '\\', `settings` = \\'' + settings + '\\', `leaf` = 1 WHERE `id` = ' + str(cid)));\n\ndef rollback(status, cid=None):\n global calcid, stat\n manual = True\n if cid != None:\n calcid = cid\n current = get(calcid)\n while int(current['stat']) > int(status) and not isinstance(current,str):\n oldcid = current['id']\n current = get(current['parent'])\n if isinstance(current,str):\n restart(oldcid)\n break\n else:\n mysql_query('DELETE FROM `calculations` WHERE `id` = ' + str(oldcid))\n calcid = current['id']\n modify({'id' : current['id'], 'leaf': 1})\n return 1\n\ndef showAll(qid):\n return mysql_query('SELECT * FROM `calculations` WHERE `queue` = ' + str(qid))\n\ndef end(cid = None):\n global stat\n status = 0\n manual = True\n if cid == None:\n cid = calcid\n manual = False\n if(int(cid) > 0):\n if manual == False:\n stat = int(stat) + 1\n status = stat\n else:\n status = mysql_query('SELECT `stat` FROM `calculations` WHERE `id` = ' + str(cid))\n status = int(status['stat'])+1\n output = {'VASP' : 'CONTCAR', 'Gaussian' : str(cid) + '.log'}\n CIFtext = ''\n if sw:\n if sw in output.keys():\n if os.path.isfile(output[sw]):\n cif = CIF.read(output[sw],sw)\n CIFtext = 
CIF.write('temp.cif',cif)\n os.remove('temp.cif')\n return int(bool(mysql_query('UPDATE `calculations` SET `stat` = ' + str(status) + ', `end` = NOW(), `server` = \\'' +str(os.getenv('VSC_INSTITUTE_CLUSTER')) + '\\', `jobid` = \\'' + str(os.getenv('PBS_JOBID')).split('.')[0] + '\\', `cif` = \\'' + CIFtext.replace('\\'','\\\\\\'') + '\\' WHERE `id` = ' + str(cid))));\n else:\n return 0\n\ndef setPriority(priority,cid = None):\n if cid == None:\n cid = calcid\n\n if(str(priority).isdigit()):\n # print('UPDATE `calculations` SET `priority` = ' + str(priority) + ' WHERE `id` = ' + str(cid)) \n return mysql_query('UPDATE `calculations` SET `priority` = ' + str(priority) + ' WHERE `id` = ' + str(cid))\n else:\n print('Priorities are number, the higher the number the higher the priority')\n return 0\n \ndef setMultiplePriorities(priorities):\n priorities = priorities.sort_values(ascending=False,by='priority')\n query = ''\n for i,p in priorities.iterrows():\n query += 'UPDATE `calculations` SET `priority` = ' + str(p['priority']) + ' WHERE `id` = ' + str(p['id']) + ';\\n'\n return mysql_query(query)\n \ndef getPriority(qid, stat):\n return mysql_query('SELECT `id` from `calculations` WHERE `stat` = '+str(stat)+' AND `queue` = ' + str(qid))\n" }, { "alpha_fraction": 0.5923464894294739, "alphanum_fraction": 0.625496506690979, "avg_line_length": 30.699758529663086, "blob_id": "a596ab173821da8a27bae8727ec5647ff393bedd", "content_id": "061cf9cb8bd295ee4f065b1cdafcce0f41dadb6e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 13092, "license_type": "permissive", "max_line_length": 343, "num_lines": 413, "path": "/examples/IntelligentScreening/matscreen.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n#!/usr/bin/env python\n\nimport os, random, re, shutil, subprocess, sys, time, json\nimport numpy as np\nimport HighThroughput.manage.calculation as HT\nimport HighThroughput.manage.template as template\nfrom HighThroughput.io.VASP import *\nfrom HighThroughput.utils.generic import mkdir, execute, resubmit\nfrom HighThroughput.modules.VASP import *\nfrom HighThroughput.errors.generic import *\n#from HighThroughput.ML.models.priority import updateMLpriority\n\nqid = sys.argv[1]\n#Can change number of nodes based on step and kpoints too of course for the next submit\nsubmit_arg = '' + sys.argv[2] + ' ' + sys.argv[3] + ' ' + sys.argv[4]\n\n# Set up the queue directory paths, this is always the same and should be moved to the .highthroughput file in a dictionary except for manual overrides\nuser = os.getenv('USER')\nscratch = os.getenv('VSC_SCRATCH_VO')\nqdir = os.path.join(scratch, 'queues', str(qid))\nsubmitscript = os.path.join(qdir,'matscreen') #this is what needs updating for auto error fix resubmit\n\n# Fetching a job and immediately starting it, ensuring we don't run the same job twice\nprint('Fetching...')\ncinfo = HT.fetchgetstart(qid)\nprint('Fetched calculation ' + str(cinfo['id']))\n\nif int(cinfo['id']) <= 0:\n print('No calculations left')\n sys.exit()\n \n#Defining directories\ncdir = [os.path.join(qdir,'CALCULATIONS',cinfo['file'],x) for x in ['CALIB/low','RELAX/vol','RELAX/all','EOSL/1.0','EOSL/1.02','EOSL/0.98','EOSL/1.04','EOSL/0.96','EOSL/1.06','EOSL/0.94','RELAX/internalL','CALIB/high','EOSH/1.0','EOSH/1.02','EOSH/0.98','EOSH/1.04','EOSH/0.96','EOSH/1.06','EOSH/0.94','RELAX/internalH','ENERGY','DOS','BANDS']]\ncdir = [os.path.join(qdir,'import')] + cdir\n\n# 
Starting the actual calculations\nstatus = int(cinfo['stat'])\n\nprint('Starting calculation ' + str(cinfo['id']) + ' on cluster: ' + cinfo['server'] + '.')\n\n# Navigating to the directory of the current workflow step\nos.chdir(os.path.join(qdir,'CALCULATIONS'))\nmkdir(str(cinfo['file']))\n\nos.chdir(os.path.join(qdir,'CALCULATIONS',cinfo['file']))\nstep = int(np.ceil(float(cinfo['stat'])/2))\n\n# Configure settings per step\n# By default always inherit\ninheritcontcar = True\ninheritchgcar = True\ninheritwavecar = True\n# Can define this by default, though I've added an if statement for each step anyways\ninheritstep = step - 1\nparent = HT.get(cinfo['parent'])\ninheritmod = None\ninheritgrid = False\nrescale = 1.0\n\nif parent['parent'] != '0':\n pparent = HT.getResults(parent['parent'])\n inheritmod = pparent.get('settingsmod')\n print('The following settingsmod will be inherited: ')\n print(inheritmod)\n\n \n#==============================================================================\n# In this section you can also make manual changes to the settings, for example:\n# if int(qid) == 167 or int(qid) == 171:\n# cinfo['settings']['KPOINTS']['K'] = '12 12 12'\n# cinfo['settings']['INCAR']['NKRED'] = '2'\n# cinfo['settings']['INCAR']['LSUBROT'] = '.TRUE.'\n# if int(cfile) == 10025486:\n# cinfo['settings']['INCAR']['ALGO'] = 'A'\n# cinfo['settings']['INCAR']['NELM'] = 30\n# cinfo['settings']['INCAR']['TIME'] = 1.95\n# if int(cfile) == 10031153:\n# cinfo['settings']['INCAR']['MAGMOM'] = '2*1.0000 2*-1.0000'\n#==============================================================================\n\nminkp = 2500\n\nif step > 11:\n minkp = 10000\n\ncinfo['settings']['INCAR']['ISMEAR'] = 0\n\nif step > 1:\n eoscriteria1 = {'res' : 0.03, 'B0' : (3.,100.), 'BP' : None, 'V0' : (parent['results']['volume']*0.985/1.06,parent['results']['volume']*1.015/1.06), 'dirs' : cdir}\n eoscriteria2 = {'res' : 0.03, 'B0' : (3.,100.), 'BP' : None, 'V0' : (parent['results']['volume']*0.98/1.06,parent['results']['volume']*1.02/1.06), 'dirs' : cdir}\n\nif step > 10:\n cinfo['settings']['INCAR']['ISPIN'] = 2\n\nif step == 1:\n # Low Calibration\n minkp = 0\n inheritstep = 0\nelif step == 2:\n # Vol relax\n inheritstep = 1\n # Modify the template\n cinfo['settings']['INCAR']['ISIF'] = 7\nelif step == 3:\n # Full relax\n inheritstep = 2\n cinfo['settings']['INCAR']['ISIF'] = 3\nelif step == 4:\n # EOS 1.0\n inheritstep = 3\n rescale = 1.0\n cinfo['settings']['INCAR']['ISIF'] = 4\nelif step == 5:\n # EOS 1.02\n inheritstep = 4\n inheritgrid = True\n rescale = 1.02\n cinfo['settings']['INCAR']['ISIF'] = 4\nelif step == 6:\n # EOS 0.98\n inheritstep = 4\n inheritgrid = True\n rescale = 0.98\n cinfo['settings']['INCAR']['ISIF'] = 4\nelif step == 7:\n # EOS 1.04\n inheritstep = 5\n inheritgrid = True\n rescale = 1.04/1.02\n cinfo['settings']['INCAR']['ISIF'] = 4\nelif step == 8:\n # EOS 0.96\n inheritstep = 6\n inheritgrid = True\n rescale = 0.96/0.98\n cinfo['settings']['INCAR']['ISIF'] = 4\nelif step == 9:\n # EOS 1.06\n inheritstep = 7\n inheritgrid = True\n rescale = 1.06/1.04\n cinfo['settings']['INCAR']['ISIF'] = 4\nelif step == 10:\n # EOS 0.94\n inheritstep = 8\n inheritgrid = True\n rescale = 0.94/0.96\n cinfo['results']['eos'] = \"\"\n cinfo['results']['eoscheck'] = eoscriteria1\n cinfo['settings']['INCAR']['ISIF'] = 4\nelif step == 11:\n # Final internal relaxation\n cinfo['settings']['INCAR']['ISIF'] = 4\n #lorbit then max magnetic moment\n cinfo['results']['spincheck'] = 0.1\n rescale = 
-parent['results']['eos']['V0']\n inheritstep = 4\nelif step == 12:\n # High calib\n minkp = 0\n inheritstep = 11\n rescale = 1.0\nelif step == 13:\n # EOS 1.0\n #TO DO CHANGE STEP\n inheritstep = 12\n #rescale = 1.0\n cinfo['settings']['INCAR']['ISMEAR'] = -5\n cinfo['settings']['INCAR']['NSW'] = 1\n cinfo['settings']['INCAR']['IBRION'] = -1\n cinfo['settings']['INCAR']['ISIF'] = 2\nelif step == 14:\n # EOS 1.02\n inheritstep = 5\n inheritchgcar = False\n inheritwavecar = False\n #rescale = -parent['results']['volume']*1.02\n cinfo['settings']['INCAR']['NSW'] = 1\n cinfo['settings']['INCAR']['ISMEAR'] = -5\n cinfo['settings']['INCAR']['IBRION'] = -1\n cinfo['settings']['INCAR']['ISIF'] = 2\nelif step == 15:\n # EOS 0.98\n inheritstep = 6\n inheritchgcar = False\n inheritwavecar = False\n #rescale = -parent['results']['volume']*0.98/1.02\n cinfo['settings']['INCAR']['NSW'] = 1\n cinfo['settings']['INCAR']['ISMEAR'] = -5\n cinfo['settings']['INCAR']['IBRION'] = -1\n cinfo['settings']['INCAR']['ISIF'] = 2\n cinfo['settings']['INCAR']['ENCUT'] = 520\nelif step == 16:\n # EOS 1.04\n inheritstep = 7\n inheritchgcar = False\n inheritwavecar = False\n #rescale = -parent['results']['volume']*1.04/0.98\n cinfo['settings']['INCAR']['NSW'] = 1\n cinfo['settings']['INCAR']['ISMEAR'] = -5\n cinfo['settings']['INCAR']['IBRION'] = -1\n cinfo['settings']['INCAR']['ISIF'] = 2\nelif step == 17:\n # EOS 0.96\n inheritstep = 8\n inheritchgcar = False\n inheritwavecar = False\n #rescale = -parent['results']['volume']*0.96/1.04\n cinfo['settings']['INCAR']['NSW'] = 1\n cinfo['settings']['INCAR']['ISMEAR'] = -5\n cinfo['settings']['INCAR']['IBRION'] = -1\n cinfo['settings']['INCAR']['ISIF'] = 2\nelif step == 18:\n # EOS 1.06\n inheritstep = 9\n inheritchgcar = False\n inheritwavecar = False\n #rescale = -parent['results']['volume']*1.06/0.96\n cinfo['settings']['INCAR']['ISMEAR'] = -5\n cinfo['settings']['INCAR']['NSW'] = 1\n cinfo['settings']['INCAR']['IBRION'] = -1\n cinfo['settings']['INCAR']['ISIF'] = 2\nelif step == 19:\n # EOS 0.94\n inheritstep = 10\n inheritchgcar = False\n inheritwavecar = False\n #rescale = -parent['results']['volume']*0.94/1.06\n cinfo['results']['eos'] = \"\"\n cinfo['results']['eoscheck'] = eoscriteria2\n cinfo['settings']['INCAR']['ISMEAR'] = -5\n cinfo['settings']['INCAR']['NSW'] = 1\n cinfo['settings']['INCAR']['IBRION'] = -1\n cinfo['settings']['INCAR']['ISIF'] = 2\nelif step == 20:\n # Final internal relaxation\n cinfo['settings']['INCAR']['ISIF'] = 4\n rescale = -parent['results']['eos']['V0']\n inheritstep = 12\nelif step == 21:\n # Final single point energy calculation\n cinfo['settings']['INCAR']['NSW'] = 1\n cinfo['settings']['INCAR']['ISMEAR'] = -5\n # For stress tensor\n cinfo['settings']['INCAR']['ISIF'] = 2\n inheritstep = 20\n#elif step == 22:\n # DOS\n# inheritstep = 21\n# cinfo['settings']['INCAR']['ENCUT'] = 520\n#elif step == 23:\n # BANDS\n# inheritstep = 22\n# cinfo['settings']['INCAR']['ENCUT'] = 520\n#cleaning up certain chgcar and wavecars in later steps might be good\n \nprint('Starting step ' + str(step) + ' in ' + cdir[step] + '.')\nmkdir(cdir[step])\nos.chdir(cdir[step])\n\n# Clearing\nif os.path.isfile('aborted'):\n os.remove('aborted')\n\nif os.path.isfile('STOPCAR'):\n os.remove('STOPCAR')\n\n# Start checkpointing, second argument should be how much time you need to write\n# out the files necessary for restart, i. e. 
how much time before the walltime a \n# stop signal will be sent to your ab initio program.\n\n# Could be automated based on LOOP time\ncheckpointStart(cinfo,1800)\n\n# Here we continue the job if it was checkpointed in a previous job\n\nif 'continue' not in parent['settings'].keys():\n parent['settings']['continue'] = 0\nif 'continued' not in parent['settings'].keys():\n parent['settings']['continued'] = 0\n\nperror = parent['results']\nif perror.get('errors') != None:\n fixerrors(cinfo)\n\nif int( parent['settings']['continue']) > int(parent['settings']['continued']):\n print('Continuing job from calculation id: ' + str(cinfo['parent']) + '.')\n cont(cinfo)\nelse:\n print('Initializing job.')\n inherit(cinfo,cdir[inheritstep],contcar = inheritcontcar,chgcar = inheritchgcar,wavecar = inheritwavecar, grid = inheritgrid, settingsmod = inheritmod,rescale = rescale)\n #Verify your potcardir, potgen should possibly just become a python function.\n initialize(cinfo['settings'])\n\n#Post setup modifications.\nencutlow = 1.25*float(execute('grep ENMAX POTCAR | awk \\'{print $3}\\' | cut -d\\; -f1 | sort -n | tail -n1'))\n\nif step < 12:\n cinfo['settings']['INCAR']['ENCUT'] = encutlow\nelse:\n cinfo['settings']['INCAR']['ENCUT'] = 520\n\nif detectSP('POSCAR'):\n # This could be abstracted further, though the magnetic elements chosen in \n # detectSP are not uniquely chosen either.\n cinfo['settings']['INCAR']['ISPIN'] = 2\n\n#Redivide KP AFTER initializing for settingsmod\nsetupKP(cinfo['settings'], minkp)\n#Setup parallellization settings\nparallelSetup(cinfo['settings'])\n\nwriteSettings(cinfo['settings'])\n\n#Preprocessing\n# can check eos errors based on e-v.dat\ndecompress()\nrun()\ncompress()\n\n# Error catching\n\nfinderrors(cinfo)\n\n# Checkpoint cleanup\nif os.path.isfile('STOPCAR'):\n os.remove('STOPCAR')\n\nprint('Ending Calculation')\n\n# Checkpoint abortion\nif os.path.isfile('aborted'):\n print('Calculation aborted')\n resubmit()\n sys.exit()\n\n\n\n# Post-processing (run hooks? or extra functions in VASP module)\n\n# Gather results and settings and end calculation\n\n# Nowadays I take the energy with sigma -> 0, while in theory without entropy should be nearly the same, \n# this seems more robust and is also used by the VASP group\n\n# Store detected errors\nif 'error' in locals():\n HT.updateResults({'error':error}, cinfo['id'])\nelse:\n print('Gathering results')\n results = gather(cinfo['results'])\n\n print('Updating results.')\n# updateresults could be assumed from dictionary keys and automated.\n HT.updateResults(results, cinfo['id'])\n \n \n print('Energy OK. 
Ending calculation, deleting junk files and fetching results.')\n HT.end(cinfo['id'])\n #cleanup function\n # Can change this to a step dictionary\n os.remove('CHG')\n \nprint('Updating settings.')\n\n# Update POTCAR INFO\n\nPOTCAR_version = execute('grep -a \\'TITEL\\' POTCAR | awk \\'{ print $4 }\\'')\ncinfo['settings']['POTCAR'] = POTCAR_version.strip().replace('\\n',', ')\n\nHT.updateSettings(cinfo['settings'], cinfo['id'])\n\n# Update priority with Machine Learning model\n#if step == 21:\n# updateMLPriority(cinfo['queue'], 42)\n\nstat_info = os.stat(qdir)\nuid = stat_info.st_uid\ngid = stat_info.st_gid\n\nfor root, dirs, files in os.walk(os.path.join(qdir,'CALCULATIONS',cinfo['file'])):\n for momo in dirs:\n try:\n os.chown(os.path.join(root, momo), -1, gid)\n os.chmod(os.path.join(root, momo),0o774)\n except PermissionError as e:\n continue\n\n for momo in files:\n try:\n os.chown(os.path.join(root, momo), -1, gid)\n os.chmod(os.path.join(root, momo), 0o664)\n except PermissionError as e:\n continue\n\nfor root,dirs, files in os.walk(os.path.join(os.path.join(qdir,'LOGFILES'))):\n for momo in files:\n try:\n os.chown(os.path.join(root, momo), -1, gid)\n os.chmod(os.path.join(root, momo), 0o664)\n except PermissionError as e:\n continue\n\n\nnewcalc = int(HT.fetch(str(qid)))\n\nif newcalc > 0:\n resubmit()\n print('Submitted new calculation in queue ' + str(qid) + '.')\nelse:\n print('Queue ' + str(qid) + ' is finished.')\nprint('Calculation ended.')\n" }, { "alpha_fraction": 0.8181818127632141, "alphanum_fraction": 0.8181818127632141, "avg_line_length": 21, "blob_id": "a4abc08ae798c8895f02f52a3a7f76cba51dcacf", "content_id": "3245248eb7855a2713a954e9f85abe0324bdf0af", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 88, "license_type": "permissive", "max_line_length": 42, "num_lines": 4, "path": "/HTtools/HTimport", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nimport HighThroughput\nHighThroughput.manage.material.importdir()\n" }, { "alpha_fraction": 0.6828908324241638, "alphanum_fraction": 0.6917403936386108, "avg_line_length": 29.81818199157715, "blob_id": "69239894de806e2c4760bdced14f2102f894e970", "content_id": "36145e059ff6b9e8f0ef34b9f03879031f13fbaf", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 678, "license_type": "permissive", "max_line_length": 159, "num_lines": 22, "path": "/HTtools/HTepure", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nimport sys,json,subprocess,os\npotcorr = json.load(open(os.path.join(os.path.dirname(__file__),'../ML/data/epure.json')))\n\nposcar = open(os.path.join(sys.argv[1],'POSCAR'),'r')\nlines = poscar.readlines()\n\nspecies = list(filter(None,subprocess.Popen('grep TITEL POTCAR | awk \\'{print $4}\\'',stdout=subprocess.PIPE,shell=True).communicate()[0].decode().split('\\n')))\nnumberofatoms = lines[6][:-1].lstrip()\nnumberofatoms = \" \".join(numberofatoms.split())\nnatoms = numberofatoms.split(' ')\nnumberofatoms = sum(map(int, natoms))\n\ncorr = 0\nfor i in range(len(natoms)):\n corr += float(potcorr[species[i]])*float(natoms[i])\n\ncorr /= numberofatoms\n\n\nprint(corr)\n" }, { "alpha_fraction": 0.5401402711868286, "alphanum_fraction": 0.5744349360466003, "avg_line_length": 70.27777862548828, "blob_id": "252a400aa17d3122cf559e83b6e8b6b4e27211d4", "content_id": 
"993e763fb80b383253bc0671d18b6734f4281edc", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1283, "license_type": "permissive", "max_line_length": 261, "num_lines": 18, "path": "/HTtools/HTeoserror", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python \nimport os ,subprocess\n\nmaindir = os.curdir\n\nfor i in os.listdir(maindir):\n if i[0:3] == '100':\n for j in ['0.94','0.96','0.98','1.0','1.02','1.04','1.06']:\n test = subprocess.Popen('cat ' + os.path.join(maindir,i,'STEP3',j,'tempout') + '| fgrep \"reached required accuracy - stopping structural energy minimisation\" | wc -l',shell=True,stdout=subprocess.PIPE,stderr=subprocess.PIPE).communicate()[0].strip()\n #energies = subprocess.Popen('grep \\'gy w\\' ' + os.path.join(maindir,i,'STEP3',j,'OUTCAR') + ' | awk \\'{print $4}\\'',shell=True,stdout=subprocess.PIPE,stderr=subprocess.PIPE).communicate()[0].split()\n energies = subprocess.Popen('grep \\'F=\\' ' + os.path.join(maindir,i,'STEP3',j,'tempout') + ' | awk \\'{print $3}\\'',shell=True,stdout=subprocess.PIPE,stderr=subprocess.PIPE).communicate()[0].split()\n energies = [float(x) for x in energies]\n if len(energies) == 7:\n diff = energies[-1] - min(energies)\n if (diff < -0.001):\n print(i + '/STEP3/' + j + ' relax issue min:' + str(min(energies)) + ' final:' + str(energies[-1]) + ' diff:' + str(min(energies)-energies[-1]))\n if test == '0':\n print(i + '/STEP3/' + j)\n" }, { "alpha_fraction": 0.5654450058937073, "alphanum_fraction": 0.5933682322502136, "avg_line_length": 22.875, "blob_id": "bdccdd24870c2c9663f294dca10fb2949c03f1a7", "content_id": "ac68ad7d7625e2b49b29b9368a2f8a2344981051", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 573, "license_type": "permissive", "max_line_length": 84, "num_lines": 24, "path": "/HTtools/HTsortposcar", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport sys\nfrom operator import itemgetter\nimport math\n\ndef norm(l):\n return float(l[0])**2+float(l[1])**2+float(l[2])**2\nf = open(sys.argv[1],'r')\nposcar = f.readlines()\n\na = float(sys.argv[2])\ndef incell(l):\n sort = -(int(float(l[0]) <= a) + int(float(l[1]) <= a) + int(float(l[2]) <= a))\n return sort\ni=0\ncoords = []\nfor line in poscar:\n if i >= 8:\n coords.append(line.strip().split())\n i+= 1\ncoords = sorted(coords,key=norm)\ncoords = [' '.join(x) for x in coords]\ntext = ''.join(poscar[0:8]) + '\\n'.join(coords)\nprint(text)\n" }, { "alpha_fraction": 0.708794116973877, "alphanum_fraction": 0.7124802470207214, "avg_line_length": 39.38298034667969, "blob_id": "5a5c06e01498ce1c0f0cc3d5788ff9a7fb1e15f6", "content_id": "b93cd0892bcdca329fc83fbed2f7535c9118b8c5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1899, "license_type": "permissive", "max_line_length": 125, "num_lines": 47, "path": "/HTtools/HTphase", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nfrom pymatgen.ext.matproj import MPRester\nfrom pymatgen import Composition\nfrom pymatgen.entries.computed_entries import ComputedEntry\nfrom pymatgen.core.units import FloatWithUnit\nfrom pymatgen.analysis.reaction_calculator import ComputedReaction\nfrom pymatgen.apps.borg.hive import VaspToComputedEntryDrone\nfrom pymatgen.apps.borg.queen import 
BorgQueen\nfrom pymatgen.analysis.phase_diagram import *\nimport re,sys\ndrone = VaspToComputedEntryDrone()\nqueen = BorgQueen(drone)\nqueen.load_data(sys.argv[1])\nentriesorig = queen.get_data()\n\n\nline = sys.argv[2]\nsearchset= set(re.sub('\\d',' ',line.strip()).split())\n# materialize the filter so it can be concatenated with the list of MP entries below\nentries = list(filter(lambda e: set(re.sub('\\d',' ',str(e.composition).replace(' ','')).split())==searchset, entriesorig))\n\n#This initializes the REST adaptor. Put your own API key in.\na = MPRester(\"s2vUo6mzETOHLdbu\")\n\nall_entries = a.get_entries_in_chemsys(set(searchset)) + entries\nprint('fetched entries')\npd = PhaseDiagram(all_entries)\nprint('made phase diagram')\nprint('analyzing')\nprint('Stable Entries (formula, materials_id)\\n--------')\n\ndef name(potcar):\n    name = ''\n    for p in potcar:\n        temp = (p.split(' ')[-2].split('_')[0])\n        name += temp\n    return name\nfor e in pd.stable_entries:\n    if e.entry_id is None:\n        reaction = pd.get_equilibrium_reaction_energy(e)\n        print(name(e.parameters['potcar_symbols']),e.composition.reduced_formula, e.entry_id, reaction)\n\nprint('\\nUnstable Entries (formula, materials_id, e_above_hull (eV/atom), decomposes_to)\\n--------')\nfor e in pd.unstable_entries:\n    decomp, e_above_hull = pd.get_decomp_and_e_above_hull(e)\n    pretty_decomp = [(\"{}:{}\".format(k.composition.reduced_formula, k.entry_id), round(v, 2)) for k, v in decomp.items()]\n    if e.entry_id is None:\n        print(name(e.parameters['potcar_symbols']),e.composition.reduced_formula, e.entry_id, e_above_hull, pretty_decomp)\n\n" }, { "alpha_fraction": 0.7734056711196899, "alphanum_fraction": 0.7774762511253357, "avg_line_length": 37.73684310913086, "blob_id": "98525502ae34851d7cacb2100a048fb3f0a95d0a", "content_id": "29e47ac1e1e272032323bdcd88f11bab1ecb8f5f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 737, "license_type": "permissive", "max_line_length": 77, "num_lines": 19, "path": "/HTtools/HTBorg", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport sys\nfrom pymatgen.apps.borg.hive import VaspToComputedEntryDrone\nfrom pymatgen.apps.borg.queen import BorgQueen\n\n# These three lines assimilate the data into ComputedEntries.\ndrone = VaspToComputedEntryDrone()\nqueen = BorgQueen(drone, sys.argv[1], 4)\n#queen = BorgQueen(drone)\n#queen.load_data(\"zintl_noPo.json\")\n#entries = queen.get_data()\n\n# It's a good idea to perform a save_data, especially if you just assimilated\n# a large quantity of data which took some time. 
This allows you to reload\n# the data using a BorgQueen initialized with only the drone argument and\n# calling queen.load_data(\".json\")\nqueen.save_data(sys.argv[2])\n\n# These few lines generates the phase diagram using the ComputedEntries.\n\n" }, { "alpha_fraction": 0.6327043771743774, "alphanum_fraction": 0.6603773832321167, "avg_line_length": 36.904762268066406, "blob_id": "630e32e54571fef8db89766412565c0d8428c74e", "content_id": "4f0044736dce803fabb78e220c4df2a29d7fd9b4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 795, "license_type": "permissive", "max_line_length": 167, "num_lines": 21, "path": "/config.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport os,subprocess\n\nvasp = 'vasp'\n\nif os.getenv('VSC_INSTITUTE_CLUSTER') == 'breniac':\n vasp = 'vasp544ae-breniac-2016a_02-meta-rec-std'\n\nscheduler = 'PBS'\n\nif int(subprocess.Popen('printenv | grep SLURM | wc -l',shell=True,stdout=subprocess.PIPE).communicate()[0].decode()) > 0:\n scheduler = 'SLURM'\n walltime = subprocess.Popen('qstat -f | grep -A 15 $SLURM_JOB_ID | grep walltime | awk \\'{print $3}\\'',shell=True,stdout=subprocess.PIPE).communicate()[0].decode()\n if ':' in walltime:\n hours, minutes, seconds = tuple([int(x) for x in walltime.split(':')])\n os.environ['PBS_WALLTIME'] = str(hours*3600+minutes*60+seconds)\n\n#group name to share queue directory\n#test\n#if os.getenv('VSC_INSTITUTE_CLUSTER') == 'breniac':\n# vasp = 'vasp_std'" }, { "alpha_fraction": 0.6518771052360535, "alphanum_fraction": 0.6621160507202148, "avg_line_length": 35.625, "blob_id": "d710c1e036ac3ea487c4274b11d90b20acfed5b8", "content_id": "2e285c73a712586fab6ab5c038532ec50c1e5ed0", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 293, "license_type": "permissive", "max_line_length": 151, "num_lines": 8, "path": "/HTtools/HTstatus", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport sys\nfrom HighThroughput.communication.mysql import *\n\nresult = mysql_query('SELECT `stat` FROM `calculations` WHERE `queue` = ' + str(sys.argv[1]) + ' AND `file` = ' + str(sys.argv[2]) + ' AND `leaf` = 1')\n\nif not isinstance(result,str):\n print(result['stat'])\n" }, { "alpha_fraction": 0.6000000238418579, "alphanum_fraction": 0.6000000238418579, "avg_line_length": 21, "blob_id": "2c6cd7c08d1ce84d3ae8800760e6cc1257fcd66d", "content_id": "c4fad645a47bae25b1468d9fd10c2322f4816af5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 45, "license_type": "permissive", "max_line_length": 21, "num_lines": 2, "path": "/utils/__init__.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "__all__ = ['generic']\nfrom . 
import generic\n\n" }, { "alpha_fraction": 0.743697464466095, "alphanum_fraction": 0.7489495873451233, "avg_line_length": 38.73684310913086, "blob_id": "21b2da34d268687bc7a7c42ec1c44819ed005e73", "content_id": "40acaec32a70d363a69f55623ddba97b3523069f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 952, "license_type": "permissive", "max_line_length": 74, "num_lines": 24, "path": "/HTtools/HTphaseprep", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nfrom pymatgen.matproj.rest import MPRester\nfrom pymatgen import Composition\nfrom pymatgen.entries.computed_entries import ComputedEntry\nfrom pymatgen.core.units import FloatWithUnit\nfrom pymatgen.analysis.reaction_calculator import ComputedReaction\nfrom pymatgen.apps.borg.hive import VaspToComputedEntryDrone\nfrom pymatgen.apps.borg.queen import BorgQueen\nfrom pymatgen.phasediagram.maker import PhaseDiagram\nfrom pymatgen.phasediagram.plotter import PDPlotter\nfrom pymatgen.phasediagram.analyzer import *\nimport re,os, sys\ndrone = VaspToComputedEntryDrone()\nqueen = BorgQueen(drone)\nqueen.load_data(sys.argv[1])\nentriesorig = queen.get_data()\ni=0\nfor entry in entriesorig:\n    name = entry.name\n    name = re.findall('[A-Z][^A-Z]*',name.replace('(','').replace(')',''))\n    name = ' '.join(name)\n    name = ''.join(i for i in name if not i.isdigit())\n    os.system('echo \\'' + name + '\\' >> entries' + str(int(i/20)))\n    i+=1\n" }, { "alpha_fraction": 0.6296086311340332, "alphanum_fraction": 0.6415201425552368, "avg_line_length": 35.72916793823242, "blob_id": "630e32e54571fef8db89766412565c0d8428c74e", "content_id": "4f0044736dce803fabb78e220c4df2a29d7fd9b4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1763, "license_type": "permissive", "max_line_length": 113, "num_lines": 48, "path": "/ML/models/volume.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "from mendeleev import element\nimport numpy as np\nfrom sqlalchemy import create_engine\nimport pandas as pd\n \ndef compute_volume(poscar_lines):\n    unit_vec1 = np.expand_dims(np.array(poscar_lines[2].split(), dtype=float),axis = 1)\n    unit_vec2 = np.expand_dims(np.array(poscar_lines[3].split(), dtype=float),axis = 1)\n    unit_vec3 = np.expand_dims(np.array(poscar_lines[4].split(), dtype=float),axis = 1)\n    unit_mat = np.concatenate((unit_vec1, unit_vec2, unit_vec3), axis = 1)\n    \n    return np.abs(np.linalg.det(unit_mat))\n\ndef compute_volume_atoms(poscar_lines, atoms_vol):\n    elements_all = poscar_lines[5].split()\n    elements_count = np.asarray(poscar_lines[6].split(), dtype=float)\n    atoms_vols = [atoms_vol[element_old] for element_old in elements_all]\n    volume_atoms = sum([atom_vols*element_count for atom_vols, element_count in zip(atoms_vols, elements_count)])\n    \n    return volume_atoms\n\ndef basicVolume():\n\n    filename = None  # TODO: add POSCAR prototype (placeholder so the module parses)\n    \n    with open(path + filename) as input_file:\n        old_lines = input_file.readlines()\n    \n    volume_org = compute_volume(old_lines)\n    \n    volume_org_atoms = compute_volume_atoms(old_lines, atoms_vol)\n    \n    rescale = volume_org/volume_org_atoms\n    \n    volume_est = []\n    \n    filenames = []  # TODO: add composition new materials (placeholder so the module parses)\n    \n    for filename in filenames:\n        if int(filename.split('\\\\')[-1][6:]) in indices_239:\n            with open(filename) as input_file:\n                lines_new = input_file.readlines()\n            elements_new = lines_new[5].split()\n            volume_atoms = 
compute_volume_atoms(lines_new, atoms_vol)\n \n volume_est.append(volume_atoms*rescale)\n \n return np.array(volume_est)\n" }, { "alpha_fraction": 0.5668802857398987, "alphanum_fraction": 0.5786644816398621, "avg_line_length": 26.561254501342773, "blob_id": "a48956bee3d073e0a674035988cc0594ef1abbb7", "content_id": "2acbcd56e5444f8de3f15e3dcc78668f8b0f0f85", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9674, "license_type": "permissive", "max_line_length": 127, "num_lines": 351, "path": "/examples/VASP.pyredouseless", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nimport sys\nprint(sys.version)\nimport shutil, os, sys, re\nimport subprocess\nfrom subprocess import Popen, PIPE\nfrom sys import argv\n\nimport ase.io\n\nimport HighThroughput.manage.calculation as HT\nimport HighThroughput.manage.template as template\nimport HighThroughput.io\n\nimport json\nimport time\nimport random\n\nfrom numpy.linalg import norm\nfrom numpy import dot, arccos, degrees, ceil\nsleep = random.randrange(20)\nprint('Random sleep... ZzZzZz for '+str(sleep)+' seconds, before starting script.')\ntime.sleep(sleep)\n\n\nqid = argv[1]\nsubmit_arg = '' + argv[2] + ' ' + argv[3] + ' ' + argv[4]\n\nqdir = os.getenv('VSC_SCRATCH') + '/queues/'+str(qid)\n\n\ndef execute(command):\n out, err = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True).communicate()\n print out\n print >> sys.stderr, err\n \n if err in locals():\n #raise Exception('Error in executing bash-command.')\n return False\n else:\n return out\n\ndef mkdir(command):\n if not os.path.isdir(command):\n os.mkdir(command)\n\ndef remove(command):\n if os.path.isfile(command):\n os.remove(command)\n else:\n print(str(command)+' is not a file.')\n\ndef error_catch(command):\n try:\n execute(command)\n return True\n except:\n return False\n\n\nos.chdir(qdir + '/fetchid')\n\nstart = 0\ncounter = 0\n\nwhile start == 0:\n print('Fetching...')\n cid = HT.fetch(qid)\n print('Fetched ' + str(cid))\n\n cinfo = HT.get(cid)\n cfile = cinfo['file']\n status = cinfo['stat']\n\n if int(cid) <= 0:\n print('No calculations left')\n os.chdir(qdir +'/LOGFILES')\n execute('touch '+str(qid)+'_fetch0')\n sys.exit()\n\n if int(status)==0:\n fetchid = str(cid)+'_0'\n else:\n fetchid = str(cid)\n\n print('Calculation status: '+str(status))\n\n if not int(status)%2==0:\n print('Uneven status: refetch a calculation.')\n elif int(status)==20:\n print('Do not prioritize DOS calculation: exit.')\n execute('touch '+str(qid)+'_DOSexit')\n sys.exit()\n else:\n sleep = random.randrange(20)\n print('Random sleep... ZzZzZz for '+str(sleep)+' seconds, before touching fetchid.')\n time.sleep(sleep)\n\n if not os.path.isfile(fetchid):\n execute('touch ' + str(fetchid))\n print('Fetched a good one!')\n\n sleep = random.randrange(20)\n print('Random sleep... ZzZzZz for '+str(sleep)+' seconds, before actually starting.')\n time.sleep(sleep)\n\n# if int(status)==0 and not os.path.isfile(str(cid)+'_0'):\n# execute('mv '+str(cid) +' '+str(cid)+'_0') \n# print('Extra step for status 0: mv touched file.') \n\n start = HT.start(str(cid))\n \n if start==0:\n print('Calculation start failed. 
Removing fetchid and restarting fetch cycle.')\n remove(fetchid)\n else:\n print('Calculation started succesfully!')\n counter = 0\n else:\n sleep = random.randrange(60)\n print('fetchid '+str(cid)+' already exists, sleep '+str(sleep)+' seconds and restart fetch cycle.')\n time.sleep(sleep) \n \n counter = counter + 1\n if counter==25:\n print('Fetch-counter reached limit: exit!')\n sys.exit()\n\nremove(fetchid)\n\ncid = HT.calcid\n\ncinfo = HT.get(cid)\ncfile = cinfo['file']\nstatus = int(cinfo['stat'])\n\n#settings = json.loads(cinfo['settings'])\nprint 'THE STAT is' + str(status)\nif status == 1:\n settings = template.get(182)\nelse:\n settings = template.get(183)\n\nINCAR_dict = settings['INCAR']\nKPOINTS_dict = settings['KPOINTS']\n\nif int(qid) in [149,150,155,156]:\n INCAR_dict['ENCUT'] = 600\n\nprint('Server: '+cinfo['server'])\n\nif INCAR_dict['LHFCALC'] == '.TRUE.':\n test = 'junkstrign'\nelif cinfo['server']=='gastly' or cinfo['server']=='haunter':\n INCAR_dict['NCORE'] = '8'\nelif cinfo['server'] == 'golett' or cinfo['server'] == 'phanpy':\n INCAR_dict['NCORE'] = '24'\n print('NCORE = 24')\nelse:\n INCAR_dict['NCORE'] = '16'\n print('NCORE = 16')\n\nINCAR_dict['KPAR'] = argv[3]\n\n#print('Updating settings (NPAR)')\n#HT.updateSettings(settings, cid)\n\nif status== 1:\n print('STEP 1 started')\n print('ISIF7')\n\n step = 1\n\n os.chdir(qdir+'/CALCULATIONS')\n mkdir(str(cfile))\n \n os.chdir(qdir+'/CALCULATIONS/'+str(cfile))\n\n mkdir('./STEP1')\n os.chdir('./STEP1')\n\n shutil.copy(qdir + '/import/' + str(cfile) + '.vasp', './POSCAR')\n poscar = open('POSCAR','r')\n lines = poscar.readlines()\n elements = lines[5][:-1].lstrip()\n\n magel = set(['O','Ni','Cr','Co','Fe','Mn','Ce','Nd','Sm','Eu','Gd','Tb','Dy','Ho','Er','Tm']);\n relel = set(['Cs','Ba','La','Lu','Hf','Ta','W','Re','Os','Ir','Pt','Au','Hg','Tl','Pb','Bi','Po','At','Rn']);\n##----------------------------------------------------------------------------------------------------------\n\n\n\n magnetic = False;\n relativistic = False;\n\n for magn in magel:\n if magn in elements.split(' '):\n magnetic = True;\n \n for rel in relel:\n if rel in elements.split(' '):\n relativistic = True; \n\n if magnetic:\n INCAR_dict['ISPIN'] = '2'\n INCAR_dict['MAGMOM'] = '1*1.5 63*0.0'\n\n settings = {'INCAR':INCAR_dict, 'KPOINTS':KPOINTS_dict} \n HighThroughput.io.VASP.writeKPOINTS(KPOINTS_dict, os.getcwd())\n HighThroughput.io.VASP.writeINCAR(INCAR_dict, os.getcwd())\n\n execute('POTgen ' + str(elements))\n \n poscar.close()\n\n execute('free -m; date; touch CHGCAR WAVECAR')\n remove(''+ qdir+'/CALCULATIONS/' + str(cfile) + '/STEP' +str(step)+ '/tempout')\n \n execute('mympirun --output ' +qdir+'/CALCULATIONS/' + str(cfile) + '/STEP'+str(step)+'/tempout vasp')\n \n execute('free -m; date')\n\n print('END STATUS 1 / STEP 1')\n\nelif status==3:\n print('STEP 3 started')\n print('= STEP2: ISIF3')\n \n step = 2\n\n os.chdir(qdir+'/CALCULATIONS/'+str(cfile))\n if os.path.isdir('./STEP'+str(step)):\n os.rmdir('./STEP'+str(step))\n mkdir('./STEP'+str(step))\n os.chdir('./STEP'+str(step))\n\n shutil.copy('../STEP'+str(step-1)+'/CONTCAR', './POSCAR')\n shutil.copy('../STEP'+str(step-1)+'/POTCAR', './POTCAR')\n shutil.copy('../STEP'+str(step-1)+'/CHGCAR', './CHGCAR')\n #shutil.copy('../STEP'+str(step-1)+'/WAVECAR', './WAVECAR')\n poscar = open('POSCAR','r')\n lines = poscar.readlines()\n elements = lines[5][:-1].lstrip()\n\n magel = set(['O','Ni','Cr','Co','Fe','Mn','Ce','Nd','Sm','Eu','Gd','Tb','Dy','Ho','Er','Tm']);\n relel = 
set(['Cs','Ba','La','Lu','Hf','Ta','W','Re','Os','Ir','Pt','Au','Hg','Tl','Pb','Bi','Po','At','Rn']);\n##----------------------------------------------------------------------------------------------------------\n\n\n\n magnetic = False;\n relativistic = False;\n\n for magn in magel:\n if magn in elements.split(' '):\n magnetic = True;\n\n for rel in relel:\n if rel in elements.split(' '):\n relativistic = True; \n\n if magnetic:\n INCAR_dict['ISPIN'] = '2' \n\n settings = {'INCAR':INCAR_dict, 'KPOINTS':KPOINTS_dict}\n \n HighThroughput.io.VASP.writeKPOINTS(KPOINTS_dict, os.getcwd())\n HighThroughput.io.VASP.writeINCAR(INCAR_dict, os.getcwd())\n\n execute('free -m; date; touch CHGCAR WAVECAR')\n remove(''+ qdir+'/CALCULATIONS/' + str(cfile) + '/STEP' +str(step)+ '/tempout')\n\n execute('mympirun --output ' +qdir+'/CALCULATIONS/' + str(cfile) + '/STEP'+str(step)+'/tempout vasp')\n\n execute('free -m; date')\n\n print('END STATUS 3 / STEP 2')\n\nelse:\n print('Not a valid status. Calculation terminated.')\n sys.exit()\n\n\n#UPDATE POTCAR INFO\n\nPOTCAR_version = execute('grep -a \\'TITEL\\' POTCAR | awk \\'{ print $4 }\\'')\nsettings['POTCAR'] = POTCAR_version.strip().replace('\\n',', ')\n\n#END CALCULATION AND FETCH RESULTS\n\nenergy = execute('grep \\'energy without entropy\\' OUTCAR | tail -1 | awk \\'{ print $4 }\\'')\n\nif 'error' in locals():\n HT.updateResults({'error':error}, cid)\nelif energy=='' or not 'energy' in locals():\n HT.updateResults({'error':'Energy missing'}, cid)\n print('Energy missing! Error...')\nelif not os.path.isfile('CHGCAR') and not os.path.isfile('CHG'):\n HT.updateResults({'error':'CHGCAR and CHG missing. VASP Error?'}, cid)\n print('CHGCAR/CHG missing. VASP Error?')\nelse:\n print('Energy OK. Ending calculation, deleting junk files and fetching results.')\n HT.end(cid)\n\n if status<19:\n os.remove('CHG')\n\n results = HT.getResults(cid)\n\n #could leave this out when working with QZP's\n\n numberofatoms = lines[6][:-1].lstrip()\n numberofatoms = \" \".join(numberofatoms.split())\n numberofatoms = sum(map(int, numberofatoms.split(' ')))\n\n energy = float(energy)\n\n if step == 1:\n results['E0PBE'] = energy\n else:\n results['E0HSE06'] = energy\n results['E0PBE'] = float(execute('grep \\'energy without entropy\\' ../STEP1/OUTCAR | tail -1 | awk \\'{ print $4 }\\''))\n\n\n\n print('Updating results')\n print results\n HT.updateResults(results, cid)\n\nprint('Updating settings')\nprint settings\nHT.updateSettings(settings, cid)\n\n\n#RELOOP to the script\n\ncid_new = HT.fetch(qid)\n\nprint('Fetched calculation '+str(cid_new)+' from queue '+str(qid)+'.')\n\nos.chdir(qdir+'/LOGFILES')\n\nif int(cid_new) > 0:\n# print('Script ONCE: do not submit new job')\n execute('vaspsubmit ' + str(qid) + ' ' + str(submit_arg))\nelse:\n print('No calculations left; end script without submitting new job.')\n execute('touch '+str(qid)+'_'+str(cid)+'_fetch0')\n\nexecute('qstat')\n" }, { "alpha_fraction": 0.526089608669281, "alphanum_fraction": 0.6046654582023621, "avg_line_length": 17.724138259887695, "blob_id": "304f0b0cd375572ddaef66c74bf98fc81312c8b4", "content_id": "c5cd0e992b98f1637542a1d8bad3ff0a79cdaba7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 1629, "license_type": "permissive", "max_line_length": 76, "num_lines": 87, "path": "/examples/IntelligentScreening/submitmatscreen", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/bin/bash\n#Michael Sluydts, Titus 
Crepain\nwalltime=$[$2-1]':59:00'\n\nif [ $2 -gt 72 ]; then\n queue='special'\nelif [ $2 -gt 12 ]; then\n queue='long'\nelif [ $2 -eq 1 ]; then\n queue='debug'\nelse\n queue='short'\nfi\njobs='scripts'\nif [ $VSC_INSTITUTE_CLUSTER == 'muk' ]; then\n if [ $2 -eq 1 ]; then\n queue='debug'\n else\n queue='batch'\n fi\n cd $pscratch/queues/$1/LOGFILES\nelif [ $VSC_INSTITUTE_CLUSTER == 'breniac' ]; then\n if [ $2 -gt 72 ]; then\n queue='q7d'\n elif [ $2 -gt 24 ]; then\n queue='q72h'\n elif [ $2 -gt 1 ]; then\n queue='q24h'\n else\n queue='q1h'\n fi\n cd $VSC_SCRATCH/queues/$1/LOGFILES\nelse\n cd $VSC_SCRATCH_VO/$USER/queues/$1/LOGFILES\nfi\n\nif [ $VSC_INSTITUTE_CLUSTER == 'breniac' ]; then\n VASP='5.4.1-intel-2016a-VTST'\n python='2.7.11-intel-2016a'\nelse\n VASP='5.4.1.05Feb16-intel-2016b-mt-vaspsol-20150914-O2-patched-03082016'\n python='2.7.12-intel-2016b'\nfi\n\n\nif [ \"$4\" -lt \"8\" ] && [ \"$2\" -lt \"72\" ]; then\n small='_S'\n version='1'\nelse\n small=''\n version='0'\nfi\n\nspace='_'\n\ndir=`pwd`\ncat > ${VSC_INSTITUTE_CLUSTER}.sh << !\n#!/bin/bash\n#PBS -N Q$1$space$2$space$3$space$4$small\n\n#PBS -A lt1_2017-61\n\n#PBS -m a\n\n#PBS -q $queue\n\n#PBS -l walltime=$walltime\n\n#PBS -l nodes=$3:ppn=28\n\n#PBS -l pmem=4gb\n\nulimit -s unlimited\n\n\nmodule load Python/$python\nmodule load VASP/$VASP\nmodule load ase\nmodule load HighThroughput/devel\nmodule load aconvasp\nmodule load vsc-mympirun/3.4.2-intel-2016a-Python-2.7.11-vsc-base-2.4.17\npython ~/bin/HTtools/main_vo_P63mmcleuven.py $1 $2 $3 $4 $version\n\nexit 0\n!\n\nqsub ${VSC_INSTITUTE_CLUSTER}.sh\n" }, { "alpha_fraction": 0.5737220048904419, "alphanum_fraction": 0.5817490220069885, "avg_line_length": 28.587499618530273, "blob_id": "d74ee4649c972ff2c9d4b1ea61172014265ef43a", "content_id": "e27676f028720afe33f79d2dbdb6decd5666cd71", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2367, "license_type": "permissive", "max_line_length": 120, "num_lines": 80, "path": "/utils/generic.py", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "import os, subprocess, sys, grp\nfrom HighThroughput.communication.mysql import mysql_query\n\ndef resubmit(qid = None, server = None, args = None):\n if qid == None:\n qid = sys.argv[1]\n\n if server == None:\n server = os.getenv('VSC_INSTITUTE_CLUSTER')\n\n script = mysql_query('SELECT `submit` FROM `queues` WHERE `id` = ' + str(qid))['submit']\n\n args = '' + sys.argv[2] + ' ' + sys.argv[3] + ' ' + sys.argv[4]\n# args = '' + sys.argv[2] + ' ' + str(2) + ' ' + sys.argv[4]\n if server != 'breniac':\n execute(script + ' ' + str(qid) + ' ' + str(args))\n else:\n print('ssh login1 \"' + script + ' ' + str(qid) + ' ' + str(args) + '\"')\n execute('ssh login1 \"' + script + ' ' + str(qid) + ' ' + str(args) + '\"')\n print('Submitted new calculation in queue ' + str(qid) + ' on server ' + server + '.')\n return True\n\ndef execute(command):\n out, err = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True).communicate()\n out = out.decode();\n err = err.decode();\n print(out)\n print(err, file=sys.stderr)\n\n if err in locals():\n #raise Exception('Error in executing bash-command.')\n return False\n else:\n return out\n\ndef isfloat(value):\n try:\n float(value)\n return True\n except ValueError:\n return False\n\ndef mkdir(command):\n if not os.path.isdir(command):\n stat_info = os.stat(os.getcwd())\n uid = stat_info.st_uid\n gid = stat_info.st_gid\n 
os.makedirs(command,0o774)\n os.chown(command,-1,gid)\n\n\ndef remove(command):\n if os.path.isfile(command):\n os.remove(command)\n else:\n print(str(command)+' is not a file.')\n\ndef error_catch(command):\n try:\n execute(command)\n return True\n except:\n return False\n\ndef getNodeInfo():\n from collections import Counter\n nodefile = subprocess.Popen('cat $PBS_NODEFILE',stdout=subprocess.PIPE,shell=True)\n nodefile = [x.split('.')[0].replace('node','') for x in filter(None,nodefile.communicate()[0].decode().split('\\n'))]\n corecount = Counter()\n for node in nodefile:\n corecount[node] += 1\n return corecount\n\ndef getClass( kls ):\n parts = kls.split('.')\n module = \".\".join(parts[:-1])\n m = __import__( module )\n for comp in parts[1:]:\n m = getattr(m, comp) \n return m\n" }, { "alpha_fraction": 0.6292135119438171, "alphanum_fraction": 0.6404494643211365, "avg_line_length": 26.8125, "blob_id": "91d86c1ead331a90e9f9bdaf53a46788d7f5b288", "content_id": "07ad47772f5ef5a6e018d76390870fc44d1a1153", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 445, "license_type": "permissive", "max_line_length": 104, "num_lines": 16, "path": "/HTtools/HTrunning", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport HighThroughput as HT\nimport os,subprocess,sys\nfrom HighThroughput.communication.mysql import *\n\nserver = ''\n\nif len(sys.argv) >= 2:\n server = ' AND `queue` = ' + sys.argv[1] + ''\n\nif len(sys.argv) == 3:\n server += ' AND `server` = \\'' + os.getenv('VSC_INSTITUTE_CLUSTER') + '\\''\n\nrunning = mysql_query('SELECT COUNT(`id`) AS `count` FROM `calculations` WHERE `stat` % 2 = 1' + server)\n\nprint(running['count'])\n" }, { "alpha_fraction": 0.7341772317886353, "alphanum_fraction": 0.7594936490058899, "avg_line_length": 30.600000381469727, "blob_id": "a8e2503b232ef42d2fd93a7c3e533a9a2d441069", "content_id": "adaa353810644319c5856d2cf09d07ecb2c123e9", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 158, "license_type": "permissive", "max_line_length": 44, "num_lines": 5, "path": "/HTtools/HTgap", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport sys\nfrom pymatgen.io.vasp.outputs import Vasprun\nvr = Vasprun(sys.argv[1],occu_tol=0.1)\nprint(vr.eigenvalue_band_properties[0]);\n" }, { "alpha_fraction": 0.51871657371521, "alphanum_fraction": 0.536287248134613, "avg_line_length": 35.25, "blob_id": "a9b3a1245a0d9a06eff86b57c1cda58454b3df19", "content_id": "ffd35aa1dc7f3395bf3f2588f5b9379ddc752361", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1309, "license_type": "permissive", "max_line_length": 176, "num_lines": 36, "path": "/HTtools/HTphasedb", "repo_name": "MichaelSluydts/QueueManager-client", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport re,sys,subprocess\nfrom HighThroughput.communication.mysql import *\nefile = sys.argv[1]\nmats = open(efile,'r')\nsql = open(efile + '.sql','w')\nfrom random import randint\nfrom time import sleep\n\nfor line in mats:\n counter = 0\n i=0\n while counter == 0:\n phase = subprocess.Popen('HTphase ' + sys.argv[2] + ' \\'' + line.strip() + '\\' | grep None',stdout=subprocess.PIPE,shell=True).communicate()[0]\n results = filter(None,phase.split('\\n'))\n counter = len(results)\n if i > 0:\n sleep(randint(3,30))\n 
i += 1\n    for res_line in results:\n        cols = res_line.split(' ', 4)\n        #formlist = [a for a in re.split(r'([A-Z][a-z]*\\d*)', cols[0]) if a]\n        #for i in range(0,len(formlist)):\n        #    if re.search(\"4\",formlist[i]) and i != 3:\n        #        formlist.append(formlist.pop(i))\n        #text = ''.join(formlist)\n        #formlist.insert(1,formlist.pop(2))\n        #text2 = ''.join(formlist)\n        text = cols[0]\n        ehull = cols[3]\n        if len(cols) == 5:\n            path = cols[4]\n        else:\n            path = ''\n        sql.write('UPDATE `zintlfinal` SET `ehull` = ' + str(ehull) + ', `path` = \"' + path + '\" WHERE `file` IN (SELECT `file` FROM `map` WHERE `text` = \\'' + text + '\\');\\n')\nsql.close()\n\n\n\n\n" } ]
82
eyalbi/college-python-projects
https://github.com/eyalbi/college-python-projects
3a36ec3033c956f6f9536687876ddbba16c2d492
88e1b0053bfdddc7438323ac17edb4b400792222
7d10507033ae669b80db73077c4de9356028bea5
refs/heads/master
2022-09-25T04:55:21.261150
2020-05-27T08:28:00
2020-05-27T08:28:00
267,135,917
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.4950718283653259, "alphanum_fraction": 0.5124541521072388, "avg_line_length": 29.051008224487305, "blob_id": "029071401703bdf535fdd3e57287a1b787891c16", "content_id": "1d3c9016f03fde5d465a184c967de1269fb8bfcf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 26178, "license_type": "no_license", "max_line_length": 133, "num_lines": 843, "path": "/Python Enviorment 3.py", "repo_name": "eyalbi/college-python-projects", "src_encoding": "UTF-8", "text": "from functools import reduce\r\nfrom operator import mul, add\r\nfrom itertools import accumulate\r\n\r\n\r\n# ex 1.1\r\nclass Date:\r\n \"\"\"\r\n date class holds a year month and a day, uses months dictionary for str assignment\r\n \"\"\"\r\n months = {1: \"January\", 2: \"February\", 3: \"March\", 4: \"April\", 5: \"May\", 6: \"June\", 7: \"July\", 8: \"August\",\r\n 9: \"September\", 10: \"October\", 11: \"November\", 12: \"December\"}\r\n\r\n def __init__(self, year, month, day):\r\n self.year = year\r\n self.Day = day\r\n self.Month = month\r\n\r\n def __str__(self):\r\n return \"'{0}th of {1},{2}'\".format(self.Day, self.months[self.Month], self.year)\r\n\r\n def __repr__(self):\r\n return \"Date({0}, {1}, {2})\".format(self.year, self.Day, self.Month)\r\n\r\n\r\nclass Time:\r\n \"\"\"\r\n class Time holds hour and minute\r\n \"\"\"\r\n\r\n def __init__(self, hour, minute):\r\n self.Hour = hour\r\n self.Minute = minute\r\n\r\n def __str__(self):\r\n if self.Minute < 10 and self.Hour < 10:\r\n return \"0{0}:0{1}\".format(self.Hour, self.Minute)\r\n if self.Minute > 9 and self.Hour > 9:\r\n return \"{0}:{1}\".format(self.Hour, self.Minute)\r\n if self.Minute < 10 and self.Hour > 9:\r\n return \"{0}:0{1}\".format(self.Hour, self.Minute)\r\n if self.Minute > 9 and self.Hour < 10:\r\n return \"0{0}:{1}\".format(self.Hour, self.Minute)\r\n\r\n def __repr__(self):\r\n return \"Time({0},{1})\".format(self.Hour, self.Minute)\r\n\r\n\r\nclass CalendarEntry:\r\n \"\"\"\r\n class that represent a day and tasks of the day. using tasks dictionary to hold the tasks\r\n \"\"\"\r\n tasks = {}\r\n\r\n def __init__(self, year, month, day):\r\n self.date = Date(year, month, day)\r\n\r\n def addTask(self, lecture, start, end):\r\n check = (start.Hour, end.Hour)\r\n checkmin = (start.Minute, end.Minute)\r\n flag = True\r\n for x in self.tasks.keys():\r\n stri1 = x[0][:2]\r\n stri2 = x[1][:2]\r\n stri3 = x[0][3:]\r\n stri4 = x[1][3:]\r\n if int(stri1) < check[0] < int(stri2):\r\n flag = False\r\n if check[0] < int(stri1) < check[1]:\r\n flag = False\r\n if check[1] == stri2 and checkmin[1] < stri4:\r\n flag = False\r\n if flag:\r\n self.tasks[(start.__str__(), end.__str__())] = lecture\r\n else:\r\n print(\"there already a task in this hours\")\r\n\r\n def __str__(self):\r\n classes = \"Todo list for \" + self.date.__str__() + \"\\n\"\r\n i = 1\r\n for k, v in self.tasks.items():\r\n classes += \"{0}. 
{1}-{2} - {3}\".format(i, k[0], k[1], v) + \"\\n\"\r\n i = i + 1\r\n return classes\r\n\r\n\r\ntoday = Date(2017, 1, 20)\r\nprint(today.__repr__())\r\nprint(today.year)\r\nprint(today)\r\ntodo = CalendarEntry(2017, 1, 20)\r\nt = Time(10, 0)\r\nstr(t)\r\ntodo.addTask(\"PPL lecture\", t, Time(13, 0))\r\ntodo.addTask(\"PPL homework#4\", Time(14, 0), Time(16, 0))\r\nprint(todo.tasks)\r\nprint(todo)\r\n\r\n\r\n# ex 1.2\r\ndef make_class(attrs, base=None):\r\n \"\"\"Return a new class (a dispatch dictionary) with given class attributes\"\"\"\r\n\r\n def get(name):\r\n if name in attrs:\r\n return attrs[name]\r\n elif base:\r\n return base['get'](name)\r\n\r\n # Setter: class attribute (always sets in this class)\r\n def set(name, value):\r\n attrs[name] = value\r\n\r\n # Return a new initialized objec'aaa': 5.5t instance (a dispatch dictionary)\r\n def new(*args):\r\n # instance attributes (hides encapsulating function's attrs)\r\n attrs = {}\r\n\r\n # Getter: instance attribute (looks in object, then class (binds self if callable))\r\n def get(name):\r\n if name in attrs:\r\n return attrs[name]\r\n else:\r\n value = cls['get'](name)\r\n if callable(value):\r\n return lambda *args: value(obj, *args)\r\n else:\r\n return value\r\n\r\n # Setter: instance attribute (always sets in object)\r\n def set(name, value):\r\n attrs[name] = value\r\n\r\n # instance dictionary\r\n obj = {'get': get, 'set': set}\r\n\r\n # calls constructor if present\r\n init = get('__init__')\r\n if init:\r\n init(*args)\r\n\r\n return obj\r\n\r\n # class dictionary\r\n cls = {'get': get, 'set': set, 'new': new}\r\n return cls\r\n\r\n\r\ndef make_date_class():\r\n \"\"\"\r\n date shmyton class with init to initialize year mount day\r\n :return:make class with the init sent to make cls dictinary\r\n \"\"\"\r\n\r\n def __init__(Self, Year, Month, Day):\r\n Self['set'](\"year\", Year)\r\n Self['set'](\"month\", Month)\r\n Self['set'](\"day\", Day)\r\n\r\n return make_class({\"__init__\": __init__})\r\n\r\n\r\ndef make_time_class():\r\n \"\"\"\r\n time shmyton class holds hour and minute\r\n :return: make class with str and init sent to cls\r\n \"\"\"\r\n\r\n def __init__(Self, Hour, Minute):\r\n Self['set'](\"hour\", Hour)\r\n Self['set'](\"minute\", Minute)\r\n\r\n def __str__(self):\r\n string = None\r\n if self['get'](\"minute\") < 10 and self['get'](\"hour\") < 10:\r\n return \"0{0}:0{1}\".format(self['get'](\"hour\"), self['get'](\"minute\"))\r\n if self['get'](\"minute\") > 9 and self['get'](\"hour\") > 9:\r\n return \"{0}:{1}\".format(self['get'](\"hour\"), self['get'](\"minute\"))\r\n if self['get'](\"minute\") < 10 and self['get'](\"hour\") > 9:\r\n return \"{0}:0{1}\".format(self['get'](\"hour\"), self['get'](\"minute\"))\r\n if self['get'](\"minute\") > 9 and self['get'](\"hour\") < 10:\r\n return \"0{0}:{1}\".format(self['get'](\"hour\"), self['get'](\"minute\"))\r\n\r\n return make_class({\"__init__\": __init__, \"__str__\": __str__})\r\n\r\n\r\ndef make_calentry_class():\r\n \"\"\"\r\n calentry class holds a day and the tasks of that day\r\n :return: make class with tasks dict init and add task func sent to cls\r\n \"\"\"\r\n tasks = {}\r\n\r\n def __init__(Self, Year, month, day):\r\n Self['set'](\"year\", Year)\r\n Self['set'](\"month\", month)\r\n Self['set'](\"day\", day)\r\n\r\n def addTask(Self, name, start, end):\r\n check = (start[\"get\"](\"hour\"), end[\"get\"](\"hour\"))\r\n checkmin = (start[\"get\"](\"minute\"), end[\"get\"](\"minute\"))\r\n flag = True\r\n for x in tasks.keys():\r\n stri1 = 
x[0][:2]\r\n stri2 = x[1][:2]\r\n stri3 = x[0][3:]\r\n stri4 = x[1][3:]\r\n if int(stri1) < check[0] < int(stri2):\r\n flag = False\r\n if check[0] < int(stri1) < check[1]:\r\n flag = False\r\n if check[1] == stri2 and checkmin[1] < stri4:\r\n flag = False\r\n if flag:\r\n tasks[(start['get'](\"__str__\")(), end['get'](\"__str__\")())] = name\r\n else:\r\n print(\"there already a task in this hours\")\r\n\r\n # tasks[(start['get'](\"__str__\")(), end['get'](\"__str__\")())] = name\r\n\r\n return make_class({\"__init__\": __init__, \"tasks\": tasks, \"addTask\": addTask})\r\n\r\n\r\nCalendarEntry = make_calentry_class()\r\ntodo = CalendarEntry[\"new\"](2017, 1, 20)\r\ntodo[\"get\"](\"tasks\")\r\nTime = make_time_class()\r\nt = Time[\"new\"](10, 0)\r\nprint(t[\"get\"](\"__str__\")())\r\ntodo[\"get\"](\"addTask\")(\"PPL lecture\", t, Time[\"new\"](13, 0))\r\ntodo[\"get\"](\"addTask\")(\"PPL homework#4\", Time[\"new\"](14, 0), Time[\"new\"](16, 0))\r\nprint(todo[\"get\"](\"tasks\"))\r\n\r\n\r\n# generic exercise\r\n\r\n\r\nclass Dollar:\r\n \"\"\"\r\n dollar class represnt a dollar.\r\n with init:\r\n balance - dollar amount\r\n func:\r\n str\r\n repr\r\n amount - dollar in shekels amount\r\n add and sub to interact with euro and shekel class\r\n \"\"\"\r\n def __init__(self, amount):\r\n self.balance = amount\r\n\r\n def amount(self):\r\n return self.balance * rates[(\"dollar\", \"nis\")]\r\n\r\n def __str__(self):\r\n return \"{0}$\".format(self.balance)\r\n\r\n def __add__(self, other):\r\n return self.amount() + other.amount()\r\n\r\n def __repr__(self):\r\n return \"Dollar({0})\".format(self.balance)\r\n\r\n def __sub__(self, other):\r\n return self.amount() - other.amount()\r\n\r\n\r\nclass Euro:\r\n \"\"\"\r\n class euro class represnt a euro.\r\n with init:\r\n balance - euro amount\r\n func:\r\n str\r\n repr\r\n amount - euro in shekels amount\r\n add and sub to interact with euro and shekel class\r\n \"\"\"\r\n def __init__(self, amount):\r\n self.balance = amount\r\n\r\n def amount(self):\r\n return self.balance * rates[(\"euro\", \"nis\")]\r\n\r\n def __str__(self):\r\n return \"{0}€\".format(self.balance)\r\n\r\n def __add__(self, other):\r\n return self.amount() + other.amount()\r\n\r\n def __repr__(self):\r\n return \"Euro({0})\".format(self.balance)\r\n\r\n def __sub__(self, other):\r\n return self.amount() - other.amount()\r\n\r\n\r\nclass Shekel:\r\n \"\"\"\r\n class shekel class represnt a shekel.\r\n with init:\r\n balance - shekel amount\r\n func:\r\n str\r\n repr\r\n amount - shekel in shekels amount\r\n add and sub to interact with euro and dollar class\r\n \"\"\"\r\n def __init__(self, amount):\r\n self.balance = amount\r\n\r\n def amount(self):\r\n return self.balance\r\n\r\n def __str__(self):\r\n return \"{0}nis\".format(self.balance)\r\n\r\n def __add__(self, other):\r\n return self.amount() + other.amount()\r\n\r\n def __repr__(self):\r\n return \"Shekel({0})\".format(self.balance)\r\n\r\n def __sub__(self, other):\r\n return self.balance - other.amount()\r\n\r\n\r\ndef add(x, y):\r\n \"\"\"\r\n add generic func\r\n :param x: euro/dollar/shekel\r\n :param y: euro/dollar/shekel\r\n :return: amount in shekels.\r\n \"\"\"\r\n return x + y\r\n\r\n\r\ntype_tags = {Dollar: \"dollar\", Euro: \"euro\", Shekel: \"nis\"}\r\nrates = {(\"dollar\", \"nis\"): 3.82, (\"euro\", \"nis\"): 4.07}\r\ns = Shekel(50)\r\nd = Dollar(50)\r\ne = Euro(50)\r\nprint(d.amount())\r\nprint(e.amount())\r\nprint(d + s)\r\nprint(add(e, d))\r\nz = 
eval(repr(d))\r\nprint(z)\r\nprint(s)\r\nprint(e)\r\n\r\n\r\ndef apply(func_name, x, y):\r\n \"\"\"\r\n generic func that gets an opertion name and do the operation on x and y uses implementation to choose the necessary\r\n func and operate it on x and y\r\n :param func_name: add,sub\r\n :param x: euro/dollar/shekel\r\n :param y: euro/dollar/shekel\r\n :return: x+y,x-y in the type of x\r\n \"\"\"\r\n\r\n key = (func_name, type_tags[type(x)], type_tags[type(y)])\r\n\r\n def add_shekel_shekel(shekel1, shekel2):\r\n return Shekel(shekel1 + shekel2)\r\n\r\n def add_dollar_dollar(dollar1, dollar2):\r\n return Dollar((dollar1 + dollar2) / rates[(\"dollar\", \"nis\")])\r\n\r\n def add_euro_euro(euro1, euro2):\r\n return Euro((euro1 + euro2) / rates[(\"euro\", \"nis\")])\r\n\r\n def sub_shekel_shekel(shekel1, shekel2):\r\n return Shekel(shekel1 - shekel2)\r\n\r\n def sub_dollar_dollar(dollar1, dollar2):\r\n return Dollar((dollar1 - dollar2) / rates[(\"dollar\", \"nis\")])\r\n\r\n def sub_euro_euro(euro1, euro2):\r\n return Euro((euro1 - euro2) / rates[(\"euro\", \"nis\")])\r\n\r\n def add_dollar_shekel(dollar, shekel):\r\n return Dollar((dollar + shekel) / rates[(\"dollar\", \"nis\")])\r\n\r\n def add_shekel_dollar(shekel, dollar):\r\n return Shekel(shekel + dollar)\r\n\r\n def add_euro_shekel(euro, shekel):\r\n return Euro((euro + shekel) / rates[(\"euro\", \"nis\")])\r\n\r\n def add_shekel_euro(shekel, euro):\r\n return Shekel(shekel + euro)\r\n\r\n def add_dollar_euro(dollar, euro):\r\n return Dollar((dollar + euro) / rates[(\"dollar\", \"nis\")])\r\n\r\n def add_euro_dollar(euro, dollar):\r\n return Euro((dollar + euro) / rates[(\"euro\", \"nis\")])\r\n\r\n def sub_dollar_shekel(dollar, shekel):\r\n return Dollar((dollar - shekel) / rates[(\"dollar\", \"nis\")])\r\n\r\n def sub_shekel_dollar(shekel, dollar):\r\n return Shekel(shekel - dollar)\r\n\r\n def sub_euro_shekel(euro, shekel):\r\n return Euro((euro - shekel) / rates[(\"euro\", \"nis\")])\r\n\r\n def sub_shekel_euro(shekel, euro):\r\n return Shekel(shekel - euro)\r\n\r\n def sub_dollar_euro(dollar, euro):\r\n return Dollar((dollar - euro) / rates[(\"dollar\", \"nis\")])\r\n\r\n def sub_euro_dollar(euro, dollar):\r\n return Euro((dollar - euro) / rates[(\"euro\", \"nis\")])\r\n\r\n apply.implementations = {(\"add\", \"dollar\", \"dollar\"): add_dollar_dollar, (\"add\", \"euro\", \"euro\"): add_euro_euro,\r\n (\"add\", \"nis\", \"nis\"): add_shekel_shekel, (\"add\", \"dollar\", \"nis\"): add_dollar_shekel,\r\n (\"add\", \"dollar\", \"euro\"): add_dollar_euro, (\"add\", \"euro\", \"nis\"): add_euro_shekel,\r\n (\"add\", \"euro\", \"dollar\"): add_euro_dollar, (\"add\", \"nis\", \"dollar\"): add_shekel_dollar,\r\n (\"add\", \"nis\", \"euro\"): add_shekel_euro, (\"sub\", \"dollar\", \"dollar\"): sub_dollar_dollar,\r\n (\"sub\", \"euro\", \"euro\"): sub_euro_euro, (\"sub\", \"nis\", \"nis\"): sub_shekel_shekel,\r\n (\"sub\", \"dollar\", \"nis\"): sub_dollar_shekel, (\"sub\", \"dollar\", \"euro\"): sub_dollar_euro,\r\n (\"sub\", \"euro\", \"nis\"): sub_euro_shekel, (\"sub\", \"euro\", \"dollar\"): sub_euro_dollar,\r\n (\"sub\", \"nis\", \"dollar\"): sub_shekel_dollar, (\"sub\", \"nis\", \"euro\"): sub_shekel_euro,\r\n }\r\n\r\n return apply.implementations[key](x, y)\r\n\r\n\r\nprint(apply('add', Shekel(50), Dollar(20)).__repr__())\r\nrates[(\"euro\", \"dollar\")] = 1.06\r\nprint(apply('add', Dollar(50), Euro(20)).__repr__())\r\nprint(apply(\"sub\", Dollar(50), Euro(20)).__repr__())\r\n\r\n\r\ndef dollar_to_shekel(x):\r\n return 
Shekel(x.amount())\r\n\r\n\r\ndef euro_to_shekel(x):\r\n return Shekel(x.amount())\r\n\r\n\r\ndef coerce_apply(operation, x, y):\r\n \"\"\"\r\n coercion generic func gets opertion and operate it on x,y after coerce one of two of them with coercion dict\r\n :param operation: add,sub\r\n :param x: euro,dollar,shekel\r\n :param y: euro,dollar,shekel\r\n :return: amount in shekel\r\n \"\"\"\r\n def add_shekel(s1, s2):\r\n return Shekel(s1 + s2)\r\n\r\n def sub_shekel(s1, s2):\r\n return Shekel(s1 - s2)\r\n\r\n coerce_apply.implementations = {('add', 'nis'): add_shekel, ('sub', 'nis'): sub_shekel}\r\n\r\n flag = False\r\n tx, ty = type_tags[type(x)], type_tags[type(y)]\r\n if tx != ty:\r\n if (tx, ty) in coercions:\r\n flag = True\r\n tx, x = ty, coercions[(tx, ty)](x)\r\n elif (ty, tx) in coercions:\r\n ty, y = tx, coercions[(ty, tx)](y)\r\n flag = True\r\n if flag:\r\n key = (operation, tx)\r\n return coerce_apply.implementations[key](x, y)\r\n return coerce_apply.implementations[(operation, 'nis')](Shekel(x.amount()), Shekel(y.amount()))\r\n\r\n\r\ncoercions = {('dollar', 'nis'): dollar_to_shekel, ('euro', 'nis'): euro_to_shekel}\r\n\r\nprint(coercions[(\"dollar\", \"nis\")](Dollar(50)).__repr__())\r\nprint(coerce_apply('add', Shekel(50), Dollar(20)).__repr__())\r\nprint(coerce_apply('add', Dollar(50), Euro(20)).__repr__())\r\nprint(coerce_apply(\"sub\", Dollar(50), Euro(20)).__repr__())\r\n\r\n\r\n# ex 3 exceptions\r\ndef parking(price, reg, pri, vip):\r\n \"\"\"\r\n create an parking type object(uses exception to check user entry data or index error when iterating over cars dict)\r\n :param price: hour price\r\n :param reg: spaces in regular parking\r\n :param pri: spaces in priority parking\r\n :param vip: spaces in VIP parking\r\n :return: dictionary dispatch API\r\n \"\"\"\r\n cars = []\r\n Parks = {'Regular': reg, 'Priority': pri, 'VIP': vip}\r\n try:\r\n if price < 1:\r\n raise ValueError(\"the price value is bad\")\r\n elif reg <= 0 or pri <= 0 or vip <= 0:\r\n raise ValueError(\"parking places error\")\r\n\r\n def start_parking(Id, Ptype):\r\n \"\"\"\r\n add a car to a parking space\r\n :param id: car number\r\n :param Ptype: parking type\r\n :return:\r\n \"\"\"\r\n try:\r\n\r\n if type(Id) != int:\r\n raise TypeError(\"incorrect car number\")\r\n if Ptype not in (\"Regular\", \"VIP\", \"Priority\"):\r\n raise TypeError(Ptype + \" is incorrect parking type\")\r\n nonlocal cars\r\n nonlocal Parks\r\n if Parks[Ptype] > 0:\r\n cars.append([Id, Ptype, 1])\r\n Parks[Ptype] -= 1\r\n except TypeError as e:\r\n print(e.args[0])\r\n\r\n def next_time():\r\n \"\"\"\r\n a func that adding another hour for every car\r\n :return: nothing\r\n \"\"\"\r\n nonlocal cars\r\n for i in range(len(cars)):\r\n cars[i][2] += 1\r\n\r\n def end_parking(Id):\r\n \"\"\"\r\n ends car parking by a given id\r\n :param Id: car that we want to end parking for..\r\n :return:notihng\r\n \"\"\"\r\n nonlocal cars\r\n nonlocal Parks\r\n for i in range(len(cars)):\r\n if Id == cars[i][0]:\r\n print('car: ' + str(cars[i][0]) + ', ' + 'parking type: ' + str(\r\n cars[i][1]) + ', parking time: ' + str(\r\n cars[i][2]))\r\n if cars[i][1] == 'Regular':\r\n print('payment: ', price * cars[i][2])\r\n Parks['Regular'] += 1\r\n elif cars[i][1] == 'Priority':\r\n Parks['Priority'] += 1\r\n print('payment: ', price * 2 * cars[i][2])\r\n else:\r\n Parks['VIP'] += 1\r\n print('payment: ', price * 3 * cars[i][2])\r\n cars.pop(i)\r\n exit(1)\r\n print('car not found')\r\n\r\n def print_parking(ptype):\r\n \"\"\"\r\n print the parking 
of a specific parking type\r\n        :param ptype:\r\n        :return: nothing\r\n        \"\"\"\r\n        nonlocal cars\r\n        flag = False\r\n        for i in range(len(cars)):\r\n            if cars[i][1] == ptype:\r\n                print('car: ' + str(cars[i][0]) + ', ' + 'parking time: ' + str(cars[i][2]))\r\n                flag = True\r\n        if not flag:\r\n            print(ptype + ' park is empty')\r\n\r\n    def print_list():\r\n        \"\"\"\r\n        creates an option to print all parked cars\r\n        :return: a dispatch dict\r\n        \"\"\"\r\n        counter = 0\r\n\r\n        def next():\r\n            \"\"\"\r\n            iterates on car list\r\n            :return: nothing\r\n            \"\"\"\r\n            try:\r\n                nonlocal cars\r\n                nonlocal counter\r\n                print('car: ' + str(cars[counter][0]) + ', ' + 'parking type: ' + str(\r\n                    cars[counter][1]) + ', parking time: ' + str(cars[counter][2]))\r\n                counter += 1\r\n            except IndexError:\r\n                print(\"no car\")\r\n\r\n        return {'next': next}\r\n\r\n    return {'print_list': print_list, 'print_parking': print_parking, 'next_time': next_time,\r\n            'start_parking': start_parking,\r\n            'end_parking': end_parking}\r\n    except ValueError as e:\r\n        print(e.args[0])\r\n    except TypeError as e:\r\n        print(e.args[0])\r\n    except IndexError:\r\n        print(\"no car\")\r\n\r\n\r\npark1 = parking(-10, 3, 3, 3)\r\npark1 = parking(10, 0, 3, 3)\r\nprint(park1)\r\npark1 = parking(10, 3, 3, 3)\r\npark1['start_parking'](\"aaa\", 'Regular')\r\npark1['start_parking'](223, 'VIP1')\r\npark1['start_parking'](222, 'Regular')\r\npark1['start_parking'](223, 'Regular')\r\npark1['next_time']()\r\npark1['start_parking'](224, 'Regular')\r\npark1['start_parking'](225, 'VIP')\r\nprn = park1['print_list']()\r\nprn = park1['print_list']()\r\nprint(prn)\r\nfor i in range(6):\r\n    prn['next']()\r\n\r\n\r\n# ex recursive structures\r\n\r\n\r\nclass Expr(object):\r\n    \"\"\"\r\n    expression tree class\r\n    \"\"\"\r\n    def __init__(self, operator, operand1, operand2):\r\n        self.operator = operator\r\n        self.operand1 = operand1\r\n        self.operand2 = operand2\r\n\r\n    def __repr__(self):\r\n        return \"Expr({0},{1},{2})\".format(repr(self.operator), repr(self.operand1), repr(self.operand2))\r\n\r\n\r\ndef build_expr_tree(line):\r\n    \"\"\"\r\n    func to take a user expression and build a tree from it using the Expr class\r\n    :param line: user expression\r\n    :return: Expr object that represents the user expression\r\n    \"\"\"\r\n    operator = line[0]\r\n    if type(line[1]) is tuple:\r\n        operand1 = build_expr_tree(line[1])\r\n    else:\r\n        operand1 = line[1]\r\n    if type(line[2]) is tuple:\r\n        operand2 = build_expr_tree(line[2])\r\n    else:\r\n        operand2 = line[2]\r\n    return Expr(operator, operand1, operand2)\r\n\r\n\r\nexp = build_expr_tree((\"add\", (\"mul\", 2, 3), 10))\r\nprint(repr(exp))\r\n\r\n\r\n# ex5\r\n\r\ndef read_eval_print_loop():\r\n    \"\"\"Run a read-eval-print loop for calculator. (using exceptions to deal with zero-division and argument-amount errors) \"\"\"\r\n    while True:\r\n        try:\r\n            expression_tree = calc_parse(input('calc> '))\r\n            print(calc_eval(expression_tree))\r\n        except (SyntaxError, TypeError, ZeroDivisionError) as err:\r\n            if type(err) is ZeroDivisionError:\r\n                print(float(\"inf\"))\r\n            else:\r\n                print(type(err).__name__ + ':', err)\r\n        except (KeyboardInterrupt, EOFError):    # <Control>-D, etc. 
<ctrl-C>\r\n print('Calculation completed.')\r\n return\r\n\r\n\r\n# Eval & Apply\r\n\r\nclass Exp(object):\r\n \"\"\"A call expression in Calculator.\r\n\r\n >>> Exp('add', [1, 2])\r\n Exp('add', [1, 2])\r\n >>> str(Exp('add', [1, Exp('mul', [2, 3])]))\r\n 'add(1, mul(2, 3))'\r\n \"\"\"\r\n\r\n def __init__(self, operator, operands):\r\n self.operator = operator\r\n self.operands = operands\r\n\r\n def __repr__(self):\r\n return 'Exp({0}, {1})'.format(repr(self.operator), repr(self.operands))\r\n\r\n def __str__(self):\r\n operand_strs = ', '.join(map(str, self.operands))\r\n return '{0}({1})'.format(self.operator, operand_strs)\r\n\r\n\r\ndef calc_eval(exp):\r\n \"\"\"Evaluate a Calculator expression.\r\n\r\n >>> calc_eval(Exp('add', [2, Exp('mul', [4, 6])]))\r\n 26\r\n \"\"\"\r\n if type(exp) in (int, float):\r\n return exp\r\n if type(exp) == Exp:\r\n arguments = list(map(calc_eval, exp.operands))\r\n return calc_apply(exp.operator, arguments)\r\n\r\n\r\ndef calc_apply(operator, args):\r\n \"\"\"Apply the named operator to a list of args.\r\n\r\n >>> calc_apply('+', [1, 2, 3])\r\n 6\r\n >>> calc_apply('-', [10, 1, 2, 3])\r\n 4\r\n >>> calc_apply('*', [])\r\n 1\r\n >>> calc_apply('/', [40, 5])\r\n 8.0\r\n \"\"\"\r\n if operator in ('add', '+'):\r\n return sum(args)\r\n if operator in ('sub', '-'):\r\n if len(args) == 0:\r\n raise TypeError(operator + 'requires at least 1 argument')\r\n if len(args) == 1:\r\n return -args[0]\r\n return sum(args[:1] + [-arg for arg in args[1:]])\r\n if operator in ('mul', '*'):\r\n if len(args) == 0:\r\n raise TypeError(operator + ' requires at least 1 argument')\r\n return reduce(mul, args, 1)\r\n if operator in ('div', '/'):\r\n if len(args) != 2:\r\n raise TypeError(operator + ' requires exactly 2 arguments')\r\n numer, denom = args\r\n return numer / denom\r\n if operator == 'round':\r\n if len(args) != 2:\r\n raise TypeError(operator + \" requires exactly 2 arguments\")\r\n else:\r\n return \"{0:.{1}f}\".format(args[0], args[1])\r\n\r\n\r\n# Parsing\r\n\r\ndef calc_parse(line):\r\n \"\"\"Parse a line of calculator input and return an expression tree.\"\"\"\r\n tokens = tokenize(line)\r\n expression_tree = analyze(tokens)\r\n if len(tokens) > 0:\r\n raise SyntaxError('Extra token(s): ' + ' '.join(tokens))\r\n return expression_tree\r\n\r\n\r\ndef tokenize(line):\r\n \"\"\"Convert a string into a list of tokens.\r\n\r\n >>> tokenize('add(2, mul(4, 6))')\r\n ['add', '(', '2', ',', 'mul', '(', '4', ',', '6', ')', ')']\r\n \"\"\"\r\n spaced = line.replace('(', ' ( ').replace(')', ' ) ').replace(',', ' , ')\r\n return spaced.strip().split()\r\n\r\n\r\nknown_operators = ['add', 'sub', 'mul', 'div', '+', '-', '*', '/', \"round\"]\r\n\r\n\r\ndef analyze(tokens):\r\n \"\"\"Create a tree of nested lists from a sequence of tokens.\r\n\r\n Operand expressions can be separated by commas, spaces, or both.\r\n\r\n >>> analyze(tokenize('add(2, mul(4, 6))'))\r\n Exp('add', [2, Exp('mul', [4, 6])])\r\n >>> analyze(tokenize('mul(add(2, mul(4, 6)), add(3, 5))'))\r\n Exp('mul', [Exp('add', [2, Exp('mul', [4, 6])]), Exp('add', [3, 5])])\r\n \"\"\"\r\n assert_non_empty(tokens)\r\n token = analyze_token(tokens.pop(0))\r\n if type(token) in (int, float):\r\n return token\r\n if token in known_operators:\r\n if len(tokens) == 0 or tokens.pop(0) != '(':\r\n raise SyntaxError('expected ( after ' + token)\r\n return Exp(token, analyze_operands(tokens))\r\n else:\r\n raise SyntaxError('unexpected ' + token)\r\n\r\n\r\ndef analyze_operands(tokens):\r\n \"\"\"Analyze a sequence of 
comma-separated operands.\"\"\"\r\n assert_non_empty(tokens)\r\n operands = []\r\n while tokens[0] != ')':\r\n if operands and tokens.pop(0) != ',':\r\n raise SyntaxError('expected ,')\r\n operands.append(analyze(tokens))\r\n assert_non_empty(tokens)\r\n tokens.pop(0) # Remove )\r\n return operands\r\n\r\n\r\ndef assert_non_empty(tokens):\r\n \"\"\"Raise an exception if tokens is empty.\"\"\"\r\n if len(tokens) == 0:\r\n raise SyntaxError('unexpected end of line')\r\n\r\n\r\ndef analyze_token(token):\r\n \"\"\"Return the value of token if it can be analyzed as a number, or token.\r\n\r\n >>> analyze_token('12')\r\n 12\r\n >>> analyze_token('7.5')\r\n 7.5\r\n >>> analyze_token('add')\r\n 'add'\r\n \"\"\"\r\n try:\r\n return int(token)\r\n except (TypeError, ValueError):\r\n try:\r\n return float(token)\r\n except (TypeError, ValueError):\r\n return token\r\n\r\n\r\ndef run():\r\n read_eval_print_loop()\r\n\r\n\r\nrun()\r\n" }, { "alpha_fraction": 0.5882517695426941, "alphanum_fraction": 0.5969230532646179, "avg_line_length": 24.481481552124023, "blob_id": "84c16dacd1477a7584780c3a93d427ab178b089b", "content_id": "7d2c4dd677a3168a154acda7da8a3969a8e3ffb1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3575, "license_type": "no_license", "max_line_length": 101, "num_lines": 135, "path": "/Python_environment_2.py", "repo_name": "eyalbi/college-python-projects", "src_encoding": "UTF-8", "text": "import math # import math module\r\n\r\n# ex 4\r\nfn = lambda n: 2 * n\r\n\r\n\r\ndef Make_iterator(fn):\r\n \"\"\"\r\n function that create an iterator that changes according to a function parameter with nonlocal use\r\n :param fn: parameter func that decide the count progress\r\n counter -- a parameter that counts for every run change from the inner func\r\n iterate -- inner func that uses non local counter and fn to create the wanted iteration\r\n :return: iterate the inner function\r\n \"\"\"\r\n counter = -1\r\n\r\n def iterate():\r\n \"\"\"\r\n inner func that uses non local counter and fn to create the wanted iteration\r\n :return:int number that calculate by operating fn on counter\r\n \"\"\"\r\n nonlocal counter\r\n counter += 1\r\n return fn(counter)\r\n\r\n return iterate\r\n\r\n\r\n''' func 1 tests\r\niterator = Make_iterator(fn)\r\nfor i in range(4):\r\n print(iterator())\r\n\r\nprint(iterator())\r\nit = Make_iterator(fn)\r\nfor i in range(4):\r\n print(it())\r\n'''\r\n\r\n\r\n# ex 5\r\ndef isPrime(x):\r\n \"\"\"\r\n\r\n :param x:a given number to check if is prime\r\n :return: false -- if it not prime\r\n true -- if the number is prime\r\n \"\"\"\r\n for i in range(2, x):\r\n if x % i == 0:\r\n return False\r\n return True\r\n\r\n\r\ndef PerfectSquare(x):\r\n \"\"\"\r\n checks if a number is perfect square -- the number sqrt is a integer.\r\n :param x: the number that we want to check\r\n :return: true -- if is perfect square\r\n false -- if isn't\r\n \"\"\"\r\n s = int(math.sqrt(x))\r\n return s * s == x\r\n\r\n\r\ndef isFib(x):\r\n \"\"\"\r\n check if a given number is in fibonacci sequence\r\n :param x: number to check\r\n :return: true -- if the number in fibo seq\r\n false -- if is not\r\n \"\"\"\r\n return PerfectSquare(5 * x * x + 4) or PerfectSquare(5 * x * x - 4)\r\n\r\n\r\ndef listFilter(List, f):\r\n \"\"\"\r\n create a new filtered list that stand in given function definition\r\n :param List: list that we want to filter\r\n :param f: a func that decide which list argo we want to filter\r\n :return: a new list that has 
been filtered\r\n    \"\"\"\r\n    size = len(List) - 1\r\n    while size >= 0:\r\n        if not f(List[size]):\r\n            List.pop(size)\r\n        size -= 1\r\n    return List\r\n\r\n\r\ndef listFilterMulti(List, fList):\r\n    \"\"\"\r\n    a function that filters a list with several functions\r\n    :param List: sequence we want to filter\r\n    :param fList: list of functions we want to filter the list with\r\n    :return: a new list that has been filtered by all the functions in fList\r\n    \"\"\"\r\n    for i in range(len(fList)):\r\n        List = list(listFilter(List, fList[i]))\r\n    return List\r\n\r\n\r\n# ex 5 test\r\n# print(listFilterMulti([2, 4, 5, 6, 7, 13], [isPrime, isFib]))\r\n\r\n\r\n# ex6\r\ndef approx_eq(x, y, tolerance=1e-3):\r\n    \"\"\"\r\n    check if two numbers are approximately equal\r\n    :param x: first number\r\n    :param y: second number\r\n    :param tolerance: the value that decides if the numbers are close enough\r\n    :return: true -- if close enough\r\n             false -- if not close enough\r\n    \"\"\"\r\n    return abs(x - y) < tolerance\r\n\r\n\r\ndef Fixed_point(f, x):\r\n    \"\"\"\r\n    check if x has a fixed point in relation to the function f\r\n    :param f: function to check converging point\r\n    :param x: starting and improved guesses\r\n    :return: the number that the function converges to (fixed point)\r\n             or None if it cannot be found\r\n    \"\"\"\r\n    for i in range(20):\r\n        if approx_eq(x, f(x)):\r\n            return x\r\n        x = f(x)\r\n    return None\r\n\r\n\r\nprint(Fixed_point(lambda n: math.sqrt(n), 2))\r\n" }, { "alpha_fraction": 0.837837815284729, "alphanum_fraction": 0.837837815284729, "avg_line_length": 36, "blob_id": "a532b3898df0d69efd5de9cfb3edd7e4f8450f5d", "content_id": "f62c8df28e6a0d0b956344025a37916aa8fd7712", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 148, "license_type": "no_license", "max_line_length": 61, "num_lines": 4, "path": "/README.md", "repo_name": "eyalbi/college-python-projects", "src_encoding": "UTF-8", "text": "# college-python-projects\npython code to practice environments.\ncreate classes using only functions and environment modules.\npipeline programming.\n" }, { "alpha_fraction": 0.4706348776817322, "alphanum_fraction": 0.48860812187194824, "avg_line_length": 27.09384536743164, "blob_id": "395d9d91ceadbcab349e0b44c8a59be978244744", "content_id": "78224955dd377c8567ebb79572d995f4da55f414", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 18917, "license_type": "no_license", "max_line_length": 119, "num_lines": 650, "path": "/Python_Environment.py", "repo_name": "eyalbi/college-python-projects", "src_encoding": "UTF-8", "text": "\r\nfrom math import *\r\nfrom functools import reduce\r\nimport random\r\n'''\r\n\r\ndef Make_Power(x, y):\r\n    \"\"\"\r\n    a func that creates a power object with a base and power\r\n    :param x: base\r\n    :param y: power\r\n    :return: dispatch function that returns the base or power according to a given index\r\n    \"\"\"\r\n\r\n    def dispatch(i):\r\n        if i == 0:\r\n            return x\r\n        elif i == 1:\r\n            return y\r\n        else:\r\n            return \"error\"\r\n\r\n    return dispatch\r\n\r\n\r\ndef Base(x):\r\n    \"\"\"\r\n\r\n    :param x: make power object\r\n    :return: the base of x object\r\n    \"\"\"\r\n    return x(0)\r\n\r\n\r\ndef Power(x):\r\n    \"\"\"\r\n\r\n    :param x: make power object\r\n    :return: the power of x object\r\n    \"\"\"\r\n    return x(1)\r\n\r\n\r\ndef Print_Power(x):\r\n    \"\"\"\r\n\r\n    :param x: make power or int object\r\n    :return: prints according to the given parameter if make power prints base^power\r\n    
\"\"\"\r\n if type(x) is int:\r\n print(x)\r\n else:\r\n if Power(x) == 1:\r\n print(Base(x))\r\n elif Power(x) == 0:\r\n print(1)\r\n else:\r\n print(x(0), '^', x(1))\r\n\r\n\r\ndef Calc_Power(x):\r\n \"\"\"\r\n :param x:make power object\r\n :return: the number of the base**power\r\n \"\"\"\r\n Number = x(0) ** x(1)\r\n return Number\r\n\r\n\r\ndef Improve_Power(x):\r\n \"\"\"\r\n a function that check if we can improve are power object\r\n :param x: make power object\r\n :return: an improved make power object/ the old make power object\r\n \"\"\"\r\n oldBase = x(0)\r\n oldPower = x(1)\r\n if oldBase < 1:\r\n oldBase = int(1 / oldBase)\r\n oldPower *= -1\r\n newBase = 2\r\n newPower = 1\r\n for i in range(2, int(oldBase)):\r\n newBase = i\r\n newPower = 1\r\n while newBase < oldBase:\r\n newBase *= i\r\n newPower += 1\r\n if newBase == oldBase:\r\n return Make_Power(i, newPower * oldPower)\r\n return Make_Power(oldBase, oldPower)\r\n\r\n\r\ndef Mul_Power(x, y):\r\n \"\"\"\r\n a func that multiply two make power obj\r\n :param x: make power obj\r\n :param y: make power obj\r\n :return: int if the bases are different or make power obj if the bases are the same\r\n \"\"\"\r\n\r\n\r\n if (Base(x) != Base(y)):\r\n return Calc_Power(x) * Calc_Power(y)\r\n return Improve_Power(Make_Power(Calc_Power(x) * Calc_Power(y), 1))\r\n\r\n\r\ndef Div_Power(x, y):\r\n \"\"\"\r\n a func that divides between two make power obj\r\n :param x: make power obj\r\n :param y: make power obj\r\n :return: make power obj\r\n \"\"\"\r\n\r\n return Improve_Power(Make_Power(Calc_Power(x) / Calc_Power(y), 1))\r\n\r\n\r\nx = Make_Power(4, 5)\r\nx\r\nBase(x)\r\nPower(x)\r\nPrint_Power(x)\r\nPrint_Power(Improve_Power(x))\r\nPrint_Power(Mul_Power(Improve_Power(x), Make_Power(2, 5)))\r\ny = Make_Power(9, 2)\r\nPrint_Power(Improve_Power(y))\r\nPrint_Power(Mul_Power(x, y))\r\nPrint_Power(Mul_Power(Improve_Power(y), Make_Power(3, 5)))\r\nPrint_Power(Div_Power(Improve_Power(y), Make_Power(3, 5)))\r\nPrint_Power(Div_Power(Mul_Power(Make_Power(2, 3), Make_Power(2, 8)), Make_Power(2, 4)))\r\nPrint_Power(Make_Power(12,1))\r\nPrint_Power(Make_Power(12,0))\r\n\r\ndef make_tree(value, left, right):\r\n \"\"\"\r\n creats a tree object\r\n :param value: the key of a junction\r\n :param left: left son\r\n :param right: right son\r\n :return: dispatch function with different option(API)\r\n \"\"\"\r\n def Tdispatch(i):\r\n \"\"\"\r\n returns data of tree node by index\r\n :param i: index\r\n :return: value or left or right of a node\r\n \"\"\"\r\n if i == 0:\r\n return value\r\n elif i == 1:\r\n return left\r\n elif i == 2:\r\n return right\r\n else:\r\n return \"error\"\r\n\r\n return Tdispatch\r\n\r\n\r\ndef Value(x):\r\n \"\"\"\r\n return value of node\r\n :param x:tree node\r\n :return: value of x\r\n \"\"\"\r\n return x(0)\r\n\r\n\r\ndef Left(x):\r\n \"\"\"\r\n returns left son\r\n :param x: tree obj\r\n :return: left son of x\r\n \"\"\"\r\n return x(1)\r\n\r\n\r\ndef Right(x):\r\n \"\"\"\r\n returns right son\r\n :param x:tree obj\r\n :return: x right son\r\n \"\"\"\r\n return x(2)\r\n\r\n\r\ndef print_tree(root):\r\n \"\"\"\r\n prints tree in inorder way\r\n :param root: tree root\r\n :return: no return only print\r\n \"\"\"\r\n if root is None:\r\n return None\r\n print_tree(Left(root))\r\n print(Value(root), end=\" \")\r\n print_tree(Right(root))\r\n\r\n\r\ndef count_value(root, key):\r\n \"\"\"\r\n count how many times a value appears in a tree\r\n :param root: tree obj\r\n :param key: value to search\r\n :return: int that 
represent the amount of time the value appears in the tree\r\n \"\"\"\r\n if root is None:\r\n return 0\r\n if Value(root) == key:\r\n return 1 + count_value(Left(root), key) + count_value(Right(root), key)\r\n else:\r\n return count_value(Left(root), key) + count_value(Right(root), key)\r\n\r\n\r\ndef MaxTree(root):\r\n \"\"\"\r\n returns max value in a tree\r\n :param root: tree node\r\n :return: max tree value\r\n \"\"\"\r\n if root is None:\r\n return -100000000\r\n res = Value(root)\r\n Lres = MaxTree(Left(root))\r\n Rres = MaxTree((Right(root)))\r\n if Lres > res:\r\n res = Lres\r\n elif Rres > res:\r\n res = Rres\r\n return res\r\n\r\n\r\ndef MinTree(root):\r\n \"\"\"\r\n return min tree value\r\n :param root: tree node\r\n :return: minimum tree value\r\n \"\"\"\r\n if root is None:\r\n return 100000000\r\n res = Value(root)\r\n Lres = MinTree(Left(root))\r\n Rres = MinTree((Right(root)))\r\n if Lres < res:\r\n res = Lres\r\n elif Rres < res:\r\n res = Rres\r\n return res\r\n\r\n\r\ndef tree_BST(root):\r\n \"\"\"\r\n checks if a tree is a binary tree\r\n :param root: tree node\r\n :return: true/false\r\n \"\"\"\r\n if root is None:\r\n return True\r\n if Value(root) < MaxTree(Left(root)):\r\n return False\r\n if Value(root) > MinTree(Right(root)):\r\n return False\r\n return tree_BST(Left(root)) and tree_BST(Right(root))\r\n\r\n\r\ndef tree_depth(root):\r\n \"\"\"\r\n returns tree depth(root.height)\r\n :param root: tree node\r\n :return: tree height\r\n \"\"\"\r\n if root is None:\r\n return -1\r\n if (Left(root) is None) and (Right(root) is None):\r\n return 0\r\n return 1 + max(tree_depth(Left(root)), tree_depth(Right(root)))\r\n\r\n\r\ndef tree_balanced(root):\r\n \"\"\"\r\n checks if a tree is avl\r\n :param root: tree node\r\n :return: true/false\r\n \"\"\"\r\n if root is None:\r\n return True\r\n return abs(tree_depth(Left(root)) - tree_depth(Right(root))) <= 1 and tree_balanced(Left(root)) and tree_balanced(\r\n Right(root))\r\n\r\n\r\ntree1 = make_tree(12, make_tree(6, make_tree(8, None, None), None),\r\n make_tree(7, make_tree(8, None, None), make_tree(15, None, None)))\r\ntree2 = make_tree(12, make_tree(6, make_tree(3, make_tree(1, None, None), None),\r\n make_tree(8, make_tree(7, None, None), None)),\r\n make_tree(15, None, make_tree(20, make_tree(17, None, None), None)))\r\nprint(Value(tree1))\r\nprint(Value(Left(tree1)))\r\nprint(Value(Right(Left(tree2))))\r\nprint_tree(tree1)\r\nprint(end=\"\\n\")\r\nprint_tree(tree2)\r\nprint(count_value(tree1, 8))\r\nprint(tree_BST(tree1))\r\nprint(tree_BST(tree2))\r\nprint(tree_depth(tree1))\r\nprint(tree_depth(tree2))\r\nprint(tree_balanced(tree1))\r\nprint(tree_balanced(tree2))\r\n\r\n\r\ndef get_prices(name, products, sales):\r\n \"\"\"\r\n\r\n :param name:store name\r\n :param products: list of prod and prices\r\n :param sales: list of stores and discounts\r\n :return: a list of products with discount update on base of store name\r\n \"\"\"\r\n return tuple(map(lambda x: (x[0], x[1] - x[1] * tuple(filter(lambda x: x[0] == name, sales))[0][1]), products))\r\n\r\n\r\nproducts = (('p1', 1000), ('p2', 2000), ('p3', 5000), ('p4', 100))\r\nsales = (('s1', 0.2), ('s2', 0.3), ('s3', 0.1))\r\nprod = dict(products)\r\nsa = dict(sales)\r\nprint(sa)\r\n\r\nprint(get_prices('s1', products, sales))\r\n\r\n\r\ndef get_prices_dict(name, products, sales):\r\n \"\"\"\r\n return a dict with updated discounts\r\n :param name:store name\r\n :param products: products dictionary\r\n :param sales: store dictionary\r\n :return:\r\n \"\"\"\r\n return 
dict(map(lambda x: (x[0], x[1] - x[1] * list(filter(lambda x: x[0] == name, sales.items()))[0][1]),\r\n products.items()))\r\n\r\n\r\nprint(get_prices_dict('s1', prod, sa))\r\n'''\r\n'''\r\nproducts = (('p1', 1000), ('p2', 2000), ('p3', 5000), ('p4', 100))\r\nprod = dict(products)\r\nsales = {'s1': {'t1': 0.2, 't2': 0.1}, 's2': {'t1': 0.1, 't2': 0.2}, 's3': {'t1': 0.3, 't2': 0.5}}\r\ntypes = {'t1': ('p2', 'p4'), 't2': ('p1', 'p3')}\r\n'''\r\n'''\r\n\r\n\r\ndef get_price_by_type(name, prod, sales, types):\r\n \"\"\"\r\n calculate discount from given store by prod types\r\n :param name: store name\r\n :param prod: products dictionary\r\n :param sales: stores discount types\r\n :param types: prod types\r\n :return: a dicitionary with the updated discount for the given store\r\n \"\"\"\r\n return dict(map(lambda x: (x[0], x[1] - x[1] * {k: v for k, v in sales.items() if k == name}[name][\r\n tuple({y: z for y, z in types.items() if z[0] == x[0] or z[1] == x[0]})[0]]), prod.items()))\r\n\r\n\r\nprint(get_price_by_type('s1', prod, sales, types))\r\n\r\n\r\n\r\n\r\n\r\ndef accumulate_prices(name, prod, sales, types, add):\r\n \"\"\"\r\n caculate total prices of all stores products\r\n :param name: storename\r\n :param prod: products dict\r\n :param sales: stores by types\r\n :param types: type of productes\r\n :param add: function that add the number\r\n :return: a total price for all products\r\n \"\"\"\r\n return reduce(add, map(lambda x: x[1], map(lambda x: (x[0], x[1] - x[1] *\r\n {k: v for k, v in sales.items() if k == name}[name][tuple(\r\n {y: z for y, z in types.items() if\r\n z[0] == x[0] or z[1] == x[0]})[0]]), prod.items())))\r\n\r\n\r\nprint(accumulate_prices('s1', prod, sales, types, lambda x, y: x + y))\r\n\r\n\r\ndef coding():\r\n \"\"\"\r\n a func that create an objects the encode and decodes messages\r\n :return: dispatch API\r\n \"\"\"\r\n key = None\r\n\r\n def dispatch(*s):\r\n \"\"\"\r\n dispatch with an API\r\n :param s: s[0] message ,s[1...] 
argumantes for other activities\r\n :return:\r\n \"\"\"\r\n nonlocal key\r\n if s[0] == 'set_key':\r\n key = {'reverse_word': True, 'reverse_string': True, 'a': 'a', 'b': 'b', 'c': 'c', 'd': 'd', 'e': 'e',\r\n 'f': 'f', 'g': 'g',\r\n 'h': 'h', 'i': 'i', 'j': 'j', 'k': 'k', 'l': 'l', 'm': 'm', 'n': 'n', 'o': 'o', 'p': 'p', 'q': 'q',\r\n 'r': 'r', 's': 's',\r\n 't': 't', 'u': 'u', 'v': 'v', 'w': 'w', 'x': 'x', 'y': 'y', 'z': 'z'}\r\n\r\n alphabet = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's',\r\n 't', 'u', 'v', 'w', 'x', 'y', 'z']\r\n if s[1][1] == 'yes':\r\n key['reverse_word'] = True\r\n else:\r\n key['reverse_word'] = False\r\n if s[1][2] == 'yes':\r\n key['reverse_string'] = True\r\n else:\r\n key['reverse_string'] = False\r\n if s[1][0] != 0:\r\n for k in key:\r\n if k != 'reverse_string' and k != 'reverse_word':\r\n key[k] = alphabet[(alphabet.index(key[k]) + s[1][0]) % 26]\r\n else:\r\n for k in key:\r\n if k != 'reverse_string' and k != 'reverse_word':\r\n key[k] = alphabet[(alphabet.index(key[k]) + random.randint(1, 1000)) % 26]\r\n print('done')\r\n if s[0] == 'export_key':\r\n if key == None:\r\n print(\"key empty\")\r\n return key\r\n if s[0] == 'empty_key':\r\n key = None\r\n print('done')\r\n if s[0] == 'import_key':\r\n key = s[1]\r\n print('done')\r\n\r\n if s[0] == 'encoding':\r\n if key == None:\r\n return 'key empty'\r\n ogu = ' '\r\n for i in range(len(s[1])):\r\n if s[1][i] != ' ':\r\n ogu += key[s[1][i]]\r\n else:\r\n ogu += ' '\r\n ogu = ogu[1:]\r\n if key['reverse_string'] == True and key['reverse_word'] == True:\r\n return ogu[::-1]\r\n if key['reverse_string'] == False and key['reverse_word'] == False:\r\n return ogu\r\n if key['reverse_string'] == False and key['reverse_word'] == True:\r\n li = list(ogu.split(\" \"))\r\n for i in range(len(li)):\r\n li[i] = li[i][::-1]\r\n ogu = ' '.join(li)\r\n return ogu\r\n if key['reverse_string'] == True and key['reverse_word'] == False:\r\n ogu = ogu[::-1]\r\n li = list(ogu.split(\" \"))\r\n for i in range(len(li)):\r\n li[i] = li[i][::-1]\r\n ogu = ' '.join(li)\r\n return ogu\r\n if s[0] == 'decoding':\r\n if key is None:\r\n return 'key empty'\r\n tony = {v: k for k, v in key.items() if k != 'reverse_string' and k != 'reverse_word'}\r\n tony.update({'reverse_word': key['reverse_word']})\r\n tony.update({'reverse_string': key['reverse_string']})\r\n # key = tony\r\n ogu = ' '\r\n for i in range(len(s[1])):\r\n if s[1][i] != ' ':\r\n ogu += tony[s[1][i]]\r\n else:\r\n ogu += ' '\r\n ogu = ogu[1:]\r\n if tony['reverse_string'] == True and tony['reverse_word'] == True:\r\n return ogu[::-1]\r\n if tony['reverse_string'] == False and tony['reverse_word'] == False:\r\n return ogu\r\n if tony['reverse_string'] == False and tony['reverse_word'] == True:\r\n li = list(ogu.split(\" \"))\r\n for i in range(len(li)):\r\n li[i] = li[i][::-1]\r\n ogu = ' '.join(li)\r\n return ogu\r\n if tony['reverse_string'] == True and tony['reverse_word'] == False:\r\n ogu = ogu[::-1]\r\n li = list(ogu.split(\" \"))\r\n for i in range(len(li)):\r\n li[i] = li[i][::-1]\r\n ogu = ' '.join(li)\r\n return ogu\r\n\r\n return dispatch\r\n\r\n\r\ncode1 = coding()\r\ncode1('set_key', (-3, 'yes', 'yes'))\r\nkey = code1('export_key')\r\nprint(key)\r\ncstr = code1('encoding', 'the london is the capital of great britain')\r\nprint(cstr)\r\ndstr = code1('decoding', cstr)\r\nprint(dstr)\r\ncode2 = coding()\r\ndstr = code2('decoding', cstr)\r\nprint(dstr)\r\ncode2('import_key', key)\r\ndstr = code2('decoding', 
cstr)\r\nprint(dstr)\r\ncode2('empty_key')\r\ncode2('export_key')\r\n'''\r\n\r\ndef parking(price, reg, pri, vip):\r\n \"\"\"\r\n create an parking type object\r\n :param price: hour price\r\n :param reg: spaces in regular parking\r\n :param pri: spaces in priority parking\r\n :param vip: spaces in VIP parking\r\n :return: dictionary dispatch API\r\n \"\"\"\r\n cars = []\r\n Parks = {'Regular': reg, 'Priority': pri, 'VIP': vip}\r\n\r\n def start_parking(id, Ptype):\r\n \"\"\"\r\n add a car to a parking space\r\n :param id: car number\r\n :param Ptype: parking type\r\n :return:\r\n \"\"\"\r\n nonlocal cars\r\n nonlocal Parks\r\n if Parks[Ptype] > 0:\r\n cars.append([id, Ptype, 1])\r\n Parks[Ptype] -= 1\r\n else:\r\n print(Ptype + ' parking is full')\r\n\r\n def next_time():\r\n \"\"\"\r\n a func that adding another hour for every car\r\n :return: nothing\r\n \"\"\"\r\n nonlocal cars\r\n for i in range(len(cars)):\r\n cars[i][2] += 1\r\n\r\n def end_parking(Id):\r\n \"\"\"\r\n ends car parking by a given id\r\n :param Id: car that we want to end parking for..\r\n :return:notihng\r\n \"\"\"\r\n nonlocal cars\r\n nonlocal Parks\r\n for i in range(len(cars)):\r\n if Id == cars[i][0]:\r\n print('car: ' + str(cars[i][0]) + ', ' + 'parking type: ' + str(cars[i][1]) + ', parking time: ' + str(\r\n cars[i][2]))\r\n if cars[i][1] == 'Regular':\r\n print('payment: ', price * cars[i][2])\r\n Parks['Regular'] += 1\r\n elif cars[i][1] == 'Priority':\r\n Parks['Priority'] += 1\r\n print('payment: ', price * 2 * cars[i][2])\r\n else:\r\n Parks['VIP'] += 1\r\n print('payment: ', price * 3 * cars[i][2])\r\n cars.pop(i)\r\n exit(1)\r\n print('car not found')\r\n\r\n def print_parking(ptype):\r\n \"\"\"\r\n print the parking of a specific parking type\r\n :param ptype:\r\n :return: nothing\r\n \"\"\"\r\n nonlocal cars\r\n flag = False\r\n for i in range(len(cars)):\r\n if cars[i][1] == ptype:\r\n print('car: ' + str(cars[i][0]) + ', ' + 'parking time: ' + str(cars[i][2]))\r\n flag = True\r\n if not flag:\r\n print(ptype + ' park is empty')\r\n\r\n def print_list():\r\n \"\"\"\r\n create an option to prints all parking cars\r\n :return: a dispatch dict\r\n \"\"\"\r\n counter = 0\r\n\r\n def end():\r\n \"\"\"\r\n checks if all cars were printed\r\n :return: false/true\r\n \"\"\"\r\n nonlocal cars\r\n nonlocal counter\r\n if counter < len(cars):\r\n return False\r\n return True\r\n\r\n def next():\r\n \"\"\"\r\n iterates on car list\r\n :return: nothing\r\n \"\"\"\r\n nonlocal cars\r\n nonlocal counter\r\n print('car: ' + str(cars[counter][0]) + ', ' + 'parking type: ' + str(\r\n cars[counter][1]) + ', parking time: ' + str(cars[counter][2]))\r\n counter += 1\r\n\r\n return {'end': end, 'next': next}\r\n\r\n return {'print_list': print_list, 'print_parking': print_parking, 'next_time': next_time,\r\n 'start_parking': start_parking,\r\n 'end_parking': end_parking}\r\n\r\n\r\npark1 = parking(10, 3, 3, 3)\r\nprint(park1)\r\npark1['start_parking'](222, 'Regular')\r\npark1['start_parking'](223, 'Regular')\r\npark1['next_time']()\r\npark1['start_parking'](224, 'Regular')\r\npark1['start_parking'](225, 'Regular')\r\npark1['start_parking'](225, 'VIP')\r\nprn = park1['print_list']()\r\nprint(prn)\r\nwhile not prn['end']():\r\n prn['next']()\r\npark1['print_parking']('VIP')\r\npark1['end_parking'](100)\r\npark1['end_parking'](223)\r\npark1['print_parking']('Regular')\r\n\r\n\r\n" } ]
4
pkucmus/ttl
https://github.com/pkucmus/ttl
bfe4207daed239aee1f0b8485339839b41fcd989
5d3dac417f74800fc7692186d4a72dd5f34a2e7b
a71dca7b0f0d6c94a3d2cabb6016a1b9e2894c05
refs/heads/master
2021-08-23T22:44:00.388903
2017-12-06T23:17:12
2017-12-06T23:17:12
113,375,431
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5014605522155762, "alphanum_fraction": 0.510223925113678, "avg_line_length": 22.340909957885742, "blob_id": "93aa72e9759b7b5b096fb50a7ca92ae7dc1e6d72", "content_id": "0524ef20f09810634e9539d73021cf8f6e3261b1", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1027, "license_type": "permissive", "max_line_length": 78, "num_lines": 44, "path": "/setup.py", "repo_name": "pkucmus/ttl", "src_encoding": "UTF-8", "text": "import os\nfrom setuptools import find_packages, setup\n\nos.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))\n\n\nsetup(\n name='time-to-live',\n version='1.0.0',\n package_dir={'': 'src'},\n packages=find_packages('src'),\n include_package_data=True,\n license='BSD', # example license\n description='',\n long_description='',\n url='',\n author_email='[email protected]',\n extras_require={\n 'develop': [\n 'readline',\n 'pdbpp',\n 'ipdb',\n 'ipython',\n 'mock',\n 'coverage',\n ]\n },\n install_requires=[\n 'requests',\n 'click',\n ],\n entry_points={\n 'console_scripts': [\n 'ttl = ttl.main:cli',\n ],\n },\n classifiers=[\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.6.3',\n ],\n)\n" }, { "alpha_fraction": 0.535179853439331, "alphanum_fraction": 0.5383635759353638, "avg_line_length": 29.201923370361328, "blob_id": "f29f727152f41c52022516b73dffd0b145929b2b", "content_id": "11d942939f5faf889b33a23b112f56c7faaa439d", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3141, "license_type": "permissive", "max_line_length": 79, "num_lines": 104, "path": "/src/ttl/main.py", "repo_name": "pkucmus/ttl", "src_encoding": "UTF-8", "text": "import os\nimport time\nfrom datetime import datetime, timedelta\nfrom pathlib import Path\n\nimport click\n\n\nTTL_FILE_PATH = os.path.expanduser(os.path.join('~', '.config', '.ttl-{}'))\n\n\[email protected]()\ndef cli():\n pass\n\n\[email protected]()\[email protected]('who')\[email protected]('ticket')\ndef help(who, ticket):\n start_time = datetime.now()\n\n click.echo('Helping {} with ticket {}.'.format(who, ticket), color='white')\n\n try:\n while True:\n elapsed_time = datetime.now() - start_time\n click.echo(\n '\\rTime elapsed: {} [ctrl] + [c] to stop'.format(elapsed_time),\n nl=False\n )\n time.sleep(0.1)\n except KeyboardInterrupt:\n description = click.prompt('\\nDescription')\n save_entry(who, ticket, elapsed_time, description)\n\n\[email protected]()\[email protected]('date', default=datetime.now().strftime('%Y-%m-%d'))\ndef report(date):\n click.echo('Generating report for {}.'.format(date))\n entries = {}\n with ttl_file_open(date, read=True) as ttl_file:\n for line in ttl_file.readlines():\n who, ticket, elapsed_time, description = line.split(';;')\n hours, minutes, seconds = elapsed_time.split(':')\n seconds, microseconds = seconds.split('.')\n elapsed_timedelta = timedelta(\n hours=int(hours),\n minutes=int(minutes),\n seconds=int(seconds),\n microseconds=int(microseconds)\n )\n if ticket in entries:\n entries[ticket]['entries'].append(\n (who, elapsed_timedelta, description.strip())\n )\n entries[ticket]['total_time'] += elapsed_timedelta\n else:\n entries[ticket] = {\n 'entries': [\n (who, elapsed_timedelta, description.strip())\n ],\n 'total_time': elapsed_timedelta\n }\n\n for ticket, data 
in entries.items():\n        click.echo()\n        click.echo('{} https://jira/issue/{}'.format(ticket, ticket))\n        click.echo('{} : {}'.format(\n            strfdelta(\n                data['total_time'],\n                '{hours}:{minutes} ({hours}h {minutes}m)'\n            ),\n            ', '.join({desc[0] for desc in data['entries']})\n        ))\n        for desc in data['entries']:\n            click.echo(desc[2])\n\n\ndef ttl_file_open(date=datetime.now().strftime('%Y-%m-%d'), read=False):\n    file_path = TTL_FILE_PATH.format(date)\n    my_file = Path(file_path)\n    if read:\n        file_mode = 'r'\n    elif my_file.is_file():\n        file_mode = 'a+'\n    else:\n        file_mode = 'w+'\n    return open(file_path, file_mode)\n\n\ndef save_entry(who, ticket, elapsed_time, description):\n    with ttl_file_open() as ttl_file:\n        ttl_file.write(\n            '{};;{};;{};;{}\\n'.format(who, ticket, elapsed_time, description)\n        )\n\n\ndef strfdelta(tdelta, fmt):\n    data = {'days': tdelta.days}\n    data['hours'], rem = divmod(tdelta.seconds, 3600)\n    data['minutes'], data['seconds'] = divmod(rem, 60)\n    return fmt.format(**data)\n" }, { "alpha_fraction": 0.6782007217407227, "alphanum_fraction": 0.6955017447471619, "avg_line_length": 16, "blob_id": "6f3c6843023e11a2c6eba51d824c1f770972a797", "content_id": "03b05fc9d72be9a8bc45ee1c3104dd6f0ad507eb", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 289, "license_type": "permissive", "max_line_length": 93, "num_lines": 17, "path": "/README.md", "repo_name": "pkucmus/ttl", "src_encoding": "UTF-8", "text": "# Time To Live\n\nA possibly dumb attempt to track a Technical Team Lead's context switching throughout the day.\n\nIt was written in ~30 minutes - don't expect miracles :)\n\n## Installation\n\n```\n$ pip install git+https://github.com/pkucmus/ttl.git\n```\n\n## Usage\n\n```\n$ ttl help Ewa TIX-123\n```\n" } ]
3