text
stringlengths 5
22M
| id
stringlengths 12
177
| metadata
dict | __index_level_0__
int64 0
1.37k
|
---|---|---|---|
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="format-align-top" format="2">
<advance width="1200"/>
<unicode hex="F0755"/>
<note>
format-align-top
</note>
<outline>
<contour>
<point x="663" y="130" type="line"/>
<point x="663" y="517" type="line"/>
<point x="827" y="357" type="line"/>
<point x="917" y="447" type="line"/>
<point x="600" y="764" type="line"/>
<point x="283" y="447" type="line"/>
<point x="373" y="357" type="line"/>
<point x="537" y="517" type="line"/>
<point x="537" y="130" type="line"/>
</contour>
<contour>
<point x="20" y="1160" type="line"/>
<point x="1180" y="1160" type="line"/>
<point x="1180" y="1290" type="line"/>
<point x="20" y="1290" type="line"/>
</contour>
<contour>
<point x="20" y="903" type="line"/>
<point x="663" y="903" type="line"/>
<point x="663" y="1033" type="line"/>
<point x="20" y="1033" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/format-align-top.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/format-align-top.glif",
"repo_id": "cascadia-code",
"token_count": 485
}
| 524 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="format-italic" format="2">
<advance width="1200"/>
<unicode hex="F0277"/>
<note>
format-italic
</note>
<outline>
<contour>
<point x="1180" y="1387" type="line"/>
<point x="405" y="1387" type="line"/>
<point x="405" y="1097" type="line"/>
<point x="618" y="1097" type="line"/>
<point x="292" y="323" type="line"/>
<point x="20" y="323" type="line"/>
<point x="20" y="33" type="line"/>
<point x="795" y="33" type="line"/>
<point x="795" y="323" type="line"/>
<point x="582" y="323" type="line"/>
<point x="908" y="1097" type="line"/>
<point x="1180" y="1097" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/format-italic.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/format-italic.glif",
"repo_id": "cascadia-code",
"token_count": 347
}
| 525 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="format-text-wrapping-overflow" format="2">
<advance width="1200"/>
<unicode hex="F0D0F"/>
<note>
format-text-wrapping-overflow
</note>
<outline>
<contour>
<point x="204" y="1420" type="line"/>
<point x="49" y="1420" type="line"/>
<point x="49" y="0" type="line"/>
<point x="204" y="0" type="line"/>
</contour>
<contour>
<point x="759" y="947" type="line"/>
<point x="759" y="1420" type="line"/>
<point x="600" y="1420" type="line"/>
<point x="600" y="947" type="line"/>
</contour>
<contour>
<point x="759" y="0" type="line"/>
<point x="759" y="473" type="line"/>
<point x="600" y="473" type="line"/>
<point x="600" y="0" type="line"/>
</contour>
<contour>
<point x="914" y="473" type="line"/>
<point x="1151" y="710" type="line"/>
<point x="914" y="947" type="line"/>
<point x="914" y="788" type="line"/>
<point x="363" y="788" type="line"/>
<point x="363" y="632" type="line"/>
<point x="914" y="632" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/format-text-wrapping-overflow.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/format-text-wrapping-overflow.glif",
"repo_id": "cascadia-code",
"token_count": 554
}
| 526 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="gamepad-down" format="2">
<advance width="1200"/>
<unicode hex="F0E39"/>
<note>
gamepad-down
</note>
<outline>
<contour>
<point x="774" y="1290" type="line"/>
<point x="426" y="1290" type="line"/>
<point x="426" y="971" type="line"/>
<point x="600" y="797" type="line"/>
<point x="774" y="971" type="line"/>
</contour>
<contour>
<point x="339" y="884" type="line"/>
<point x="20" y="884" type="line"/>
<point x="20" y="536" type="line"/>
<point x="339" y="536" type="line"/>
<point x="513" y="710" type="line"/>
</contour>
<contour>
<point x="1180" y="884" type="line"/>
<point x="861" y="884" type="line"/>
<point x="687" y="710" type="line"/>
<point x="861" y="536" type="line"/>
<point x="1180" y="536" type="line"/>
</contour>
<contour>
<point x="774" y="449" type="line"/>
<point x="600" y="623" type="line"/>
<point x="426" y="449" type="line"/>
<point x="426" y="130" type="line"/>
<point x="774" y="130" type="line"/>
</contour>
<contour>
<point x="543" y="244" type="line"/>
<point x="543" y="361" type="line"/>
<point x="657" y="361" type="line"/>
<point x="657" y="244" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/gamepad-down.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/gamepad-down.glif",
"repo_id": "cascadia-code",
"token_count": 653
}
| 527 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="gate-alert" format="2">
<advance width="1200"/>
<unicode hex="F17F8"/>
<note>
gate-alert
</note>
<outline>
<contour>
<point x="1180" y="392" type="line"/>
<point x="1180" y="506" type="line"/>
<point x="1063" y="506" type="line"/>
<point x="1063" y="392" type="line"/>
</contour>
<contour>
<point x="1180" y="623" type="line"/>
<point x="1180" y="971" type="line"/>
<point x="1063" y="971" type="line"/>
<point x="1063" y="623" type="line"/>
</contour>
<contour>
<point x="832" y="740" type="line"/>
<point x="949" y="740" type="line"/>
<point x="949" y="854" type="line"/>
<point x="832" y="854" type="line"/>
<point x="832" y="1145" type="line"/>
<point x="715" y="1145" type="line"/>
<point x="715" y="854" type="line"/>
<point x="601" y="854" type="line"/>
<point x="601" y="1145" type="line"/>
<point x="485" y="1145" type="line"/>
<point x="485" y="854" type="line"/>
<point x="368" y="854" type="line"/>
<point x="368" y="1088" type="line"/>
<point x="254" y="1088" type="line"/>
<point x="254" y="854" type="line"/>
<point x="137" y="854" type="line"/>
<point x="137" y="971" type="line"/>
<point x="20" y="971" type="line"/>
<point x="20" y="275" type="line"/>
<point x="137" y="275" type="line"/>
<point x="137" y="392" type="line"/>
<point x="254" y="392" type="line"/>
<point x="254" y="275" type="line"/>
<point x="368" y="275" type="line"/>
<point x="368" y="392" type="line"/>
<point x="485" y="392" type="line"/>
<point x="485" y="275" type="line"/>
<point x="601" y="275" type="line"/>
<point x="601" y="392" type="line"/>
<point x="715" y="392" type="line"/>
<point x="715" y="275" type="line"/>
<point x="832" y="275" type="line"/>
<point x="832" y="392" type="line"/>
<point x="949" y="392" type="line"/>
<point x="949" y="506" type="line"/>
<point x="832" y="506" type="line"/>
</contour>
<contour>
<point x="254" y="740" type="line"/>
<point x="254" y="506" type="line"/>
<point x="137" y="506" type="line"/>
<point x="137" y="740" type="line"/>
</contour>
<contour>
<point x="485" y="740" type="line"/>
<point x="485" y="506" type="line"/>
<point x="368" y="506" type="line"/>
<point x="368" y="740" type="line"/>
</contour>
<contour>
<point x="715" y="740" type="line"/>
<point x="715" y="506" type="line"/>
<point x="601" y="506" type="line"/>
<point x="601" y="740" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/gate-alert.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/gate-alert.glif",
"repo_id": "cascadia-code",
"token_count": 1335
}
| 528 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="glass-stange" format="2">
<advance width="1200"/>
<unicode hex="F02A7"/>
<note>
glass-stange
</note>
<outline>
<contour>
<point x="317" y="0" type="line"/>
<point x="883" y="0" type="line"/>
<point x="883" y="1420" type="line"/>
<point x="317" y="1420" type="line"/>
</contour>
<contour>
<point x="743" y="1280" type="line"/>
<point x="743" y="1067" type="line"/>
<point x="457" y="1067" type="line"/>
<point x="457" y="1280" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/glass-stange.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/glass-stange.glif",
"repo_id": "cascadia-code",
"token_count": 283
}
| 529 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="google-earth" format="2">
<advance width="1200"/>
<unicode hex="F02B7"/>
<note>
google-earth
</note>
<outline>
<contour>
<point x="674" y="587"/>
<point x="859" y="489"/>
<point x="995" y="522"/>
<point x="1036" y="577" type="qcurve" smooth="yes"/>
<point x="1052" y="596"/>
<point x="1044" y="571" type="qcurve" smooth="yes"/>
<point x="1033" y="538"/>
<point x="1003" y="479" type="qcurve"/>
<point x="905" y="329"/>
<point x="712" y="356" type="qcurve"/>
<point x="562" y="389"/>
<point x="450" y="538" type="qcurve" smooth="yes"/>
<point x="385" y="623"/>
<point x="257" y="675"/>
<point x="178" y="626"/>
<point x="167" y="571" type="qcurve" smooth="yes"/>
<point x="164" y="560"/>
<point x="159" y="560"/>
<point x="156" y="571" type="qcurve" smooth="yes"/>
<point x="134" y="639"/>
<point x="134" y="710" type="qcurve" smooth="yes"/>
<point x="134" y="721" type="line" smooth="yes"/>
<point x="134" y="789"/>
<point x="172" y="838" type="qcurve" smooth="yes"/>
<point x="216" y="895"/>
<point x="347" y="925"/>
<point x="521" y="822"/>
<point x="600" y="702" type="qcurve" smooth="yes"/>
</contour>
<contour>
<point x="938" y="860"/>
<point x="1003" y="827"/>
<point x="1047" y="838" type="qcurve"/>
<point x="1066" y="781"/>
<point x="1066" y="724" type="qcurve"/>
<point x="1038" y="699"/>
<point x="973" y="677"/>
<point x="886" y="694"/>
<point x="791" y="775"/>
<point x="747" y="849" type="qcurve" smooth="yes"/>
<point x="546" y="1178"/>
<point x="328" y="1072" type="qcurve" smooth="yes"/>
<point x="306" y="1061"/>
<point x="303" y="1064"/>
<point x="322" y="1078" type="qcurve" smooth="yes"/>
<point x="469" y="1184"/>
<point x="595" y="1165" type="qcurve" smooth="yes"/>
<point x="755" y="1135"/>
<point x="889" y="933" type="qcurve" smooth="yes"/>
</contour>
<contour>
<point x="363" y="1290"/>
<point x="20" y="947"/>
<point x="20" y="473"/>
<point x="363" y="130"/>
<point x="837" y="130"/>
<point x="1180" y="473"/>
<point x="1180" y="947"/>
<point x="837" y="1290"/>
<point x="600" y="1290" type="qcurve" smooth="yes"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/google-earth.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/google-earth.glif",
"repo_id": "cascadia-code",
"token_count": 1239
}
| 530 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="home-circle" format="2">
<advance width="1200"/>
<unicode hex="F07D2"/>
<note>
home-circle
</note>
<outline>
<contour>
<point x="1093" y="1038"/>
<point x="1009" y="1119" type="qcurve" smooth="yes"/>
<point x="928" y="1203"/>
<point x="717" y="1290"/>
<point x="483" y="1290"/>
<point x="272" y="1203"/>
<point x="109" y="1038"/>
<point x="20" y="827"/>
<point x="20" y="593"/>
<point x="107" y="382"/>
<point x="272" y="219"/>
<point x="483" y="130"/>
<point x="717" y="130"/>
<point x="928" y="217"/>
<point x="1091" y="382"/>
<point x="1180" y="593"/>
<point x="1180" y="827"/>
</contour>
<contour>
<point x="1034" y="710" type="line"/>
<point x="890" y="710" type="line"/>
<point x="890" y="363" type="line"/>
<point x="687" y="363" type="line"/>
<point x="687" y="653" type="line"/>
<point x="513" y="653" type="line"/>
<point x="513" y="363" type="line"/>
<point x="310" y="363" type="line"/>
<point x="310" y="710" type="line"/>
<point x="196" y="710" type="line"/>
<point x="600" y="1114" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/home-circle.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/home-circle.glif",
"repo_id": "cascadia-code",
"token_count": 637
}
| 531 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="home-floor-l" format="2">
<advance width="1200"/>
<unicode hex="F0D86"/>
<note>
home-floor-l
</note>
<outline>
<contour>
<point x="1180" y="681" type="line"/>
<point x="600" y="1204" type="line"/>
<point x="20" y="681" type="line"/>
<point x="194" y="681" type="line"/>
<point x="194" y="216" type="line"/>
<point x="1006" y="216" type="line"/>
<point x="1006" y="681" type="line"/>
</contour>
<contour>
<point x="426" y="333" type="line"/>
<point x="426" y="913" type="line"/>
<point x="543" y="913" type="line"/>
<point x="543" y="450" type="line"/>
<point x="774" y="450" type="line"/>
<point x="774" y="333" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/home-floor-l.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/home-floor-l.glif",
"repo_id": "cascadia-code",
"token_count": 385
}
| 532 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="home-variant-outline" format="2">
<advance width="1200"/>
<unicode hex="F0BA7"/>
<note>
home-variant-outline
</note>
<outline>
<contour>
<point x="383" y="205" type="line"/>
<point x="166" y="205" type="line"/>
<point x="166" y="856" type="line"/>
<point x="600" y="1181" type="line"/>
<point x="1034" y="856" type="line"/>
<point x="1034" y="205" type="line"/>
<point x="817" y="205" type="line"/>
<point x="817" y="639" type="line"/>
<point x="383" y="639" type="line"/>
</contour>
<contour>
<point x="1180" y="59" type="line"/>
<point x="1180" y="927" type="line"/>
<point x="600" y="1361" type="line"/>
<point x="20" y="927" type="line"/>
<point x="20" y="59" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/home-variant-outline.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/home-variant-outline.glif",
"repo_id": "cascadia-code",
"token_count": 415
}
| 533 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="hospital-building" format="2">
<advance width="1200"/>
<unicode hex="F02E1"/>
<note>
hospital-building
</note>
<outline>
<contour>
<point x="483" y="130" type="line"/>
<point x="483" y="419" type="line"/>
<point x="717" y="419" type="line"/>
<point x="717" y="130" type="line"/>
<point x="1180" y="130" type="line"/>
<point x="1180" y="1001" type="line" smooth="yes"/>
<point x="1180" y="1026"/>
<point x="1147" y="1059"/>
<point x="1123" y="1059" type="qcurve" smooth="yes"/>
<point x="891" y="1059" type="line"/>
<point x="891" y="1290" type="line"/>
<point x="309" y="1290" type="line"/>
<point x="309" y="1059" type="line"/>
<point x="77" y="1059" type="line" smooth="yes"/>
<point x="53" y="1059"/>
<point x="20" y="1026"/>
<point x="20" y="1001" type="qcurve" smooth="yes"/>
<point x="20" y="130" type="line"/>
</contour>
<contour>
<point x="543" y="1176" type="line"/>
<point x="543" y="1059" type="line"/>
<point x="657" y="1059" type="line"/>
<point x="657" y="1176" type="line"/>
<point x="774" y="1176" type="line"/>
<point x="774" y="827" type="line"/>
<point x="657" y="827" type="line"/>
<point x="657" y="941" type="line"/>
<point x="543" y="941" type="line"/>
<point x="543" y="827" type="line"/>
<point x="426" y="827" type="line"/>
<point x="426" y="1176" type="line"/>
</contour>
<contour>
<point x="134" y="419" type="line"/>
<point x="369" y="419" type="line"/>
<point x="369" y="244" type="line"/>
<point x="134" y="244" type="line"/>
</contour>
<contour>
<point x="134" y="710" type="line"/>
<point x="369" y="710" type="line"/>
<point x="369" y="536" type="line"/>
<point x="134" y="536" type="line"/>
</contour>
<contour>
<point x="831" y="419" type="line"/>
<point x="1066" y="419" type="line"/>
<point x="1066" y="244" type="line"/>
<point x="831" y="244" type="line"/>
</contour>
<contour>
<point x="831" y="710" type="line"/>
<point x="1066" y="710" type="line"/>
<point x="1066" y="536" type="line"/>
<point x="831" y="536" type="line"/>
</contour>
<contour>
<point x="483" y="710" type="line"/>
<point x="717" y="710" type="line"/>
<point x="717" y="536" type="line"/>
<point x="483" y="536" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/hospital-building.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/hospital-building.glif",
"repo_id": "cascadia-code",
"token_count": 1242
}
| 534 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="invert-colors" format="2">
<advance width="1200"/>
<unicode hex="F0301"/>
<note>
invert-colors
</note>
<outline>
<contour>
<point x="600" y="1210" type="line"/>
<point x="600" y="145" type="line"/>
<point x="600" y="145" type="line"/>
<point x="510" y="145"/>
<point x="352" y="214"/>
<point x="290" y="276" type="qcurve" smooth="yes"/>
<point x="159" y="403"/>
<point x="159" y="586" type="qcurve" smooth="yes"/>
<point x="159" y="676"/>
<point x="224" y="838"/>
<point x="290" y="900" type="qcurve" smooth="yes"/>
</contour>
<contour>
<point x="1127" y="889"/>
<point x="1017" y="1003" type="qcurve" smooth="yes"/>
<point x="600" y="1420" type="line"/>
<point x="183" y="1003" type="line" smooth="yes"/>
<point x="73" y="889"/>
<point x="-7" y="586"/>
<point x="73" y="283"/>
<point x="183" y="172" type="qcurve" smooth="yes"/>
<point x="269" y="86"/>
<point x="486" y="0"/>
<point x="714" y="0"/>
<point x="931" y="86"/>
<point x="1017" y="172" type="qcurve" smooth="yes"/>
<point x="1127" y="283"/>
<point x="1207" y="586"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/invert-colors.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/invert-colors.glif",
"repo_id": "cascadia-code",
"token_count": 628
}
| 535 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="language-kotlin" format="2">
<advance width="1200"/>
<unicode hex="F1219"/>
<note>
language-kotlin
</note>
<outline>
<contour>
<point x="20" y="130" type="line"/>
<point x="1180" y="130" type="line"/>
<point x="600" y="710" type="line"/>
<point x="1180" y="1290" type="line"/>
<point x="20" y="1290" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/language-kotlin.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/language-kotlin.glif",
"repo_id": "cascadia-code",
"token_count": 204
}
| 536 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="laravel" format="2">
<advance width="1200"/>
<unicode hex="F0AD0"/>
<note>
laravel
</note>
<outline>
<contour>
<point x="1177" y="1043" type="line"/>
<point x="1174" y="1046" type="line"/>
<point x="947" y="1178" type="line" smooth="yes"/>
<point x="936" y="1184"/>
<point x="928" y="1178" type="qcurve" smooth="yes"/>
<point x="698" y="1043" type="line"/>
<point x="695" y="1038" type="line"/>
<point x="692" y="786" type="line"/>
<point x="508" y="679" type="line"/>
<point x="505" y="1170" type="line"/>
<point x="499" y="1175" type="line"/>
<point x="272" y="1304" type="line" smooth="yes"/>
<point x="264" y="1310"/>
<point x="253" y="1304" type="qcurve" smooth="yes"/>
<point x="23" y="1170" type="line"/>
<point x="20" y="1164" type="line"/>
<point x="20" y="391" type="line" smooth="yes"/>
<point x="20" y="379"/>
<point x="28" y="374" type="qcurve" smooth="yes"/>
<point x="477" y="116" type="line"/>
<point x="480" y="113" type="line"/>
<point x="494" y="113" type="line"/>
<point x="947" y="374" type="line" smooth="yes"/>
<point x="956" y="379"/>
<point x="956" y="391" type="qcurve" smooth="yes"/>
<point x="956" y="634" type="line"/>
<point x="1172" y="760" type="line"/>
<point x="1180" y="763"/>
<point x="1180" y="774" type="qcurve" smooth="yes"/>
<point x="1180" y="1032" type="line" smooth="yes"/>
<point x="1180" y="1035"/>
<point x="1180" y="1038" type="qcurve"/>
</contour>
<contour>
<point x="956" y="679" type="line"/>
<point x="956" y="892" type="line"/>
<point x="1144" y="999" type="line"/>
<point x="1144" y="786" type="line"/>
</contour>
<contour>
<point x="508" y="164" type="line"/>
<point x="508" y="379" type="line"/>
<point x="919" y="615" type="line"/>
<point x="919" y="399" type="line"/>
</contour>
<contour>
<point x="244" y="1021" type="line"/>
<point x="247" y="505" type="line"/>
<point x="253" y="503" type="line"/>
<point x="253" y="500"/>
<point x="253" y="500" type="qcurve"/>
<point x="468" y="379" type="line"/>
<point x="468" y="164" type="line"/>
<point x="56" y="399" type="line"/>
<point x="56" y="1127" type="line"/>
</contour>
<contour>
<point x="451" y="1158" type="line"/>
<point x="264" y="1052" type="line"/>
<point x="76" y="1158" type="line"/>
<point x="264" y="1268" type="line"/>
</contour>
<contour>
<point x="281" y="550" type="line"/>
<point x="281" y="1021" type="line"/>
<point x="468" y="1127" type="line"/>
<point x="468" y="657" type="line"/>
</contour>
<contour>
<point x="1124" y="1032" type="line"/>
<point x="936" y="926" type="line"/>
<point x="749" y="1032" type="line"/>
<point x="936" y="1141" type="line"/>
</contour>
<contour>
<point x="919" y="679" type="line"/>
<point x="732" y="786" type="line"/>
<point x="732" y="999" type="line"/>
<point x="919" y="892" type="line"/>
</contour>
<contour>
<point x="300" y="517" type="line"/>
<point x="712" y="755" type="line"/>
<point x="900" y="646" type="line"/>
<point x="488" y="410" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/laravel.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/laravel.glif",
"repo_id": "cascadia-code",
"token_count": 1687
}
| 537 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="layers-triple-outline" format="2">
<advance width="1200"/>
<unicode hex="F0F59"/>
<note>
layers-triple-outline
</note>
<outline>
<contour>
<point x="165" y="779" type="line"/>
<point x="68" y="707" type="line"/>
<point x="600" y="291" type="line"/>
<point x="1133" y="707" type="line"/>
<point x="1035" y="782" type="line"/>
<point x="600" y="441" type="line"/>
</contour>
<contour>
<point x="1133" y="1007" type="line"/>
<point x="600" y="1420" type="line"/>
<point x="68" y="1007" type="line"/>
<point x="600" y="591" type="line"/>
</contour>
<contour>
<point x="938" y="1007" type="line"/>
<point x="600" y="741" type="line"/>
<point x="262" y="1007" type="line"/>
<point x="600" y="1270" type="line"/>
</contour>
<contour>
<point x="165" y="488" type="line"/>
<point x="68" y="413" type="line"/>
<point x="600" y="0" type="line"/>
<point x="1133" y="413" type="line"/>
<point x="1035" y="488" type="line"/>
<point x="600" y="150" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/layers-triple-outline.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/layers-triple-outline.glif",
"repo_id": "cascadia-code",
"token_count": 564
}
| 538 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="led-strip-variant-off" format="2">
<advance width="1200"/>
<unicode hex="F1A4B"/>
<note>
led-strip-variant-off
</note>
<outline>
<contour>
<point x="1110" y="130" type="line"/>
<point x="1180" y="200" type="line"/>
<point x="90" y="1290" type="line"/>
<point x="20" y="1220" type="line"/>
<point x="100" y="1137" type="line"/>
<point x="69" y="1005" type="line"/>
<point x="292" y="948" type="line"/>
<point x="460" y="780" type="line"/>
<point x="302" y="718" type="line"/>
<point x="595" y="643" type="line"/>
<point x="673" y="568" type="line"/>
<point x="214" y="682" type="line"/>
<point x="162" y="467" type="line"/>
<point x="978" y="262" type="line"/>
</contour>
<contour>
<point x="416" y="575" type="line"/>
<point x="388" y="467" type="line"/>
<point x="282" y="493" type="line"/>
<point x="307" y="601" type="line"/>
</contour>
<contour>
<point x="628" y="521" type="line"/>
<point x="603" y="412" type="line"/>
<point x="496" y="441" type="line"/>
<point x="522" y="547" type="line"/>
</contour>
<contour>
<point x="828" y="412" type="line"/>
<point x="815" y="360" type="line"/>
<point x="709" y="386" type="line"/>
<point x="735" y="495" type="line"/>
<point x="748" y="490" type="line"/>
</contour>
<contour>
<point x="1154" y="366" type="line"/>
<point x="1172" y="441" type="line"/>
<point x="1048" y="472" type="line"/>
</contour>
<contour>
<point x="939" y="728" type="line"/>
<point x="742" y="777" type="line"/>
<point x="833" y="687" type="line"/>
</contour>
<contour>
<point x="437" y="1085" type="line"/>
<point x="533" y="1060" type="line"/>
<point x="520" y="1003" type="line"/>
<point x="665" y="855" type="line"/>
<point x="1027" y="764" type="line"/>
<point x="1079" y="979" type="line"/>
<point x="362" y="1161" type="line"/>
</contour>
<contour>
<point x="825" y="871" type="line"/>
<point x="854" y="979" type="line"/>
<point x="960" y="953" type="line"/>
<point x="934" y="845" type="line"/>
</contour>
<contour>
<point x="613" y="925" type="line"/>
<point x="639" y="1034" type="line"/>
<point x="745" y="1005" type="line"/>
<point x="719" y="899" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/led-strip-variant-off.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/led-strip-variant-off.glif",
"repo_id": "cascadia-code",
"token_count": 1228
}
| 539 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="lightning-bolt" format="2">
<advance width="1200"/>
<unicode hex="F140B"/>
<note>
lightning-bolt
</note>
<outline>
<contour>
<point x="537" y="0" type="line"/>
<point x="987" y="903" type="line"/>
<point x="663" y="903" type="line"/>
<point x="663" y="1420" type="line"/>
<point x="213" y="517" type="line"/>
<point x="537" y="517" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/lightning-bolt.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/lightning-bolt.glif",
"repo_id": "cascadia-code",
"token_count": 221
}
| 540 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="menu-left" format="2">
<advance width="1200"/>
<unicode hex="F035E"/>
<note>
menu-left
</note>
<outline>
<contour>
<point x="955" y="0" type="line"/>
<point x="955" y="1420" type="line"/>
<point x="245" y="710" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/menu-left.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/menu-left.glif",
"repo_id": "cascadia-code",
"token_count": 159
}
| 541 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="message-lock" format="2">
<advance width="1200"/>
<unicode hex="F0FCC"/>
<note>
message-lock
</note>
<outline>
<contour>
<point x="1049" y="1290"/>
<point x="994" y="1290" type="qcurve" smooth="yes"/>
<point x="940" y="1290"/>
<point x="863" y="1213"/>
<point x="863" y="1159" type="qcurve" smooth="yes"/>
<point x="863" y="1132" type="line"/>
<point x="841" y="1132"/>
<point x="811" y="1102"/>
<point x="811" y="1080" type="qcurve" smooth="yes"/>
<point x="811" y="867" type="line" smooth="yes"/>
<point x="811" y="845"/>
<point x="841" y="815"/>
<point x="863" y="815" type="qcurve" smooth="yes"/>
<point x="1128" y="815" type="line" smooth="yes"/>
<point x="1150" y="815"/>
<point x="1180" y="845"/>
<point x="1180" y="867" type="qcurve" smooth="yes"/>
<point x="1180" y="1080" type="line" smooth="yes"/>
<point x="1180" y="1102"/>
<point x="1150" y="1132"/>
<point x="1128" y="1132" type="qcurve"/>
<point x="1128" y="1159" type="line" smooth="yes"/>
<point x="1128" y="1213"/>
</contour>
<contour>
<point x="962" y="1238"/>
<point x="994" y="1238" type="qcurve" smooth="yes"/>
<point x="1029" y="1238"/>
<point x="1074" y="1191"/>
<point x="1074" y="1159" type="qcurve" smooth="yes"/>
<point x="1074" y="1132" type="line"/>
<point x="915" y="1132" type="line"/>
<point x="915" y="1159" type="line" smooth="yes"/>
<point x="915" y="1191"/>
</contour>
<contour>
<point x="705" y="1184" type="line"/>
<point x="124" y="1184" type="line" smooth="yes"/>
<point x="82" y="1184"/>
<point x="20" y="1122"/>
<point x="20" y="1080" type="qcurve" smooth="yes"/>
<point x="20" y="130" type="line"/>
<point x="230" y="340" type="line"/>
<point x="970" y="340" type="line" smooth="yes"/>
<point x="1012" y="340"/>
<point x="1074" y="402"/>
<point x="1074" y="447" type="qcurve" smooth="yes"/>
<point x="1074" y="709" type="line"/>
<point x="811" y="709" type="line" smooth="yes"/>
<point x="767" y="709"/>
<point x="705" y="771"/>
<point x="705" y="815" type="qcurve" smooth="yes"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/message-lock.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/message-lock.glif",
"repo_id": "cascadia-code",
"token_count": 1156
}
| 542 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="molecule-co2" format="2">
<advance width="1200"/>
<unicode hex="F07E4"/>
<note>
molecule-co2
</note>
<outline>
<contour>
<point x="353" y="1075" type="line"/>
<point x="154" y="1075" type="line" smooth="yes"/>
<point x="98" y="1075"/>
<point x="20" y="998"/>
<point x="20" y="942" type="qcurve" smooth="yes"/>
<point x="20" y="544" type="line" smooth="yes"/>
<point x="20" y="488"/>
<point x="98" y="410"/>
<point x="154" y="410" type="qcurve" smooth="yes"/>
<point x="353" y="410" type="line"/>
<point x="353" y="544" type="line"/>
<point x="154" y="544" type="line"/>
<point x="154" y="942" type="line"/>
<point x="353" y="942" type="line"/>
</contour>
<contour>
<point x="682" y="1075" type="qcurve" smooth="yes"/>
<point x="552" y="1075" type="line" smooth="yes"/>
<point x="496" y="1075"/>
<point x="418" y="998"/>
<point x="418" y="942" type="qcurve" smooth="yes"/>
<point x="418" y="544" type="line" smooth="yes"/>
<point x="418" y="488"/>
<point x="496" y="410"/>
<point x="552" y="410" type="qcurve" smooth="yes"/>
<point x="682" y="410" type="line" smooth="yes"/>
<point x="738" y="410"/>
<point x="816" y="488"/>
<point x="816" y="544" type="qcurve" smooth="yes"/>
<point x="816" y="942" type="line" smooth="yes"/>
<point x="816" y="998"/>
<point x="738" y="1075"/>
</contour>
<contour>
<point x="552" y="544" type="line"/>
<point x="552" y="942" type="line"/>
<point x="682" y="942" type="line"/>
<point x="682" y="544" type="line"/>
</contour>
<contour>
<point x="1080" y="842" type="qcurve" smooth="yes"/>
<point x="881" y="842" type="line"/>
<point x="881" y="743" type="line"/>
<point x="1080" y="743" type="line"/>
<point x="1080" y="643" type="line"/>
<point x="981" y="643" type="line" smooth="yes"/>
<point x="941" y="643"/>
<point x="881" y="584"/>
<point x="881" y="544" type="qcurve" smooth="yes"/>
<point x="881" y="345" type="line"/>
<point x="1180" y="345" type="line"/>
<point x="1180" y="444" type="line"/>
<point x="981" y="444" type="line"/>
<point x="981" y="544" type="line"/>
<point x="1080" y="544" type="line" smooth="yes"/>
<point x="1124" y="544"/>
<point x="1180" y="603"/>
<point x="1180" y="643" type="qcurve" smooth="yes"/>
<point x="1180" y="743" type="line" smooth="yes"/>
<point x="1180" y="783"/>
<point x="1124" y="842"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/molecule-co2.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/molecule-co2.glif",
"repo_id": "cascadia-code",
"token_count": 1312
}
| 543 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="monitor-small" format="2">
<advance width="1200"/>
<unicode hex="F1876"/>
<note>
monitor-small
</note>
<outline>
<contour>
<point x="1104" y="1353"/>
<point x="1050" y="1353" type="qcurve" smooth="yes"/>
<point x="150" y="1353" type="line" smooth="yes"/>
<point x="96" y="1353"/>
<point x="20" y="1278"/>
<point x="20" y="1227" type="qcurve" smooth="yes"/>
<point x="20" y="453" type="line" smooth="yes"/>
<point x="20" y="399"/>
<point x="96" y="323"/>
<point x="150" y="323" type="qcurve" smooth="yes"/>
<point x="470" y="323" type="line"/>
<point x="470" y="193" type="line"/>
<point x="343" y="193" type="line"/>
<point x="343" y="67" type="line"/>
<point x="857" y="67" type="line"/>
<point x="857" y="193" type="line"/>
<point x="730" y="193" type="line"/>
<point x="730" y="323" type="line"/>
<point x="1050" y="323" type="line" smooth="yes"/>
<point x="1104" y="323"/>
<point x="1180" y="399"/>
<point x="1180" y="453" type="qcurve" smooth="yes"/>
<point x="1180" y="1227" type="line" smooth="yes"/>
<point x="1180" y="1278"/>
</contour>
<contour>
<point x="1050" y="1227" type="line"/>
<point x="1050" y="453" type="line"/>
<point x="150" y="453" type="line"/>
<point x="150" y="1227" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/monitor-small.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/monitor-small.glif",
"repo_id": "cascadia-code",
"token_count": 702
}
| 544 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="nature" format="2">
<advance width="1200"/>
<unicode hex="F038E"/>
<note>
nature
</note>
<outline>
<contour>
<point x="664" y="141" type="line"/>
<point x="664" y="420" type="line"/>
<point x="788" y="433"/>
<point x="989" y="574"/>
<point x="1107" y="792"/>
<point x="1107" y="916" type="qcurve" smooth="yes"/>
<point x="1107" y="1054"/>
<point x="973" y="1286"/>
<point x="741" y="1420"/>
<point x="469" y="1420"/>
<point x="237" y="1286"/>
<point x="103" y="1054"/>
<point x="103" y="916" type="qcurve" smooth="yes"/>
<point x="103" y="732"/>
<point x="342" y="453"/>
<point x="523" y="423" type="qcurve"/>
<point x="523" y="141" type="line"/>
<point x="93" y="141" type="line"/>
<point x="93" y="0" type="line"/>
<point x="1093" y="0" type="line"/>
<point x="1093" y="141" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/nature.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/nature.glif",
"repo_id": "cascadia-code",
"token_count": 500
}
| 545 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="newspaper-variant-multiple" format="2">
<advance width="1200"/>
<unicode hex="F1002"/>
<note>
newspaper-variant-multiple
</note>
<outline>
<contour>
<point x="20" y="987" type="line"/>
<point x="20" y="324" type="line" smooth="yes"/>
<point x="20" y="296"/>
<point x="33" y="267" type="qcurve" smooth="yes"/>
<point x="61" y="213"/>
<point x="129" y="213" type="qcurve" smooth="yes"/>
<point x="957" y="213" type="line"/>
<point x="957" y="324" type="line"/>
<point x="129" y="324" type="line"/>
<point x="129" y="987" type="line"/>
</contour>
<contour>
<point x="1123" y="1207"/>
<point x="1084" y="1207" type="qcurve" smooth="yes"/>
<point x="333" y="1207" type="line" smooth="yes"/>
<point x="294" y="1207"/>
<point x="240" y="1158"/>
<point x="240" y="1122" type="qcurve" smooth="yes"/>
<point x="240" y="518" type="line" smooth="yes"/>
<point x="240" y="485"/>
<point x="294" y="433"/>
<point x="333" y="433" type="qcurve" smooth="yes"/>
<point x="1084" y="433" type="line" smooth="yes"/>
<point x="1123" y="433"/>
<point x="1180" y="485"/>
<point x="1180" y="518" type="qcurve" smooth="yes"/>
<point x="1180" y="1122" type="line" smooth="yes"/>
<point x="1180" y="1158"/>
</contour>
<contour>
<point x="351" y="764" type="line"/>
<point x="351" y="1096" type="line"/>
<point x="626" y="1096" type="line"/>
<point x="626" y="764" type="line"/>
</contour>
<contour>
<point x="1069" y="656" type="line"/>
<point x="1069" y="544" type="line"/>
<point x="351" y="544" type="line"/>
<point x="351" y="656" type="line"/>
</contour>
<contour>
<point x="1069" y="876" type="line"/>
<point x="1069" y="764" type="line"/>
<point x="737" y="764" type="line"/>
<point x="737" y="876" type="line"/>
</contour>
<contour>
<point x="1069" y="1096" type="line"/>
<point x="1069" y="987" type="line"/>
<point x="737" y="987" type="line"/>
<point x="737" y="1096" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/newspaper-variant-multiple.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/newspaper-variant-multiple.glif",
"repo_id": "cascadia-code",
"token_count": 1080
}
| 546 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="page-first" format="2">
<advance width="1200"/>
<unicode hex="F0600"/>
<note>
page-first
</note>
<outline>
<contour>
<point x="1049" y="150" type="line"/>
<point x="1180" y="281" type="line"/>
<point x="751" y="710" type="line"/>
<point x="1180" y="1139" type="line"/>
<point x="1049" y="1270" type="line"/>
<point x="488" y="710" type="line"/>
</contour>
<contour>
<point x="20" y="150" type="line"/>
<point x="208" y="150" type="line"/>
<point x="208" y="1270" type="line"/>
<point x="20" y="1270" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/page-first.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/page-first.glif",
"repo_id": "cascadia-code",
"token_count": 320
}
| 547 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="pan-horizontal" format="2">
<advance width="1200"/>
<unicode hex="F0BB8"/>
<note>
pan-horizontal
</note>
<outline>
<contour>
<point x="294" y="467" type="line"/>
<point x="294" y="953" type="line"/>
<point x="20" y="710" type="line"/>
</contour>
<contour>
<point x="1180" y="710" type="line"/>
<point x="906" y="953" type="line"/>
<point x="906" y="467" type="line"/>
</contour>
<contour>
<point x="651" y="833"/>
<point x="600" y="833" type="qcurve" smooth="yes"/>
<point x="549" y="833"/>
<point x="477" y="761"/>
<point x="477" y="659"/>
<point x="549" y="587"/>
<point x="651" y="587"/>
<point x="723" y="659"/>
<point x="723" y="761"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/pan-horizontal.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/pan-horizontal.glif",
"repo_id": "cascadia-code",
"token_count": 412
}
| 548 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="pencil-ruler" format="2">
<advance width="1200"/>
<unicode hex="F1353"/>
<note>
pencil-ruler
</note>
<outline>
<contour>
<point x="713" y="1028" type="line"/>
<point x="107" y="423" type="line"/>
<point x="107" y="217" type="line"/>
<point x="313" y="217" type="line"/>
<point x="918" y="823" type="line"/>
</contour>
<contour>
<point x="1085" y="458" type="line"/>
<point x="1005" y="382" type="line"/>
<point x="949" y="435" type="line"/>
<point x="1029" y="515" type="line"/>
<point x="952" y="592" type="line"/>
<point x="816" y="456" type="line"/>
<point x="762" y="510" type="line"/>
<point x="664" y="415" type="line"/>
<point x="949" y="130" type="line"/>
<point x="1180" y="361" type="line"/>
</contour>
<contour>
<point x="461" y="926" type="line"/>
<point x="405" y="982" type="line"/>
<point x="485" y="1059" type="line"/>
<point x="405" y="1136" type="line"/>
<point x="272" y="1000" type="line"/>
<point x="212" y="1059" type="line"/>
<point x="348" y="1195" type="line"/>
<point x="251" y="1290" type="line"/>
<point x="20" y="1059" type="line"/>
<point x="305" y="774" type="line"/>
</contour>
<contour>
<point x="977" y="882" type="line"/>
<point x="1077" y="985" type="line" smooth="yes"/>
<point x="1093" y="1000"/>
<point x="1093" y="1044"/>
<point x="1077" y="1062" type="qcurve" smooth="yes"/>
<point x="949" y="1187" type="line" smooth="yes"/>
<point x="934" y="1203"/>
<point x="887" y="1203"/>
<point x="872" y="1187" type="qcurve" smooth="yes"/>
<point x="772" y="1087" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/pencil-ruler.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/pencil-ruler.glif",
"repo_id": "cascadia-code",
"token_count": 883
}
| 549 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="pine-tree-box" format="2">
<advance width="1200"/>
<unicode hex="F0406"/>
<note>
pine-tree-box
</note>
<outline>
<contour>
<point x="88" y="1290"/>
<point x="20" y="1222"/>
<point x="20" y="1176" type="qcurve" smooth="yes"/>
<point x="20" y="244" type="line" smooth="yes"/>
<point x="20" y="198"/>
<point x="88" y="130"/>
<point x="134" y="130" type="qcurve" smooth="yes"/>
<point x="1066" y="130" type="line" smooth="yes"/>
<point x="1112" y="130"/>
<point x="1180" y="198"/>
<point x="1180" y="244" type="qcurve" smooth="yes"/>
<point x="1180" y="1176" type="line" smooth="yes"/>
<point x="1180" y="1222"/>
<point x="1112" y="1290"/>
<point x="1066" y="1290" type="qcurve" smooth="yes"/>
<point x="134" y="1290" type="line" smooth="yes"/>
</contour>
<contour>
<point x="543" y="419" type="line"/>
<point x="251" y="419" type="line"/>
<point x="483" y="653" type="line"/>
<point x="309" y="653" type="line"/>
<point x="543" y="884" type="line"/>
<point x="369" y="884" type="line"/>
<point x="600" y="1116" type="line"/>
<point x="831" y="884" type="line"/>
<point x="657" y="884" type="line"/>
<point x="891" y="653" type="line"/>
<point x="717" y="653" type="line"/>
<point x="949" y="419" type="line"/>
<point x="657" y="419" type="line"/>
<point x="657" y="304" type="line"/>
<point x="543" y="304" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/pine-tree-box.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/pine-tree-box.glif",
"repo_id": "cascadia-code",
"token_count": 767
}
| 550 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="play" format="2">
<advance width="1200"/>
<unicode hex="F040A"/>
<note>
play
</note>
<outline>
<contour>
<point x="1158" y="710" type="line"/>
<point x="42" y="1420" type="line"/>
<point x="42" y="0" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/play.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/play.glif",
"repo_id": "cascadia-code",
"token_count": 154
}
| 551 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="podium-gold" format="2">
<advance width="1200"/>
<unicode hex="F0D27"/>
<note>
podium-gold
</note>
<outline>
<contour>
<point x="469" y="890" type="line"/>
<point x="600" y="969" type="line"/>
<point x="728" y="890" type="line"/>
<point x="694" y="1038" type="line"/>
<point x="810" y="1137" type="line"/>
<point x="659" y="1149" type="line"/>
<point x="600" y="1290" type="line"/>
<point x="541" y="1149" type="line"/>
<point x="390" y="1137" type="line"/>
<point x="504" y="1038" type="line"/>
</contour>
<contour>
<point x="758" y="816" type="line"/>
<point x="442" y="816" type="line"/>
<point x="442" y="130" type="line"/>
<point x="758" y="130" type="line"/>
</contour>
<contour>
<point x="336" y="446" type="line"/>
<point x="20" y="446" type="line"/>
<point x="20" y="130" type="line"/>
<point x="336" y="130" type="line"/>
</contour>
<contour>
<point x="232" y="342" type="line"/>
<point x="232" y="236" type="line"/>
<point x="126" y="236" type="line"/>
<point x="126" y="342" type="line"/>
</contour>
<contour>
<point x="1180" y="658" type="line"/>
<point x="864" y="658" type="line"/>
<point x="864" y="130" type="line"/>
<point x="1180" y="130" type="line"/>
</contour>
<contour>
<point x="1074" y="552" type="line"/>
<point x="1074" y="236" type="line"/>
<point x="968" y="236" type="line"/>
<point x="968" y="552" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/podium-gold.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/podium-gold.glif",
"repo_id": "cascadia-code",
"token_count": 795
}
| 552 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="power-on" format="2">
<advance width="1200"/>
<unicode hex="F0903"/>
<note>
power-on
</note>
<outline>
<contour>
<point x="522" y="0" type="line"/>
<point x="678" y="0" type="line"/>
<point x="678" y="1420" type="line"/>
<point x="522" y="1420" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/power-on.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/power-on.glif",
"repo_id": "cascadia-code",
"token_count": 178
}
| 553 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="power-socket" format="2">
<advance width="1200"/>
<unicode hex="F0427"/>
<note>
power-socket
</note>
<outline>
<contour>
<point x="793" y="773" type="line"/>
<point x="793" y="517" type="line"/>
<point x="923" y="517" type="line"/>
<point x="923" y="773" type="line"/>
</contour>
<contour>
<point x="277" y="773" type="line"/>
<point x="277" y="517" type="line"/>
<point x="407" y="517" type="line"/>
<point x="407" y="773" type="line"/>
</contour>
<contour>
<point x="537" y="903" type="line"/>
<point x="537" y="647" type="line"/>
<point x="663" y="647" type="line"/>
<point x="663" y="903" type="line"/>
</contour>
<contour>
<point x="150" y="789" type="line"/>
<point x="395" y="1033" type="line"/>
<point x="805" y="1033" type="line"/>
<point x="1050" y="789" type="line"/>
<point x="1050" y="387" type="line"/>
<point x="150" y="387" type="line"/>
</contour>
<contour>
<point x="857" y="1160" type="line"/>
<point x="343" y="1160" type="line"/>
<point x="20" y="840" type="line"/>
<point x="20" y="260" type="line"/>
<point x="1180" y="260" type="line"/>
<point x="1180" y="840" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/power-socket.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/power-socket.glif",
"repo_id": "cascadia-code",
"token_count": 653
}
| 554 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="qrcode-minus" format="2">
<advance width="1200"/>
<unicode hex="F118C"/>
<note>
qrcode-minus
</note>
<outline>
<contour>
<point x="232" y="974" type="line"/>
<point x="336" y="974" type="line"/>
<point x="336" y="1078" type="line"/>
<point x="232" y="1078" type="line"/>
</contour>
<contour>
<point x="20" y="762" type="line"/>
<point x="548" y="762" type="line"/>
<point x="548" y="1290" type="line"/>
<point x="20" y="1290" type="line"/>
</contour>
<contour>
<point x="442" y="1184" type="line"/>
<point x="442" y="868" type="line"/>
<point x="126" y="868" type="line"/>
<point x="126" y="1184" type="line"/>
</contour>
<contour>
<point x="232" y="342" type="line"/>
<point x="336" y="342" type="line"/>
<point x="336" y="446" type="line"/>
<point x="232" y="446" type="line"/>
</contour>
<contour>
<point x="20" y="130" type="line"/>
<point x="548" y="130" type="line"/>
<point x="548" y="658" type="line"/>
<point x="20" y="658" type="line"/>
</contour>
<contour>
<point x="442" y="552" type="line"/>
<point x="442" y="236" type="line"/>
<point x="126" y="236" type="line"/>
<point x="126" y="552" type="line"/>
</contour>
<contour>
<point x="652" y="342" type="line"/>
<point x="758" y="342" type="line"/>
<point x="758" y="236" type="line"/>
<point x="652" y="236" type="line"/>
<point x="652" y="130" type="line"/>
<point x="758" y="130" type="line"/>
<point x="758" y="236" type="line"/>
<point x="968" y="236" type="line"/>
<point x="968" y="130" type="line"/>
<point x="1180" y="130" type="line"/>
<point x="1180" y="446" type="line"/>
<point x="968" y="446" type="line"/>
<point x="968" y="552" type="line"/>
<point x="1180" y="552" type="line"/>
<point x="1180" y="658" type="line"/>
<point x="968" y="658" type="line"/>
<point x="968" y="552" type="line"/>
<point x="864" y="552" type="line"/>
<point x="864" y="658" type="line"/>
<point x="652" y="658" type="line"/>
</contour>
<contour>
<point x="968" y="236" type="line"/>
<point x="1074" y="236" type="line"/>
<point x="1074" y="342" type="line"/>
<point x="968" y="342" type="line"/>
</contour>
<contour>
<point x="968" y="342" type="line"/>
<point x="968" y="446" type="line"/>
<point x="864" y="446" type="line"/>
<point x="864" y="552" type="line"/>
<point x="758" y="552" type="line"/>
<point x="758" y="342" type="line"/>
</contour>
<contour>
<point x="1126" y="1078" type="line"/>
<point x="706" y="1078" type="line"/>
<point x="706" y="974" type="line"/>
<point x="1126" y="974" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/qrcode-minus.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/qrcode-minus.glif",
"repo_id": "cascadia-code",
"token_count": 1426
}
| 555 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="record" format="2">
<advance width="1200"/>
<unicode hex="F044A"/>
<note>
record
</note>
<outline>
<contour>
<point x="1180" y="866"/>
<point x="1024" y="1134"/>
<point x="756" y="1290"/>
<point x="444" y="1290"/>
<point x="176" y="1134"/>
<point x="20" y="866"/>
<point x="20" y="554"/>
<point x="176" y="286"/>
<point x="444" y="130"/>
<point x="756" y="130"/>
<point x="1024" y="286"/>
<point x="1180" y="554"/>
<point x="1180" y="710" type="qcurve" smooth="yes"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/record.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/record.glif",
"repo_id": "cascadia-code",
"token_count": 317
}
| 556 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="relation-one-to-only-one" format="2">
<advance width="1200"/>
<unicode hex="F14A5"/>
<note>
relation-one-to-only-one
</note>
<outline>
<contour>
<point x="1123" y="536" type="line"/>
<point x="1123" y="653" type="line"/>
<point x="1006" y="653" type="line"/>
<point x="1006" y="536" type="line"/>
<point x="949" y="536" type="line"/>
<point x="949" y="653" type="line"/>
<point x="831" y="653" type="line"/>
<point x="831" y="536" type="line"/>
<point x="657" y="536" type="line"/>
<point x="657" y="1001" type="line"/>
<point x="309" y="1001" type="line"/>
<point x="309" y="1116" type="line"/>
<point x="194" y="1116" type="line"/>
<point x="194" y="1001" type="line"/>
<point x="20" y="1001" type="line"/>
<point x="20" y="884" type="line"/>
<point x="194" y="884" type="line"/>
<point x="194" y="767" type="line"/>
<point x="309" y="767" type="line"/>
<point x="309" y="884" type="line"/>
<point x="543" y="884" type="line"/>
<point x="543" y="419" type="line"/>
<point x="831" y="419" type="line"/>
<point x="831" y="304" type="line"/>
<point x="949" y="304" type="line"/>
<point x="949" y="419" type="line"/>
<point x="1006" y="419" type="line"/>
<point x="1006" y="304" type="line"/>
<point x="1123" y="304" type="line"/>
<point x="1123" y="419" type="line"/>
<point x="1180" y="419" type="line"/>
<point x="1180" y="536" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/relation-one-to-only-one.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/relation-one-to-only-one.glif",
"repo_id": "cascadia-code",
"token_count": 767
}
| 557 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="rhombus-split-outline" format="2">
<advance width="1200"/>
<unicode hex="F14DD"/>
<note>
rhombus-split-outline
</note>
<outline>
<contour>
<point x="646" y="1290"/>
<point x="600" y="1290" type="qcurve" smooth="yes"/>
<point x="554" y="1290"/>
<point x="518" y="1257" type="qcurve" smooth="yes"/>
<point x="53" y="792" type="line" smooth="yes"/>
<point x="20" y="759"/>
<point x="20" y="661"/>
<point x="53" y="628" type="qcurve" smooth="yes"/>
<point x="518" y="163" type="line" smooth="yes"/>
<point x="551" y="130"/>
<point x="649" y="130"/>
<point x="682" y="163" type="qcurve" smooth="yes"/>
<point x="1147" y="628" type="line" smooth="yes"/>
<point x="1180" y="661"/>
<point x="1180" y="759"/>
<point x="1147" y="792" type="qcurve" smooth="yes"/>
<point x="682" y="1257" type="line" smooth="yes"/>
</contour>
<contour>
<point x="409" y="982" type="line"/>
<point x="600" y="1176" type="line"/>
<point x="791" y="982" type="line"/>
<point x="600" y="792" type="line"/>
</contour>
<contour>
<point x="134" y="710" type="line"/>
<point x="328" y="901" type="line"/>
<point x="518" y="710" type="line"/>
<point x="328" y="519" type="line"/>
</contour>
<contour>
<point x="682" y="710" type="line"/>
<point x="872" y="901" type="line"/>
<point x="1066" y="710" type="line"/>
<point x="872" y="519" type="line"/>
</contour>
<contour>
<point x="409" y="438" type="line"/>
<point x="600" y="628" type="line"/>
<point x="791" y="438" type="line"/>
<point x="600" y="244" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/rhombus-split-outline.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/rhombus-split-outline.glif",
"repo_id": "cascadia-code",
"token_count": 861
}
| 558 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="scanner" format="2">
<advance width="1200"/>
<unicode hex="F06AB"/>
<note>
scanner
</note>
<outline>
<contour>
<point x="65" y="819"/>
<point x="20" y="749"/>
<point x="20" y="710" type="qcurve" smooth="yes"/>
<point x="20" y="357" type="line" smooth="yes"/>
<point x="20" y="302"/>
<point x="96" y="227"/>
<point x="150" y="227" type="qcurve" smooth="yes"/>
<point x="1050" y="227" type="line" smooth="yes"/>
<point x="1104" y="227"/>
<point x="1180" y="302"/>
<point x="1180" y="357" type="qcurve" smooth="yes"/>
<point x="1180" y="613" type="line" smooth="yes"/>
<point x="1180" y="668"/>
<point x="1104" y="743"/>
<point x="1050" y="743" type="qcurve" smooth="yes"/>
<point x="241" y="743" type="line"/>
<point x="1147" y="1073" type="line"/>
<point x="1101" y="1193" type="line"/>
<point x="99" y="828" type="line"/>
</contour>
<contour>
<point x="923" y="550" type="line"/>
<point x="1050" y="550" type="line"/>
<point x="1050" y="420" type="line"/>
<point x="923" y="420" type="line"/>
</contour>
<contour>
<point x="150" y="550" type="line"/>
<point x="793" y="550" type="line"/>
<point x="793" y="420" type="line"/>
<point x="150" y="420" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/scanner.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/scanner.glif",
"repo_id": "cascadia-code",
"token_count": 692
}
| 559 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="send-check-outline" format="2">
<advance width="1200"/>
<unicode hex="F1162"/>
<note>
send-check-outline
</note>
<outline>
<contour>
<point x="1155" y="737" type="line"/>
<point x="20" y="1223" type="line"/>
<point x="20" y="846" type="line"/>
<point x="830" y="737" type="line"/>
<point x="20" y="628" type="line"/>
<point x="20" y="250" type="line"/>
</contour>
<contour>
<point x="126" y="939" type="line"/>
<point x="126" y="1058" type="line"/>
<point x="534" y="886" type="line"/>
</contour>
<contour>
<point x="126" y="534" type="line"/>
<point x="534" y="587" type="line"/>
<point x="126" y="415" type="line"/>
</contour>
<contour>
<point x="1180" y="466" type="line"/>
<point x="1099" y="547" type="line"/>
<point x="912" y="359" type="line"/>
<point x="803" y="466" type="line"/>
<point x="722" y="385" type="line"/>
<point x="912" y="197" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/send-check-outline.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/send-check-outline.glif",
"repo_id": "cascadia-code",
"token_count": 526
}
| 560 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="skew-more" format="2">
<advance width="1200"/>
<unicode hex="F0D37"/>
<note>
skew-more
</note>
<outline>
<contour>
<point x="316" y="773" type="line"/>
<point x="633" y="773" type="line"/>
<point x="497" y="193" type="line"/>
<point x="180" y="193" type="line"/>
</contour>
<contour>
<point x="600" y="67" type="line"/>
<point x="793" y="903" type="line"/>
<point x="213" y="903" type="line"/>
<point x="20" y="67" type="line"/>
</contour>
<contour>
<point x="923" y="840" type="line"/>
<point x="1180" y="1097" type="line"/>
<point x="923" y="1353" type="line"/>
<point x="923" y="1160" type="line"/>
<point x="407" y="1160" type="line"/>
<point x="407" y="1033" type="line"/>
<point x="923" y="1033" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/skew-more.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/skew-more.glif",
"repo_id": "cascadia-code",
"token_count": 447
}
| 561 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="sort-variant-remove" format="2">
<advance width="1200"/>
<unicode hex="F1147"/>
<note>
sort-variant-remove
</note>
<outline>
<contour>
<point x="20" y="903" type="line"/>
<point x="20" y="786" type="line"/>
<point x="732" y="786" type="line"/>
<point x="732" y="903" type="line"/>
</contour>
<contour>
<point x="1088" y="1201" type="line"/>
<point x="20" y="1201" type="line"/>
<point x="20" y="1081" type="line"/>
<point x="1088" y="1081" type="line"/>
</contour>
<contour>
<point x="20" y="608" type="line"/>
<point x="20" y="489" type="line"/>
<point x="376" y="489" type="line"/>
<point x="376" y="608" type="line"/>
</contour>
<contour>
<point x="1097" y="639" type="line"/>
<point x="969" y="514" type="line"/>
<point x="843" y="639" type="line"/>
<point x="760" y="556" type="line"/>
<point x="885" y="430" type="line"/>
<point x="760" y="302" type="line"/>
<point x="843" y="219" type="line"/>
<point x="969" y="347" type="line"/>
<point x="1097" y="219" type="line"/>
<point x="1180" y="302" type="line"/>
<point x="1052" y="430" type="line"/>
<point x="1180" y="556" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/sort-variant-remove.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/sort-variant-remove.glif",
"repo_id": "cascadia-code",
"token_count": 649
}
| 562 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="square-opacity" format="2">
<advance width="1200"/>
<unicode hex="F1854"/>
<note>
square-opacity
</note>
<outline>
<contour>
<point x="1180" y="647" type="line"/>
<point x="1180" y="773" type="line"/>
<point x="1050" y="773" type="line"/>
<point x="1050" y="647" type="line"/>
</contour>
<contour>
<point x="1050" y="903" type="line"/>
<point x="1180" y="903" type="line"/>
<point x="1180" y="1033" type="line"/>
<point x="1050" y="1033" type="line"/>
</contour>
<contour>
<point x="1180" y="1160" type="line"/>
<point x="1180" y="1290" type="line"/>
<point x="1050" y="1290" type="line"/>
<point x="1050" y="1160" type="line"/>
</contour>
<contour>
<point x="923" y="647" type="line"/>
<point x="923" y="517" type="line"/>
<point x="1050" y="517" type="line"/>
<point x="1050" y="647" type="line"/>
</contour>
<contour>
<point x="1050" y="387" type="line"/>
<point x="1180" y="387" type="line"/>
<point x="1180" y="517" type="line"/>
<point x="1050" y="517" type="line"/>
</contour>
<contour>
<point x="793" y="387" type="line"/>
<point x="663" y="387" type="line"/>
<point x="663" y="517" type="line"/>
<point x="793" y="517" type="line"/>
<point x="793" y="647" type="line"/>
<point x="663" y="647" type="line"/>
<point x="663" y="773" type="line"/>
<point x="793" y="773" type="line"/>
<point x="793" y="903" type="line"/>
<point x="663" y="903" type="line"/>
<point x="663" y="1033" type="line"/>
<point x="793" y="1033" type="line"/>
<point x="793" y="1160" type="line"/>
<point x="663" y="1160" type="line"/>
<point x="663" y="1290" type="line"/>
<point x="20" y="1290" type="line"/>
<point x="20" y="130" type="line"/>
<point x="663" y="130" type="line"/>
<point x="663" y="260" type="line"/>
<point x="793" y="260" type="line"/>
</contour>
<contour>
<point x="793" y="260" type="line"/>
<point x="793" y="130" type="line"/>
<point x="923" y="130" type="line"/>
<point x="923" y="260" type="line"/>
</contour>
<contour>
<point x="923" y="1160" type="line"/>
<point x="923" y="1290" type="line"/>
<point x="793" y="1290" type="line"/>
<point x="793" y="1160" type="line"/>
</contour>
<contour>
<point x="923" y="903" type="line"/>
<point x="923" y="773" type="line"/>
<point x="1050" y="773" type="line"/>
<point x="1050" y="903" type="line"/>
</contour>
<contour>
<point x="793" y="517" type="line"/>
<point x="793" y="387" type="line"/>
<point x="923" y="387" type="line"/>
<point x="923" y="517" type="line"/>
</contour>
<contour>
<point x="923" y="387" type="line"/>
<point x="923" y="260" type="line"/>
<point x="1050" y="260" type="line"/>
<point x="1050" y="387" type="line"/>
</contour>
<contour>
<point x="1050" y="130" type="line"/>
<point x="1180" y="130" type="line"/>
<point x="1180" y="260" type="line"/>
<point x="1050" y="260" type="line"/>
</contour>
<contour>
<point x="793" y="1033" type="line"/>
<point x="793" y="903" type="line"/>
<point x="923" y="903" type="line"/>
<point x="923" y="1033" type="line"/>
</contour>
<contour>
<point x="793" y="773" type="line"/>
<point x="793" y="647" type="line"/>
<point x="923" y="647" type="line"/>
<point x="923" y="773" type="line"/>
</contour>
<contour>
<point x="1050" y="1160" type="line"/>
<point x="923" y="1160" type="line"/>
<point x="923" y="1033" type="line"/>
<point x="1050" y="1033" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/square-opacity.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/square-opacity.glif",
"repo_id": "cascadia-code",
"token_count": 1918
}
| 563 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="step-forward" format="2">
<advance width="1200"/>
<unicode hex="F04D7"/>
<note>
step-forward
</note>
<outline>
<contour>
<point x="238" y="1217" type="line"/>
<point x="20" y="1217" type="line"/>
<point x="20" y="203" type="line"/>
<point x="238" y="203" type="line"/>
</contour>
<contour>
<point x="1180" y="710" type="line"/>
<point x="381" y="1217" type="line"/>
<point x="381" y="203" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/step-forward.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/step-forward.glif",
"repo_id": "cascadia-code",
"token_count": 257
}
| 564 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="tally-mark-2" format="2">
<advance width="1200"/>
<unicode hex="F1ABD"/>
<note>
tally-mark-2
</note>
<outline>
<contour>
<point x="500" y="1420" type="line"/>
<point x="295" y="1420" type="line"/>
<point x="295" y="0" type="line"/>
<point x="500" y="0" type="line"/>
</contour>
<contour>
<point x="905" y="0" type="line"/>
<point x="905" y="1420" type="line"/>
<point x="700" y="1420" type="line"/>
<point x="700" y="0" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/tally-mark-2.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/tally-mark-2.glif",
"repo_id": "cascadia-code",
"token_count": 283
}
| 565 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="tennis-ball" format="2">
<advance width="1200"/>
<unicode hex="F0507"/>
<note>
tennis-ball
</note>
<outline>
<contour>
<point x="494" y="1290"/>
<point x="300" y="1219"/>
<point x="222" y="1151" type="qcurve"/>
<point x="317" y="1069"/>
<point x="426" y="841"/>
<point x="426" y="579"/>
<point x="317" y="351"/>
<point x="222" y="269" type="qcurve"/>
<point x="300" y="201"/>
<point x="494" y="130"/>
<point x="706" y="130"/>
<point x="900" y="201"/>
<point x="978" y="269" type="qcurve"/>
<point x="883" y="351"/>
<point x="774" y="579"/>
<point x="774" y="841"/>
<point x="883" y="1069"/>
<point x="978" y="1151" type="qcurve"/>
<point x="900" y="1219"/>
<point x="706" y="1290"/>
<point x="600" y="1290" type="qcurve" smooth="yes"/>
</contour>
<contour>
<point x="1180" y="808"/>
<point x="1117" y="990"/>
<point x="1057" y="1067" type="qcurve"/>
<point x="978" y="1001"/>
<point x="891" y="816"/>
<point x="891" y="604"/>
<point x="978" y="419"/>
<point x="1057" y="353" type="qcurve"/>
<point x="1117" y="430"/>
<point x="1180" y="612"/>
<point x="1180" y="710" type="qcurve" smooth="yes"/>
</contour>
<contour>
<point x="20" y="612"/>
<point x="83" y="430"/>
<point x="143" y="353" type="qcurve"/>
<point x="222" y="419"/>
<point x="309" y="604"/>
<point x="309" y="816"/>
<point x="222" y="1001"/>
<point x="143" y="1067" type="qcurve"/>
<point x="83" y="990"/>
<point x="20" y="808"/>
<point x="20" y="710" type="qcurve" smooth="yes"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/tennis-ball.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/tennis-ball.glif",
"repo_id": "cascadia-code",
"token_count": 920
}
| 566 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="text-box-outline" format="2">
<advance width="1200"/>
<unicode hex="F09ED"/>
<note>
text-box-outline
</note>
<outline>
<contour>
<point x="1050" y="1290" type="qcurve" smooth="yes"/>
<point x="150" y="1290" type="line" smooth="yes"/>
<point x="96" y="1290"/>
<point x="20" y="1214"/>
<point x="20" y="1160" type="qcurve" smooth="yes"/>
<point x="20" y="260" type="line" smooth="yes"/>
<point x="20" y="206"/>
<point x="96" y="130"/>
<point x="150" y="130" type="qcurve" smooth="yes"/>
<point x="1050" y="130" type="line" smooth="yes"/>
<point x="1104" y="130"/>
<point x="1180" y="206"/>
<point x="1180" y="260" type="qcurve" smooth="yes"/>
<point x="1180" y="1160" type="line" smooth="yes"/>
<point x="1180" y="1214"/>
<point x="1104" y="1290"/>
</contour>
<contour>
<point x="150" y="260" type="line"/>
<point x="150" y="1160" type="line"/>
<point x="1050" y="1160" type="line"/>
<point x="1050" y="260" type="line"/>
</contour>
<contour>
<point x="923" y="1033" type="line"/>
<point x="277" y="1033" type="line"/>
<point x="277" y="903" type="line"/>
<point x="923" y="903" type="line"/>
</contour>
<contour>
<point x="923" y="773" type="line"/>
<point x="277" y="773" type="line"/>
<point x="277" y="647" type="line"/>
<point x="923" y="647" type="line"/>
</contour>
<contour>
<point x="730" y="517" type="line"/>
<point x="277" y="517" type="line"/>
<point x="277" y="387" type="line"/>
<point x="730" y="387" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/text-box-outline.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/text-box-outline.glif",
"repo_id": "cascadia-code",
"token_count": 842
}
| 567 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="timer-sand" format="2">
<advance width="1200"/>
<unicode hex="F051F"/>
<note>
timer-sand
</note>
<outline>
<contour>
<point x="173" y="993" type="line"/>
<point x="457" y="710" type="line"/>
<point x="173" y="427" type="line"/>
<point x="173" y="0" type="line"/>
<point x="1027" y="0" type="line"/>
<point x="1027" y="427" type="line"/>
<point x="743" y="710" type="line"/>
<point x="1027" y="993" type="line"/>
<point x="1027" y="1420" type="line"/>
<point x="173" y="1420" type="line"/>
</contour>
<contour>
<point x="883" y="140" type="line"/>
<point x="317" y="140" type="line"/>
<point x="317" y="390" type="line"/>
<point x="600" y="673" type="line"/>
<point x="883" y="390" type="line"/>
</contour>
<contour>
<point x="317" y="1030" type="line"/>
<point x="317" y="1280" type="line"/>
<point x="883" y="1280" type="line"/>
<point x="883" y="1030" type="line"/>
<point x="600" y="747" type="line"/>
</contour>
<contour>
<point x="457" y="1083" type="line"/>
<point x="600" y="940" type="line"/>
<point x="743" y="1083" type="line"/>
<point x="743" y="1137" type="line"/>
<point x="457" y="1137" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/timer-sand.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/timer-sand.glif",
"repo_id": "cascadia-code",
"token_count": 658
}
| 568 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="toy-brick-remove" format="2">
<advance width="1200"/>
<unicode hex="F1290"/>
<note>
toy-brick-remove
</note>
<outline>
<contour>
<point x="1088" y="1112" type="line"/>
<point x="969" y="1112" type="line"/>
<point x="969" y="1170" type="line" smooth="yes"/>
<point x="969" y="1220"/>
<point x="899" y="1290"/>
<point x="852" y="1290" type="qcurve" smooth="yes"/>
<point x="732" y="1290" type="line" smooth="yes"/>
<point x="682" y="1290"/>
<point x="613" y="1220"/>
<point x="613" y="1170" type="qcurve" smooth="yes"/>
<point x="613" y="1112" type="line"/>
<point x="496" y="1112" type="line"/>
<point x="496" y="1170" type="line" smooth="yes"/>
<point x="496" y="1220"/>
<point x="426" y="1290"/>
<point x="376" y="1290" type="qcurve" smooth="yes"/>
<point x="256" y="1290" type="line" smooth="yes"/>
<point x="209" y="1290"/>
<point x="140" y="1220"/>
<point x="140" y="1170" type="qcurve" smooth="yes"/>
<point x="140" y="1112" type="line"/>
<point x="20" y="1112" type="line"/>
<point x="20" y="280" type="line"/>
<point x="618" y="280" type="line"/>
<point x="613" y="311"/>
<point x="613" y="341" type="qcurve" smooth="yes"/>
<point x="613" y="428"/>
<point x="693" y="581"/>
<point x="835" y="681"/>
<point x="1008" y="706"/>
<point x="1088" y="675" type="qcurve"/>
</contour>
<contour>
<point x="1097" y="550" type="line"/>
<point x="969" y="425" type="line"/>
<point x="843" y="550" type="line"/>
<point x="760" y="467" type="line"/>
<point x="885" y="341" type="line"/>
<point x="760" y="213" type="line"/>
<point x="843" y="130" type="line"/>
<point x="969" y="258" type="line"/>
<point x="1097" y="130" type="line"/>
<point x="1180" y="213" type="line"/>
<point x="1052" y="341" type="line"/>
<point x="1180" y="467" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/toy-brick-remove.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/toy-brick-remove.glif",
"repo_id": "cascadia-code",
"token_count": 1011
}
| 569 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="triangle" format="2">
<advance width="1200"/>
<unicode hex="F0536"/>
<note>
triangle
</note>
<outline>
<contour>
<point x="600" y="1210" type="line"/>
<point x="20" y="210" type="line"/>
<point x="1180" y="210" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/triangle.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/triangle.glif",
"repo_id": "cascadia-code",
"token_count": 156
}
| 570 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="twitch" format="2">
<advance width="1200"/>
<unicode hex="F0543"/>
<note>
twitch
</note>
<outline>
<contour>
<point x="575" y="830" type="line"/>
<point x="673" y="830" type="line"/>
<point x="673" y="1119" type="line"/>
<point x="575" y="1119" type="line"/>
</contour>
<contour>
<point x="841" y="830" type="line"/>
<point x="939" y="830" type="line"/>
<point x="939" y="1119" type="line"/>
<point x="841" y="1119" type="line"/>
</contour>
<contour>
<point x="1180" y="1385" type="line"/>
<point x="261" y="1385" type="line"/>
<point x="20" y="1144" type="line"/>
<point x="20" y="276" type="line"/>
<point x="308" y="276" type="line"/>
<point x="308" y="35" type="line"/>
<point x="552" y="276" type="line"/>
<point x="746" y="276" type="line"/>
<point x="1180" y="710" type="line"/>
</contour>
<contour>
<point x="1082" y="1290" type="line"/>
<point x="1082" y="758" type="line"/>
<point x="892" y="564" type="line"/>
<point x="698" y="564" type="line"/>
<point x="527" y="396" type="line"/>
<point x="527" y="564" type="line"/>
<point x="308" y="564" type="line"/>
<point x="308" y="1290" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/twitch.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/twitch.glif",
"repo_id": "cascadia-code",
"token_count": 665
}
| 571 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="valve" format="2">
<advance width="1200"/>
<unicode hex="F1066"/>
<note>
valve
</note>
<outline>
<contour>
<point x="134" y="1290" type="line"/>
<point x="20" y="1290" type="line"/>
<point x="20" y="130" type="line"/>
<point x="134" y="130" type="line"/>
</contour>
<contour>
<point x="1180" y="130" type="line"/>
<point x="1180" y="1290" type="line"/>
<point x="1066" y="1290" type="line"/>
<point x="1066" y="130" type="line"/>
</contour>
<contour>
<point x="987" y="1015" type="line"/>
<point x="905" y="1097" type="line"/>
<point x="671" y="865" type="line"/>
<point x="622" y="890"/>
<point x="513" y="871"/>
<point x="434" y="792"/>
<point x="415" y="683"/>
<point x="439" y="631" type="qcurve"/>
<point x="208" y="400" type="line"/>
<point x="290" y="318" type="line"/>
<point x="521" y="549" type="line"/>
<point x="573" y="525"/>
<point x="682" y="544"/>
<point x="761" y="623"/>
<point x="780" y="732"/>
<point x="755" y="781" type="qcurve"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/valve.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/valve.glif",
"repo_id": "cascadia-code",
"token_count": 596
}
| 572 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="view-array-outline" format="2">
<advance width="1200"/>
<unicode hex="F1485"/>
<note>
view-array-outline
</note>
<outline>
<contour>
<point x="429" y="1018" type="line"/>
<point x="771" y="1018" type="line"/>
<point x="771" y="405" type="line"/>
<point x="429" y="405" type="line"/>
</contour>
<contour>
<point x="1180" y="267" type="line"/>
<point x="1180" y="1153" type="line"/>
<point x="975" y="1153" type="line"/>
<point x="975" y="267" type="line"/>
</contour>
<contour>
<point x="908" y="267" type="line"/>
<point x="908" y="1153" type="line"/>
<point x="295" y="1153" type="line"/>
<point x="295" y="267" type="line"/>
</contour>
<contour>
<point x="225" y="267" type="line"/>
<point x="225" y="1153" type="line"/>
<point x="20" y="1153" type="line"/>
<point x="20" y="267" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/view-array-outline.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/view-array-outline.glif",
"repo_id": "cascadia-code",
"token_count": 482
}
| 573 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="view-day-outline" format="2">
<advance width="1200"/>
<unicode hex="F148A"/>
<note>
view-day-outline
</note>
<outline>
<contour>
<point x="1180" y="220" type="line"/>
<point x="1180" y="343" type="line"/>
<point x="20" y="343" type="line"/>
<point x="20" y="220" type="line"/>
</contour>
<contour>
<point x="140" y="833" type="line"/>
<point x="1057" y="833" type="line"/>
<point x="1057" y="587" type="line"/>
<point x="140" y="587" type="line"/>
</contour>
<contour>
<point x="1143" y="953"/>
<point x="1120" y="953" type="qcurve" smooth="yes"/>
<point x="80" y="953" type="line" smooth="yes"/>
<point x="54" y="953"/>
<point x="20" y="919"/>
<point x="20" y="893" type="qcurve" smooth="yes"/>
<point x="20" y="527" type="line" smooth="yes"/>
<point x="20" y="501"/>
<point x="54" y="467"/>
<point x="80" y="467" type="qcurve" smooth="yes"/>
<point x="1120" y="467" type="line" smooth="yes"/>
<point x="1143" y="467"/>
<point x="1180" y="501"/>
<point x="1180" y="527" type="qcurve" smooth="yes"/>
<point x="1180" y="893" type="line" smooth="yes"/>
<point x="1180" y="919"/>
</contour>
<contour>
<point x="1180" y="1077" type="line"/>
<point x="1180" y="1200" type="line"/>
<point x="20" y="1200" type="line"/>
<point x="20" y="1077" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/view-day-outline.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/view-day-outline.glif",
"repo_id": "cascadia-code",
"token_count": 738
}
| 574 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="wall" format="2">
<advance width="1200"/>
<unicode hex="F07FE"/>
<note>
wall
</note>
<outline>
<contour>
<point x="77" y="216" type="line"/>
<point x="600" y="216" type="line"/>
<point x="600" y="507" type="line"/>
<point x="77" y="507" type="line"/>
</contour>
<contour>
<point x="20" y="564" type="line"/>
<point x="369" y="564" type="line"/>
<point x="369" y="856" type="line"/>
<point x="20" y="856" type="line"/>
</contour>
<contour>
<point x="426" y="564" type="line"/>
<point x="774" y="564" type="line"/>
<point x="774" y="856" type="line"/>
<point x="426" y="856" type="line"/>
</contour>
<contour>
<point x="831" y="564" type="line"/>
<point x="1180" y="564" type="line"/>
<point x="1180" y="856" type="line"/>
<point x="831" y="856" type="line"/>
</contour>
<contour>
<point x="657" y="216" type="line"/>
<point x="1123" y="216" type="line"/>
<point x="1123" y="507" type="line"/>
<point x="657" y="507" type="line"/>
</contour>
<contour>
<point x="77" y="913" type="line"/>
<point x="543" y="913" type="line"/>
<point x="543" y="1204" type="line"/>
<point x="77" y="1204" type="line"/>
</contour>
<contour>
<point x="600" y="913" type="line"/>
<point x="1123" y="913" type="line"/>
<point x="1123" y="1204" type="line"/>
<point x="600" y="1204" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/wall.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/wall.glif",
"repo_id": "cascadia-code",
"token_count": 778
}
| 575 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="widgets" format="2">
<advance width="1200"/>
<unicode hex="F072C"/>
<note>
widgets
</note>
<outline>
<contour>
<point x="20" y="730" type="line"/>
<point x="501" y="730" type="line"/>
<point x="501" y="949" type="line"/>
<point x="839" y="611" type="line"/>
<point x="620" y="611" type="line"/>
<point x="620" y="130" type="line"/>
<point x="1101" y="130" type="line"/>
<point x="1101" y="611" type="line"/>
<point x="839" y="611" type="line"/>
<point x="1180" y="949" type="line"/>
<point x="839" y="1290" type="line"/>
<point x="501" y="949" type="line"/>
<point x="501" y="1211" type="line"/>
<point x="20" y="1211" type="line"/>
</contour>
<contour>
<point x="20" y="130" type="line"/>
<point x="501" y="130" type="line"/>
<point x="501" y="611" type="line"/>
<point x="20" y="611" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/widgets.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/widgets.glif",
"repo_id": "cascadia-code",
"token_count": 486
}
| 576 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="windsock" format="2">
<advance width="1200"/>
<unicode hex="F15FA"/>
<note>
windsock
</note>
<outline>
<contour>
<point x="1180" y="1016" type="line"/>
<point x="263" y="1137" type="line"/>
<point x="263" y="650" type="line"/>
<point x="1180" y="770" type="line"/>
</contour>
<contour>
<point x="447" y="764" type="line"/>
<point x="447" y="1022" type="line"/>
<point x="630" y="996" type="line"/>
<point x="630" y="790" type="line"/>
</contour>
<contour>
<point x="813" y="813" type="line"/>
<point x="813" y="974" type="line"/>
<point x="997" y="948" type="line"/>
<point x="997" y="839" type="line"/>
</contour>
<contour>
<point x="143" y="1016" type="line"/>
<point x="203" y="1016" type="line"/>
<point x="203" y="1077" type="line"/>
<point x="143" y="1077" type="line"/>
<point x="143" y="1200" type="line" smooth="yes"/>
<point x="143" y="1223"/>
<point x="106" y="1260"/>
<point x="57" y="1260"/>
<point x="20" y="1223"/>
<point x="20" y="1200" type="qcurve" smooth="yes"/>
<point x="20" y="160" type="line"/>
<point x="143" y="160" type="line"/>
<point x="143" y="710" type="line"/>
<point x="203" y="710" type="line"/>
<point x="203" y="770" type="line"/>
<point x="143" y="770" type="line"/>
<point x="143" y="833" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/windsock.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/MaterialDesignIconsDesktop.ufo/glyphs/windsock.glif",
"repo_id": "cascadia-code",
"token_count": 735
}
| 577 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="EXTERNAL_INTERRUPTION" format="2">
<advance width="1200"/>
<unicode hex="E00A"/>
<note>
EXTERNAL_INTERRUPTION
</note>
<outline>
<contour>
<point x="543" y="702" type="line"/>
<point x="402" y="0" type="line"/>
<point x="483" y="0" type="line"/>
<point x="988" y="998" type="line"/>
<point x="968" y="1030" type="line"/>
<point x="611" y="922" type="line"/>
<point x="753" y="1418" type="line"/>
<point x="308" y="1420" type="line"/>
<point x="212" y="620" type="line"/>
<point x="245" y="596" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/Pomicons.ufo/glyphs/E_X_T_E_R_N_A_L__I_N_T_E_R_R_U_P_T_I_O_N_.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/Pomicons.ufo/glyphs/E_X_T_E_R_N_A_L__I_N_T_E_R_R_U_P_T_I_O_N_.glif",
"repo_id": "cascadia-code",
"token_count": 313
}
| 578 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="uni2630" format="2">
<advance width="1200"/>
<unicode hex="2630"/>
<note>
uni2630
</note>
<outline>
<contour>
<point x="120" y="356" type="line"/>
<point x="120" y="151" type="line"/>
<point x="1080" y="151" type="line"/>
<point x="1080" y="356" type="line"/>
</contour>
<contour>
<point x="120" y="765" type="line"/>
<point x="120" y="561" type="line"/>
<point x="1080" y="561" type="line"/>
<point x="1080" y="765" type="line"/>
</contour>
<contour>
<point x="120" y="1175" type="line"/>
<point x="120" y="970" type="line"/>
<point x="1080" y="970" type="line"/>
<point x="1080" y="1175" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/PowerlineExtraSymbols.ufo/glyphs/uni2630.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/PowerlineExtraSymbols.ufo/glyphs/uni2630.glif",
"repo_id": "cascadia-code",
"token_count": 369
}
| 579 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="uniE0BB" format="2">
<advance width="1200"/>
<unicode hex="E0BB"/>
<note>
uniE0BB
</note>
<outline>
<contour>
<point x="1888" y="2280" type="line"/>
<point x="-820" y="-426" type="line"/>
<point x="-688" y="-426" type="line"/>
<point x="2020" y="2280" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/PowerlineExtraSymbols.ufo/glyphs/uniE_0B_B_.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/PowerlineExtraSymbols.ufo/glyphs/uniE_0B_B_.glif",
"repo_id": "cascadia-code",
"token_count": 185
}
| 580 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="uniE0CD" format="2">
<advance width="1200"/>
<unicode hex="E0CD"/>
<note>
uniE0CD
</note>
<outline>
<contour>
<point x="1397" y="1431" type="line"/>
<point x="1692" y="921" type="line"/>
<point x="1397" y="410" type="line"/>
<point x="808" y="410" type="line"/>
<point x="514" y="921" type="line"/>
<point x="808" y="1431" type="line"/>
</contour>
<contour>
<point x="1429" y="356" type="line"/>
<point x="1755" y="921" type="line"/>
<point x="1429" y="1486" type="line"/>
<point x="777" y="1486" type="line"/>
<point x="451" y="921" type="line"/>
<point x="777" y="356" type="line"/>
</contour>
<contour>
<point x="77" y="662" type="line"/>
<point x="372" y="152" type="line"/>
<point x="77" y="-358" type="line"/>
<point x="-500" y="-358" type="line"/>
<point x="-500" y="662" type="line"/>
</contour>
<contour>
<point x="109" y="-413" type="line"/>
<point x="435" y="152" type="line"/>
<point x="109" y="717" type="line"/>
<point x="-555" y="717" type="line"/>
<point x="-555" y="-413" type="line"/>
</contour>
<contour>
<point x="77" y="2238" type="line"/>
<point x="372" y="1728" type="line"/>
<point x="77" y="1218" type="line"/>
<point x="-500" y="1218" type="line"/>
<point x="-500" y="2238" type="line"/>
</contour>
<contour>
<point x="109" y="1163" type="line"/>
<point x="435" y="1728" type="line"/>
<point x="109" y="2293" type="line"/>
<point x="-555" y="2293" type="line"/>
<point x="-555" y="1163" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/PowerlineExtraSymbols.ufo/glyphs/uniE_0C_D_.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/PowerlineExtraSymbols.ufo/glyphs/uniE_0C_D_.glif",
"repo_id": "cascadia-code",
"token_count": 856
}
| 581 |
<?xml version='1.0' encoding='UTF-8'?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>HEAVY CIRCLE</key>
<string>H_E_A_V_Y_ C_I_R_C_L_E_.glif</string>
<key>POWER ON SYMBOL</key>
<string>P_O_W_E_R_ O_N_ S_Y_M_B_O_L_.glif</string>
<key>POWER ON-OFF SYMBOL</key>
<string>P_O_W_E_R_ O_N_-O_F_F_ S_Y_M_B_O_L_.glif</string>
<key>POWER SLEEP SYMBOL</key>
<string>P_O_W_E_R_ S_L_E_E_P_ S_Y_M_B_O_L_.glif</string>
<key>POWER SYMBOL</key>
<string>P_O_W_E_R_ S_Y_M_B_O_L_.glif</string>
</dict>
</plist>
|
cascadia-code/sources/nerdfonts/full/processed/Unicode_IEC_symbol_font.ufo/glyphs/contents.plist/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/Unicode_IEC_symbol_font.ufo/glyphs/contents.plist",
"repo_id": "cascadia-code",
"token_count": 365
}
| 582 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="arrow-small-right" format="2">
<advance width="1200"/>
<unicode hex="EA9F"/>
<note>
arrow-small-right
</note>
<outline>
<contour>
<point x="633" y="901" type="line"/>
<point x="774" y="755" type="line"/>
<point x="284" y="755" type="line"/>
<point x="284" y="665" type="line"/>
<point x="774" y="665" type="line"/>
<point x="633" y="524" type="line"/>
<point x="694" y="458" type="line"/>
<point x="916" y="679" type="line"/>
<point x="916" y="741" type="line"/>
<point x="694" y="962" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/arrow-small-right.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/arrow-small-right.glif",
"repo_id": "cascadia-code",
"token_count": 304
}
| 583 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="chrome-minimize" format="2">
<advance width="1200"/>
<unicode hex="EABA"/>
<note>
chrome-minimize
</note>
<outline>
<contour>
<point x="20" y="763" type="line"/>
<point x="20" y="657" type="line"/>
<point x="1180" y="657" type="line"/>
<point x="1180" y="763" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/chrome-minimize.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/chrome-minimize.glif",
"repo_id": "cascadia-code",
"token_count": 179
}
| 584 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="collapse-all" format="2">
<advance width="1200"/>
<unicode hex="EAC5"/>
<note>
collapse-all
</note>
<outline>
<contour>
<point x="695" y="517" type="line"/>
<point x="695" y="609" type="line"/>
<point x="211" y="609" type="line"/>
<point x="211" y="517" type="line"/>
</contour>
<contour>
<point x="309" y="996" type="line"/>
<point x="113" y="996" type="line"/>
<point x="20" y="903" type="line"/>
<point x="20" y="223" type="line"/>
<point x="113" y="130" type="line"/>
<point x="793" y="130" type="line"/>
<point x="886" y="223" type="line"/>
<point x="886" y="419" type="line"/>
<point x="1082" y="419" type="line"/>
<point x="1180" y="517" type="line"/>
<point x="1180" y="1192" type="line"/>
<point x="1082" y="1290" type="line"/>
<point x="407" y="1290" type="line"/>
<point x="309" y="1192" type="line"/>
</contour>
<contour>
<point x="407" y="1192" type="line"/>
<point x="1082" y="1192" type="line"/>
<point x="1082" y="517" type="line"/>
<point x="886" y="517" type="line"/>
<point x="886" y="903" type="line"/>
<point x="793" y="996" type="line"/>
<point x="407" y="996" type="line"/>
</contour>
<contour>
<point x="793" y="223" type="line"/>
<point x="113" y="223" type="line"/>
<point x="113" y="903" type="line"/>
<point x="793" y="903" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/collapse-all.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/collapse-all.glif",
"repo_id": "cascadia-code",
"token_count": 742
}
| 585 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="debug-pause" format="2">
<advance width="1200"/>
<unicode hex="EAD1"/>
<note>
debug-pause
</note>
<outline>
<contour>
<point x="101" y="0" type="line"/>
<point x="321" y="0" type="line"/>
<point x="321" y="1420" type="line"/>
<point x="101" y="1420" type="line"/>
</contour>
<contour>
<point x="887" y="1420" type="line"/>
<point x="887" y="0" type="line"/>
<point x="1099" y="0" type="line"/>
<point x="1099" y="1420" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/debug-pause.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/debug-pause.glif",
"repo_id": "cascadia-code",
"token_count": 278
}
| 586 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="device-camera-video" format="2">
<advance width="1200"/>
<unicode hex="EAD9"/>
<note>
device-camera-video
</note>
<outline>
<contour>
<point x="1180" y="945" type="line"/>
<point x="1118" y="980" type="line"/>
<point x="848" y="825" type="line"/>
<point x="848" y="1002" type="line"/>
<point x="808" y="1042" type="line"/>
<point x="60" y="1042" type="line"/>
<point x="20" y="1002" type="line"/>
<point x="20" y="418" type="line"/>
<point x="60" y="378" type="line"/>
<point x="808" y="378" type="line"/>
<point x="848" y="418" type="line"/>
<point x="848" y="586" type="line"/>
<point x="1118" y="431" type="line"/>
<point x="1180" y="471" type="line"/>
</contour>
<contour>
<point x="768" y="958" type="line"/>
<point x="768" y="462" type="line"/>
<point x="104" y="462" type="line"/>
<point x="104" y="958" type="line"/>
</contour>
<contour>
<point x="1100" y="878" type="line"/>
<point x="1100" y="546" type="line"/>
<point x="848" y="683" type="line"/>
<point x="848" y="728" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/device-camera-video.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/device-camera-video.glif",
"repo_id": "cascadia-code",
"token_count": 590
}
| 587 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="filter-filled" format="2">
<advance width="1200"/>
<unicode hex="EBCE"/>
<note>
filter-filled
</note>
<outline>
<contour>
<point x="20" y="1208" type="line"/>
<point x="20" y="1066" type="line"/>
<point x="436" y="672" type="line"/>
<point x="436" y="212" type="line"/>
<point x="768" y="212" type="line"/>
<point x="768" y="672" type="line"/>
<point x="1180" y="1066" type="line"/>
<point x="1180" y="1208" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/filter-filled.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/filter-filled.glif",
"repo_id": "cascadia-code",
"token_count": 262
}
| 588 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="git-pull-request-closed" format="2">
<advance width="1200"/>
<unicode hex="EBDA"/>
<note>
git-pull-request-closed
</note>
<outline>
<contour>
<point x="435" y="1056"/>
<point x="435" y="1122" type="qcurve" smooth="yes"/>
<point x="435" y="1162"/>
<point x="405" y="1241"/>
<point x="347" y="1298"/>
<point x="268" y="1329"/>
<point x="228" y="1329" type="qcurve" smooth="yes"/>
<point x="162" y="1329"/>
<point x="61" y="1259"/>
<point x="12" y="1144"/>
<point x="34" y="1021"/>
<point x="123" y="933"/>
<point x="184" y="919" type="qcurve"/>
<point x="184" y="501" type="line"/>
<point x="123" y="488"/>
<point x="34" y="400"/>
<point x="12" y="276"/>
<point x="61" y="162"/>
<point x="162" y="91"/>
<point x="228" y="91" type="qcurve" smooth="yes"/>
<point x="268" y="91"/>
<point x="347" y="122"/>
<point x="405" y="184"/>
<point x="435" y="259"/>
<point x="435" y="303" type="qcurve" smooth="yes"/>
<point x="435" y="364"/>
<point x="365" y="470"/>
<point x="308" y="492" type="qcurve" smooth="yes"/>
<point x="290" y="501"/>
<point x="272" y="505" type="qcurve"/>
<point x="272" y="919" type="line"/>
<point x="308" y="928" type="line"/>
<point x="365" y="955"/>
<point x="400" y="1008" type="qcurve" smooth="yes"/>
</contour>
<contour>
<point x="356" y="325"/>
<point x="352" y="259"/>
<point x="312" y="197"/>
<point x="242" y="170"/>
<point x="171" y="184"/>
<point x="114" y="236"/>
<point x="101" y="311"/>
<point x="131" y="377"/>
<point x="193" y="422"/>
<point x="264" y="422"/>
<point x="321" y="386"/>
<point x="338" y="355" type="qcurve" smooth="yes"/>
</contour>
<contour>
<point x="131" y="1043"/>
<point x="101" y="1113"/>
<point x="114" y="1184"/>
<point x="167" y="1237"/>
<point x="242" y="1254"/>
<point x="312" y="1223"/>
<point x="352" y="1166"/>
<point x="356" y="1096"/>
<point x="321" y="1034"/>
<point x="264" y="999"/>
<point x="193" y="999"/>
<point x="162" y="1021" type="qcurve" smooth="yes"/>
</contour>
<contour>
<point x="1013" y="796" type="line"/>
<point x="933" y="796" type="line"/>
<point x="933" y="501" type="line"/>
<point x="889" y="492"/>
<point x="858" y="470" type="qcurve" smooth="yes"/>
<point x="805" y="435"/>
<point x="757" y="320"/>
<point x="783" y="197"/>
<point x="872" y="109"/>
<point x="995" y="82"/>
<point x="1110" y="131"/>
<point x="1180" y="236"/>
<point x="1180" y="298" type="qcurve" smooth="yes"/>
<point x="1180" y="382"/>
<point x="1118" y="444" type="qcurve" smooth="yes"/>
<point x="1074" y="488"/>
<point x="1013" y="501" type="qcurve"/>
</contour>
<contour>
<point x="1030" y="179"/>
<point x="942" y="170"/>
<point x="872" y="219"/>
<point x="845" y="285"/>
<point x="858" y="360"/>
<point x="911" y="413"/>
<point x="986" y="426"/>
<point x="1057" y="400"/>
<point x="1101" y="329"/>
<point x="1092" y="241"/>
<point x="1061" y="210" type="qcurve" smooth="yes"/>
</contour>
<contour>
<point x="766" y="1272" type="line"/>
<point x="902" y="1131" type="line"/>
<point x="766" y="994" type="line"/>
<point x="823" y="933" type="line"/>
<point x="960" y="1074" type="line"/>
<point x="1101" y="933" type="line"/>
<point x="1158" y="994" type="line"/>
<point x="1021" y="1131" type="line"/>
<point x="1158" y="1272" type="line"/>
<point x="1101" y="1329" type="line"/>
<point x="960" y="1193" type="line"/>
<point x="823" y="1329" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/git-pull-request-closed.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/git-pull-request-closed.glif",
"repo_id": "cascadia-code",
"token_count": 2029
}
| 589 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="group-by-ref-type" format="2">
<advance width="1200"/>
<unicode hex="EB97"/>
<note>
group-by-ref-type
</note>
<outline>
<contour>
<point x="20" y="1290" type="line"/>
<point x="20" y="130" type="line"/>
<point x="63" y="87" type="line"/>
<point x="243" y="87" type="line"/>
<point x="243" y="178" type="line"/>
<point x="110" y="178" type="line"/>
<point x="110" y="1247" type="line"/>
<point x="243" y="1247" type="line"/>
<point x="243" y="1333" type="line"/>
<point x="63" y="1333" type="line"/>
</contour>
<contour>
<point x="643" y="843" type="line"/>
<point x="643" y="1024" type="line"/>
<point x="600" y="1067" type="line"/>
<point x="419" y="1067" type="line"/>
<point x="377" y="1024" type="line"/>
<point x="377" y="843" type="line"/>
<point x="419" y="800" type="line"/>
<point x="600" y="800" type="line"/>
</contour>
<contour>
<point x="467" y="976" type="line"/>
<point x="552" y="976" type="line"/>
<point x="552" y="891" type="line"/>
<point x="467" y="891" type="line"/>
</contour>
<contour>
<point x="1180" y="843" type="line"/>
<point x="1180" y="1109" type="line"/>
<point x="1132" y="1157" type="line"/>
<point x="866" y="1157" type="line"/>
<point x="823" y="1109" type="line"/>
<point x="823" y="843" type="line"/>
<point x="866" y="800" type="line"/>
<point x="1132" y="800" type="line"/>
</contour>
<contour>
<point x="909" y="1067" type="line"/>
<point x="1090" y="1067" type="line"/>
<point x="1090" y="891" type="line"/>
<point x="909" y="891" type="line"/>
</contour>
<contour>
<point x="643" y="396" type="line"/>
<point x="643" y="577" type="line"/>
<point x="600" y="620" type="line"/>
<point x="419" y="620" type="line"/>
<point x="377" y="577" type="line"/>
<point x="377" y="396" type="line"/>
<point x="419" y="353" type="line"/>
<point x="600" y="353" type="line"/>
</contour>
<contour>
<point x="467" y="534" type="line"/>
<point x="552" y="534" type="line"/>
<point x="552" y="444" type="line"/>
<point x="467" y="444" type="line"/>
</contour>
<contour>
<point x="1180" y="311" type="line"/>
<point x="1180" y="577" type="line"/>
<point x="1132" y="620" type="line"/>
<point x="866" y="620" type="line"/>
<point x="823" y="577" type="line"/>
<point x="823" y="311" type="line"/>
<point x="866" y="263" type="line"/>
<point x="1132" y="263" type="line"/>
</contour>
<contour>
<point x="909" y="534" type="line"/>
<point x="1090" y="534" type="line"/>
<point x="1090" y="353" type="line"/>
<point x="909" y="353" type="line"/>
</contour>
<contour>
<point x="823" y="444" type="line"/>
<point x="823" y="534" type="line"/>
<point x="643" y="534" type="line"/>
<point x="643" y="444" type="line"/>
</contour>
<contour>
<point x="823" y="891" type="line"/>
<point x="823" y="976" type="line"/>
<point x="643" y="976" type="line"/>
<point x="643" y="891" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/group-by-ref-type.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/group-by-ref-type.glif",
"repo_id": "cascadia-code",
"token_count": 1636
}
| 590 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="list-unordered" format="2">
<advance width="1200"/>
<unicode hex="EB17"/>
<note>
list-unordered
</note>
<outline>
<contour>
<point x="104" y="1042" type="line"/>
<point x="104" y="1126" type="line"/>
<point x="20" y="1126" type="line"/>
<point x="20" y="1042" type="line"/>
</contour>
<contour>
<point x="104" y="794" type="line"/>
<point x="104" y="878" type="line"/>
<point x="20" y="878" type="line"/>
<point x="20" y="794" type="line"/>
</contour>
<contour>
<point x="20" y="546" type="line"/>
<point x="104" y="546" type="line"/>
<point x="104" y="626" type="line"/>
<point x="20" y="626" type="line"/>
</contour>
<contour>
<point x="104" y="294" type="line"/>
<point x="104" y="378" type="line"/>
<point x="20" y="378" type="line"/>
<point x="20" y="294" type="line"/>
</contour>
<contour>
<point x="268" y="1042" type="line"/>
<point x="1180" y="1042" type="line"/>
<point x="1180" y="1126" type="line"/>
<point x="268" y="1126" type="line"/>
</contour>
<contour>
<point x="1180" y="794" type="line"/>
<point x="1180" y="878" type="line"/>
<point x="268" y="878" type="line"/>
<point x="268" y="794" type="line"/>
</contour>
<contour>
<point x="268" y="546" type="line"/>
<point x="1180" y="546" type="line"/>
<point x="1180" y="626" type="line"/>
<point x="268" y="626" type="line"/>
</contour>
<contour>
<point x="1180" y="294" type="line"/>
<point x="1180" y="378" type="line"/>
<point x="268" y="378" type="line"/>
<point x="268" y="294" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/list-unordered.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/list-unordered.glif",
"repo_id": "cascadia-code",
"token_count": 869
}
| 591 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="pulse" format="2">
<advance width="1200"/>
<unicode hex="EB31"/>
<note>
pulse
</note>
<outline>
<contour>
<point x="1180" y="584" type="line"/>
<point x="914" y="584" type="line"/>
<point x="768" y="1084" type="line"/>
<point x="684" y="1084" type="line"/>
<point x="529" y="531" type="line"/>
<point x="432" y="942" type="line"/>
<point x="352" y="942" type="line"/>
<point x="255" y="584" type="line"/>
<point x="20" y="584" type="line"/>
<point x="20" y="504" type="line"/>
<point x="286" y="504" type="line"/>
<point x="330" y="535" type="line"/>
<point x="387" y="756" type="line"/>
<point x="485" y="336" type="line"/>
<point x="569" y="336" type="line"/>
<point x="724" y="920" type="line"/>
<point x="844" y="531" type="line"/>
<point x="883" y="504" type="line"/>
<point x="1180" y="504" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/pulse.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/pulse.glif",
"repo_id": "cascadia-code",
"token_count": 485
}
| 592 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="run-below" format="2">
<advance width="1200"/>
<unicode hex="EBBE"/>
<note>
run-below
</note>
<outline>
<contour>
<point x="813" y="792" type="line"/>
<point x="85" y="1275" type="line"/>
<point x="20" y="1240" type="line"/>
<point x="20" y="270" type="line"/>
<point x="85" y="240" type="line"/>
<point x="813" y="723" type="line"/>
</contour>
<contour>
<point x="714" y="757" type="line"/>
<point x="102" y="348" type="line"/>
<point x="102" y="1167" type="line"/>
</contour>
<contour>
<point x="1180" y="343" type="line"/>
<point x="1124" y="404" type="line"/>
<point x="990" y="270" type="line"/>
<point x="990" y="710" type="line"/>
<point x="908" y="710" type="line"/>
<point x="908" y="270" type="line"/>
<point x="775" y="404" type="line"/>
<point x="719" y="348" type="line"/>
<point x="921" y="145" type="line"/>
<point x="977" y="145" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/run-below.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/run-below.glif",
"repo_id": "cascadia-code",
"token_count": 520
}
| 593 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="sign-out" format="2">
<advance width="1200"/>
<unicode hex="EA6E"/>
<note>
sign-out
</note>
<outline>
<contour>
<point x="823" y="1221" type="line"/>
<point x="823" y="1150" type="line"/>
<point x="823" y="1012" type="line"/>
<point x="909" y="1102" type="line"/>
<point x="909" y="1264" type="line"/>
<point x="866" y="1311" type="line"/>
<point x="63" y="1311" type="line"/>
<point x="20" y="1264" type="line"/>
<point x="20" y="1221" type="line"/>
<point x="20" y="1221" type="line"/>
<point x="20" y="304" type="line"/>
<point x="49" y="261" type="line"/>
<point x="495" y="109" type="line"/>
<point x="552" y="151" type="line"/>
<point x="552" y="242" type="line"/>
<point x="866" y="242" type="line"/>
<point x="909" y="285" type="line"/>
<point x="909" y="446" type="line"/>
<point x="823" y="536" type="line"/>
<point x="823" y="327" type="line"/>
<point x="552" y="327" type="line"/>
<point x="552" y="1069" type="line"/>
<point x="524" y="1107" type="line"/>
<point x="201" y="1221" type="line"/>
</contour>
<contour>
<point x="462" y="1036" type="line"/>
<point x="462" y="213" type="line"/>
<point x="106" y="332" type="line"/>
<point x="106" y="1155" type="line"/>
</contour>
<contour>
<point x="900" y="584" type="line"/>
<point x="961" y="522" type="line"/>
<point x="1180" y="741" type="line"/>
<point x="1180" y="807" type="line"/>
<point x="961" y="1026" type="line"/>
<point x="900" y="964" type="line"/>
<point x="1042" y="822" type="line"/>
<point x="600" y="822" type="line"/>
<point x="600" y="731" type="line"/>
<point x="1047" y="731" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/sign-out.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/sign-out.glif",
"repo_id": "cascadia-code",
"token_count": 921
}
| 594 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="symbol-enum-member" format="2">
<advance width="1200"/>
<unicode hex="EB5E"/>
<note>
symbol-enum-member
</note>
<outline>
<contour>
<point x="516" y="876" type="line"/>
<point x="600" y="876" type="line"/>
<point x="600" y="1124" type="line"/>
<point x="1100" y="1124" type="line"/>
<point x="1100" y="708" type="line"/>
<point x="768" y="708" type="line"/>
<point x="768" y="624" type="line"/>
<point x="1100" y="624" type="line"/>
<point x="1180" y="708" type="line"/>
<point x="1180" y="1124" type="line"/>
<point x="1100" y="1208" type="line"/>
<point x="600" y="1208" type="line"/>
<point x="516" y="1124" type="line"/>
</contour>
<contour>
<point x="684" y="292" type="line"/>
<point x="684" y="624" type="line"/>
<point x="684" y="708" type="line"/>
<point x="600" y="792" type="line"/>
<point x="104" y="792" type="line"/>
<point x="20" y="708" type="line"/>
<point x="20" y="292" type="line"/>
<point x="104" y="212" type="line"/>
<point x="600" y="212" type="line"/>
</contour>
<contour>
<point x="104" y="708" type="line"/>
<point x="600" y="708" type="line"/>
<point x="600" y="292" type="line"/>
<point x="104" y="292" type="line"/>
</contour>
<contour>
<point x="1016" y="792" type="line"/>
<point x="1016" y="876" type="line"/>
<point x="684" y="876" type="line"/>
<point x="684" y="827" type="line"/>
<point x="720" y="792" type="line"/>
</contour>
<contour>
<point x="684" y="956" type="line"/>
<point x="1016" y="956" type="line"/>
<point x="1016" y="1040" type="line"/>
<point x="684" y="1040" type="line"/>
</contour>
<contour>
<point x="516" y="460" type="line"/>
<point x="516" y="544" type="line"/>
<point x="184" y="544" type="line"/>
<point x="184" y="460" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/symbol-enum-member.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/symbol-enum-member.glif",
"repo_id": "cascadia-code",
"token_count": 980
}
| 595 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="symbol-snippet" format="2">
<advance width="1200"/>
<unicode hex="EB66"/>
<note>
symbol-snippet
</note>
<outline>
<contour>
<point x="1137" y="1335" type="line"/>
<point x="63" y="1335" type="line"/>
<point x="20" y="1292" type="line"/>
<point x="20" y="261" type="line"/>
<point x="106" y="261" type="line"/>
<point x="106" y="1249" type="line"/>
<point x="1094" y="1249" type="line"/>
<point x="1094" y="261" type="line"/>
<point x="1180" y="261" type="line"/>
<point x="1180" y="1292" type="line"/>
</contour>
<contour>
<point x="106" y="85" type="line"/>
<point x="106" y="175" type="line"/>
<point x="20" y="175" type="line"/>
<point x="20" y="85" type="line"/>
</contour>
<contour>
<point x="287" y="85" type="line"/>
<point x="287" y="175" type="line"/>
<point x="197" y="175" type="line"/>
<point x="197" y="85" type="line"/>
</contour>
<contour>
<point x="378" y="85" type="line"/>
<point x="464" y="85" type="line"/>
<point x="464" y="175" type="line"/>
<point x="378" y="175" type="line"/>
</contour>
<contour>
<point x="645" y="85" type="line"/>
<point x="645" y="175" type="line"/>
<point x="555" y="175" type="line"/>
<point x="555" y="85" type="line"/>
</contour>
<contour>
<point x="736" y="85" type="line"/>
<point x="822" y="85" type="line"/>
<point x="822" y="175" type="line"/>
<point x="736" y="175" type="line"/>
</contour>
<contour>
<point x="1094" y="85" type="line"/>
<point x="1180" y="85" type="line"/>
<point x="1180" y="175" type="line"/>
<point x="1094" y="175" type="line"/>
</contour>
<contour>
<point x="913" y="85" type="line"/>
<point x="1003" y="85" type="line"/>
<point x="1003" y="175" type="line"/>
<point x="913" y="175" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/symbol-snippet.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/symbol-snippet.glif",
"repo_id": "cascadia-code",
"token_count": 994
}
| 596 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="unfold" format="2">
<advance width="1200"/>
<unicode hex="EB73"/>
<note>
unfold
</note>
<outline>
<contour>
<point x="644" y="854" type="line"/>
<point x="644" y="1241" type="line"/>
<point x="742" y="1143" type="line"/>
<point x="809" y="1210" type="line"/>
<point x="634" y="1385" type="line"/>
<point x="561" y="1385" type="line"/>
<point x="386" y="1215" type="line"/>
<point x="453" y="1143" type="line"/>
<point x="551" y="1241" type="line"/>
<point x="551" y="854" type="line"/>
</contour>
<contour>
<point x="644" y="566" type="line"/>
<point x="551" y="566" type="line"/>
<point x="551" y="179" type="line"/>
<point x="453" y="277" type="line"/>
<point x="386" y="210" type="line"/>
<point x="561" y="35" type="line"/>
<point x="634" y="35" type="line"/>
<point x="809" y="210" type="line"/>
<point x="742" y="277" type="line"/>
<point x="644" y="179" type="line"/>
</contour>
<contour>
<point x="747" y="1097" type="line"/>
<point x="747" y="999" type="line"/>
<point x="1036" y="999" type="line"/>
<point x="737" y="767" type="line"/>
<point x="448" y="767" type="line"/>
<point x="159" y="999" type="line"/>
<point x="453" y="999" type="line"/>
<point x="453" y="1097" type="line"/>
<point x="46" y="1097" type="line"/>
<point x="20" y="999" type="line"/>
<point x="376" y="705" type="line"/>
<point x="20" y="421" type="line"/>
<point x="46" y="323" type="line"/>
<point x="453" y="323" type="line"/>
<point x="453" y="421" type="line"/>
<point x="159" y="421" type="line"/>
<point x="458" y="653" type="line"/>
<point x="747" y="653" type="line"/>
<point x="1036" y="421" type="line"/>
<point x="747" y="421" type="line"/>
<point x="747" y="323" type="line"/>
<point x="1149" y="323" type="line"/>
<point x="1180" y="421" type="line"/>
<point x="824" y="715" type="line"/>
<point x="1180" y="999" type="line"/>
<point x="1149" y="1097" type="line"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/unfold.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/codicon.ufo/glyphs/unfold.glif",
"repo_id": "cascadia-code",
"token_count": 1074
}
| 597 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="bigger" format="2">
<advance width="1200"/>
<unicode hex="E285"/>
<note>
bigger
</note>
<outline>
<contour>
<point x="1035" y="751"/>
<point x="1003" y="782" type="qcurve" smooth="yes"/>
<point x="951" y="838" type="line"/>
<point x="945" y="840" type="line"/>
<point x="397" y="1391" type="line" smooth="yes"/>
<point x="368" y="1420"/>
<point x="281" y="1420"/>
<point x="252" y="1391" type="qcurve" smooth="yes"/>
<point x="197" y="1336" type="line" smooth="yes"/>
<point x="165" y="1307"/>
<point x="165" y="1220"/>
<point x="197" y="1191" type="qcurve" smooth="yes"/>
<point x="675" y="710" type="line"/>
<point x="197" y="229" type="line" smooth="yes"/>
<point x="165" y="200"/>
<point x="165" y="113"/>
<point x="197" y="84" type="qcurve" smooth="yes"/>
<point x="252" y="29" type="line" smooth="yes"/>
<point x="281" y="0"/>
<point x="368" y="0"/>
<point x="397" y="29" type="qcurve" smooth="yes"/>
<point x="945" y="580" type="line"/>
<point x="948" y="580"/>
<point x="951" y="582" type="qcurve" smooth="yes"/>
<point x="1003" y="638" type="line" smooth="yes"/>
<point x="1035" y="669"/>
</contour>
</outline>
<lib>
<dict>
<key>com.schriftgestaltung.Glyphs.lastChange</key>
<string>2024-02-27 18:45:38 +0000</string>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/font-awesome-extension.ufo/glyphs/bigger.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/font-awesome-extension.ufo/glyphs/bigger.glif",
"repo_id": "cascadia-code",
"token_count": 718
}
| 598 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="Endeavour OS" format="2">
<advance width="1200"/>
<unicode hex="F322"/>
<note>
Endeavour OS
</note>
<outline>
<contour>
<point x="150" y="371"/>
<point x="159" y="371" type="qcurve" smooth="yes"/>
<point x="168" y="371" type="line"/>
<point x="248" y="499" type="line" smooth="yes"/>
<point x="590" y="1049"/>
<point x="668" y="1156" type="qcurve" smooth="yes"/>
<point x="672" y="1163"/>
<point x="688" y="1181"/>
<point x="695" y="1192"/>
<point x="695" y="1192" type="qcurve"/>
<point x="691" y="1192"/>
<point x="666" y="1163" type="qcurve" smooth="yes"/>
<point x="654" y="1151"/>
<point x="606" y="1096"/>
<point x="597" y="1087" type="qcurve" smooth="yes"/>
<point x="481" y="957"/>
<point x="177" y="595" type="qcurve" smooth="yes"/>
<point x="22" y="407"/>
<point x="20" y="396" type="qcurve"/>
<point x="29" y="391"/>
<point x="127" y="375" type="qcurve" smooth="yes"/>
</contour>
<contour>
<point x="618" y="220"/>
<point x="937" y="259" type="qcurve" smooth="yes"/>
<point x="1058" y="273"/>
<point x="1113" y="314" type="qcurve" smooth="yes"/>
<point x="1195" y="373"/>
<point x="1177" y="492" type="qcurve" smooth="yes"/>
<point x="1149" y="668"/>
<point x="907" y="960" type="qcurve" smooth="yes"/>
<point x="814" y="1071"/>
<point x="725" y="1167" type="qcurve" smooth="yes"/>
<point x="711" y="1183"/>
<point x="700" y="1192" type="qcurve"/>
<point x="695" y="1192"/>
<point x="727" y="1149"/>
<point x="748" y="1122" type="qcurve" smooth="yes"/>
<point x="974" y="818"/>
<point x="1031" y="624" type="qcurve" smooth="yes"/>
<point x="1049" y="565"/>
<point x="1051" y="510" type="qcurve" smooth="yes"/>
<point x="1051" y="476"/>
<point x="1044" y="449" type="qcurve" smooth="yes"/>
<point x="1019" y="353"/>
<point x="880" y="328" type="qcurve" smooth="yes"/>
<point x="860" y="325"/>
<point x="704" y="325"/>
<point x="652" y="328" type="qcurve" smooth="yes"/>
<point x="451" y="341"/>
<point x="239" y="362" type="qcurve" smooth="yes"/>
<point x="171" y="369"/>
<point x="166" y="366"/>
<point x="86" y="234"/>
<point x="88" y="232"/>
<point x="161" y="229" type="qcurve" smooth="yes"/>
</contour>
<contour>
<point x="670" y="1133"/>
<point x="584" y="1001" type="qcurve" smooth="yes"/>
<point x="501" y="873"/>
<point x="346" y="624" type="qcurve" smooth="yes"/>
<point x="232" y="442"/>
<point x="196" y="382" type="qcurve"/>
<point x="196" y="382"/>
<point x="200" y="382" type="qcurve" smooth="yes"/>
<point x="205" y="382"/>
<point x="205" y="380" type="qcurve"/>
<point x="435" y="360"/>
<point x="584" y="348" type="qcurve" smooth="yes"/>
<point x="700" y="341"/>
<point x="768" y="341" type="qcurve" smooth="yes"/>
<point x="805" y="339"/>
<point x="834" y="341" type="qcurve" smooth="yes"/>
<point x="914" y="346"/>
<point x="958" y="369" type="qcurve" smooth="yes"/>
<point x="1006" y="394"/>
<point x="1024" y="442" type="qcurve" smooth="yes"/>
<point x="1042" y="485"/>
<point x="1033" y="549" type="qcurve" smooth="yes"/>
<point x="1031" y="560"/>
<point x="1031" y="565" type="qcurve"/>
<point x="1026" y="583"/>
<point x="1022" y="604" type="qcurve" smooth="yes"/>
<point x="1017" y="620"/>
<point x="1010" y="638" type="qcurve" smooth="yes"/>
<point x="992" y="693"/>
<point x="960" y="756" type="qcurve" smooth="yes"/>
<point x="951" y="777"/>
<point x="949" y="779" type="qcurve"/>
<point x="882" y="905"/>
<point x="766" y="1069" type="qcurve" smooth="yes"/>
<point x="704" y="1153"/>
<point x="695" y="1167" type="qcurve"/>
<point x="695" y="1167"/>
<point x="693" y="1163"/>
<point x="693" y="1163" type="qcurve"/>
</contour>
</outline>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/font-logos.ufo/glyphs/E_ndeavour O_S_.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/font-logos.ufo/glyphs/E_ndeavour O_S_.glif",
"repo_id": "cascadia-code",
"token_count": 2063
}
| 599 |
<?xml version='1.0' encoding='UTF-8'?>
<glyph name="cloud-offline" format="2">
<advance width="1200"/>
<unicode hex="F4AD"/>
<note>
cloud-offline
</note>
<outline>
<contour>
<point x="471" y="1200"/>
<point x="404" y="1173" type="qcurve" smooth="yes"/>
<point x="388" y="1167"/>
<point x="369" y="1139"/>
<point x="369" y="1122" type="qcurve" smooth="yes"/>
<point x="369" y="1100"/>
<point x="401" y="1068"/>
<point x="424" y="1068" type="qcurve" smooth="yes"/>
<point x="434" y="1068"/>
<point x="444" y="1072" type="qcurve" smooth="yes"/>
<point x="492" y="1091"/>
<point x="546" y="1091" type="qcurve" smooth="yes"/>
<point x="547" y="1091"/>
<point x="548" y="1091" type="qcurve" smooth="yes"/>
<point x="635" y="1091"/>
<point x="774" y="990"/>
<point x="802" y="911" type="qcurve" smooth="yes"/>
<point x="808" y="894"/>
<point x="836" y="874"/>
<point x="854" y="874" type="qcurve" smooth="yes"/>
<point x="945" y="874"/>
<point x="1071" y="747"/>
<point x="1071" y="656" type="qcurve" smooth="yes"/>
<point x="1071" y="656"/>
<point x="1071" y="655" type="qcurve" smooth="yes"/>
<point x="1071" y="577"/>
<point x="1022" y="517" type="qcurve" smooth="yes"/>
<point x="1011" y="502"/>
<point x="1011" y="483" type="qcurve" smooth="yes"/>
<point x="1011" y="461"/>
<point x="1042" y="429"/>
<point x="1065" y="429" type="qcurve" smooth="yes"/>
<point x="1089" y="429"/>
<point x="1106" y="448" type="qcurve" smooth="yes"/>
<point x="1180" y="538"/>
<point x="1180" y="654" type="qcurve" smooth="yes"/>
<point x="1180" y="655"/>
<point x="1180" y="656"/>
<point x="1180" y="656" type="qcurve"/>
<point x="1180" y="657"/>
<point x="1180" y="658" type="qcurve" smooth="yes"/>
<point x="1180" y="720"/>
<point x="1135" y="833"/>
<point x="1058" y="918"/>
<point x="951" y="974"/>
<point x="891" y="980" type="qcurve"/>
<point x="845" y="1079"/>
<point x="660" y="1201"/>
<point x="547" y="1201" type="qcurve"/>
<point x="547" y="1201"/>
<point x="546" y="1200"/>
<point x="546" y="1200" type="qcurve" smooth="yes"/>
</contour>
<contour>
<point x="56" y="1159"/>
<point x="72" y="1143" type="qcurve" smooth="yes"/>
<point x="107" y="1108" type="line" smooth="yes"/>
<point x="142" y="1074"/>
<point x="212" y="1003"/>
<point x="212" y="1003" type="qcurve"/>
<point x="165" y="918"/>
<point x="165" y="821" type="qcurve" smooth="yes"/>
<point x="165" y="818"/>
<point x="165" y="814" type="qcurve"/>
<point x="99" y="783"/>
<point x="20" y="660"/>
<point x="20" y="584" type="qcurve" smooth="yes"/>
<point x="20" y="583"/>
<point x="20" y="582"/>
<point x="20" y="582" type="qcurve"/>
<point x="20" y="530"/>
<point x="60" y="438"/>
<point x="128" y="370"/>
<point x="221" y="330"/>
<point x="272" y="330" type="qcurve" smooth="yes"/>
<point x="273" y="330"/>
<point x="274" y="330" type="qcurve" smooth="yes"/>
<point x="854" y="330" type="line" smooth="yes"/>
<point x="869" y="330"/>
<point x="884" y="331" type="qcurve"/>
<point x="1015" y="201" type="line" smooth="yes"/>
<point x="1031" y="184"/>
<point x="1054" y="184" type="qcurve" smooth="yes"/>
<point x="1061" y="184"/>
<point x="1068" y="186" type="qcurve" smooth="yes"/>
<point x="1086" y="190"/>
<point x="1109" y="219"/>
<point x="1109" y="238" type="qcurve" smooth="yes"/>
<point x="1109" y="261"/>
<point x="1092" y="278" type="qcurve" smooth="yes"/>
<point x="943" y="426" type="line"/>
<point x="941" y="428" type="line"/>
<point x="322" y="1047" type="line"/>
<point x="320" y="1050"/>
<point x="317" y="1053" type="qcurve"/>
<point x="275" y="1095" type="line" smooth="yes"/>
<point x="233" y="1137"/>
<point x="149" y="1220"/>
<point x="149" y="1220" type="qcurve"/>
<point x="133" y="1236"/>
<point x="88" y="1236"/>
<point x="56" y="1204"/>
</contour>
<contour>
<point x="274" y="819"/>
<point x="274" y="820"/>
<point x="274" y="820" type="qcurve" smooth="yes"/>
<point x="274" y="873"/>
<point x="294" y="922" type="qcurve"/>
<point x="777" y="439" type="line"/>
<point x="274" y="439" type="line" smooth="yes"/>
<point x="213" y="439"/>
<point x="129" y="523"/>
<point x="129" y="584" type="qcurve" smooth="yes"/>
<point x="129" y="635"/>
<point x="189" y="712"/>
<point x="236" y="724" type="qcurve" smooth="yes"/>
<point x="254" y="729"/>
<point x="277" y="758"/>
<point x="277" y="777" type="qcurve" smooth="yes"/>
<point x="277" y="781"/>
<point x="277" y="784" type="qcurve" smooth="yes"/>
<point x="274" y="802"/>
<point x="274" y="819" type="qcurve"/>
</contour>
</outline>
<lib>
<dict>
<key>com.schriftgestaltung.Glyphs.lastChange</key>
<string>2024-02-27 18:49:53 +0000</string>
</dict>
</lib>
</glyph>
|
cascadia-code/sources/nerdfonts/full/processed/octicons.ufo/glyphs/cloud-offline.glif/0
|
{
"file_path": "cascadia-code/sources/nerdfonts/full/processed/octicons.ufo/glyphs/cloud-offline.glif",
"repo_id": "cascadia-code",
"token_count": 2651
}
| 600 |
# Causica Documentation
```{eval-rst}
.. toctree::
:hidden:
Home <self>
```
```{eval-rst}
.. toctree::
:hidden:
:maxdepth: 2
```
This is the documentation for Causica, a Python package for causal discovery and inference.
|
causica/docs/src/index.md/0
|
{
"file_path": "causica/docs/src/index.md",
"repo_id": "causica",
"token_count": 93
}
| 601 |
from typing import Iterable
import torch
from torch.utils.data import Dataset
from causica.data_generation.generate_data import sample_dataset
from causica.data_generation.samplers.sem_sampler import SEMSampler
from causica.datasets.causal_dataset import CausalDataset
from causica.distributions.transforms import TensorToTensorDictTransform
class CausalMetaset(Dataset):
    """A metaset (a dataset whose items are themselves datasets) built from sampled SEMs.

    Each item is a `CausalDataset` generated from a structural equation model drawn
    from `sem_sampler` (or taken round-robin from a fixed pool of pre-sampled SEMs
    when `num_sems > 0`).

    See Also:
        CausalDataset: For a description of the format of the samples.
    """

    def __init__(
        self,
        sem_sampler: SEMSampler,
        sample_dataset_size: int,
        dataset_size: int,
        num_interventions: int = 0,
        num_intervention_samples: int = 1000,
        num_sems: int = 0,
        sample_interventions: bool = False,
        sample_counterfactuals: bool = False,
    ):
        """
        Args:
            sem_sampler: The sampler for SEMs.
            sample_dataset_size: Number of observational rows drawn from each SEM.
            dataset_size: The length reported by this metaset. A finite size lets it
                be used with `ChainDataset`, which relies on iteration terminating
                before chaining the next dataset.
            num_interventions: Interventions sampled per dataset; 0 disables them.
            num_intervention_samples: Number of interventional samples to draw.
            num_sems: Size of the fixed SEM pool; 0 means a fresh SEM per item.
            sample_interventions: Whether to sample interventions.
            sample_counterfactuals: Whether to sample counterfactuals.
        """
        self.sem_sampler = sem_sampler
        self.sample_dataset_size = torch.Size([sample_dataset_size])
        self.dataset_size = dataset_size
        self.num_interventions = num_interventions
        self.num_intervention_samples = num_intervention_samples
        self.num_sems = num_sems
        self.sample_interventions = sample_interventions
        self.sample_counterfactuals = sample_counterfactuals
        # Pre-draw the SEM pool (empty when num_sems == 0, i.e. fresh SEM per item).
        self.sems = [sem_sampler.sample() for _ in range(num_sems)]
        self.td_to_tensor_transform = TensorToTensorDictTransform(self.sem_sampler.shapes_dict)

    def __len__(self) -> int:
        """Return the configured number of items in this metaset."""
        return self.dataset_size

    def __getitem__(self, index) -> CausalDataset:
        """Return the dataset at `index`, supporting negative indices like a sequence.

        See Also:
            CausalDataset: For a description of the format of the samples.
        """
        size = len(self)
        if not -size <= index < size:
            raise IndexError(f"index {index} out of range for dataset of size {size}")
        return self._sample(index)

    def _sample(self, index: int = 0) -> CausalDataset:
        """Draw one dataset (data, graph, optional interventions/counterfactuals)."""
        # Reuse a pooled SEM round-robin when a pool exists; otherwise sample anew.
        # Python's % keeps the pool index non-negative even for negative `index`.
        sem = self.sems[index % self.num_sems] if self.num_sems > 0 else self.sem_sampler.sample()
        return sample_dataset(
            sem,
            self.sample_dataset_size,
            self.num_interventions,
            self.num_intervention_samples,
            self.sample_interventions,
            self.sample_counterfactuals,
        )
|
causica/src/causica/datasets/synthetic_dataset.py/0
|
{
"file_path": "causica/src/causica/datasets/synthetic_dataset.py",
"repo_id": "causica",
"token_count": 1408
}
| 602 |
from typing import Optional
import torch
import torch.distributions as td
import torch.nn.functional as F
from torch import nn
from torch.distributions import OneHotCategorical
from causica.distributions.noise.noise import Noise, NoiseModule
class CategoricalNoise(OneHotCategorical, Noise):
    """One-hot categorical noise distribution with logits `base_logits + delta_logits`."""

    def __init__(self, delta_logits: torch.Tensor, base_logits: torch.Tensor):
        """
        A Categorical distribution whose logits combine fixed base logits with
        per-sample delta logits (e.g. predictions from a neural network).

        Args:
            delta_logits: Tensor with shape [sample_shape, event_shape]
            base_logits: Tensor with shape [event_shape] where event_shape shows the number of categories for this node
        """
        self.delta_logits = delta_logits
        super().__init__(logits=base_logits + delta_logits, validate_args=False)

    def sample_to_noise(self, samples: torch.Tensor) -> torch.Tensor:
        """
        Map observed one-hot samples to posterior Gumbel noise variables.

        Draws from the posterior of the Gumbel noise given the observation and the
        probabilities implied by `self.base_logits + logit_deltas`, following the
        methodology of https://arxiv.org/pdf/1905.05824.pdf.
        See https://cmaddis.github.io/gumbel-machinery for the Gumbel posterior
        derivation, and https://www.overleaf.com/8628339373sxjmtvyxcqnx for the
        derivation of this exact softplus-based algorithm.

        Args:
            samples: Tensor of shape sample_shape + batch_shape + event_shape

        Returns:
            The generated samples with shape sample_shape + batch_shape + event_shape
        """
        dev = self.delta_logits.device
        gumbel = td.Gumbel(torch.tensor(0.0, device=dev), torch.tensor(1.0, device=dev))
        # Gumbel value attained by the argmax: located at the log-normaliser of the logits.
        argmax_noise = gumbel.sample(samples.shape[:-1] + (1,)) + self.logits.logsumexp(-1, keepdim=True)
        # Candidate Gumbels for every category; the observed category is pinned to
        # +inf so the softplus truncation leaves it exactly at the argmax value.
        category_noise = gumbel.sample(samples.shape) + self.logits
        category_noise[samples == 1] = float("inf")
        return argmax_noise - F.softplus(argmax_noise - category_noise) - self.delta_logits

    def noise_to_sample(self, noise: torch.Tensor) -> torch.Tensor:
        """
        Generate one-hot samples using the given exogenous noise.

        Args:
            noise: noise variable with shape sample_shape + batch_shape.

        Returns:
            The generated samples with shape sample_shape + batch_shape + event_shape
        """
        perturbed = self.delta_logits + noise
        # argmax as a one-hot vector (ties would yield multiple ones, as in the original).
        top_values = torch.max(perturbed, dim=-1, keepdim=True).values
        return (perturbed >= top_values).float()
class CategoricalNoiseModule(NoiseModule[CategoricalNoise]):
    """Represents a CategoricalNoise distribution with learnable logits."""
    def __init__(self, num_classes: int, init_base_logits: torch.Tensor | None = None):
        """
        Args:
            num_classes: Number of classes.
            init_base_logits: Initial base logits; defaults to all zeros when omitted.
        """
        super().__init__()
        if init_base_logits is None:
            init_base_logits = torch.zeros(num_classes)
        else:
            # A provided initialization must supply exactly one logit per class.
            assert init_base_logits.ndim == 1
            assert init_base_logits.shape[0] == num_classes
        self.base_logits = nn.Parameter(init_base_logits)
    def forward(self, x: Optional[torch.Tensor] = None) -> CategoricalNoise:
        # With no predictions provided, use zero deltas (i.e. the base distribution).
        delta_logits = torch.zeros_like(self.base_logits) if x is None else x
        return CategoricalNoise(delta_logits=delta_logits, base_logits=self.base_logits)
|
causica/src/causica/distributions/noise/categorical.py/0
|
{
"file_path": "causica/src/causica/distributions/noise/categorical.py",
"repo_id": "causica",
"token_count": 1416
}
| 603 |
import torch
from tensordict import TensorDict
from causica.functional_relationships.functional_relationships import FunctionalRelationships
class DoFunctionalRelationships(FunctionalRelationships):
    """
    A `FunctionalRelationship` that one can "do", i.e. condition nodes and cut the links to their parents.
    The do intervention can be a single intervention (i.e. empty batch shape) or a batch of interventions. The batch
    shape of the do tensordict batches the interventions and the original functional relationship is broadcast across
    the batch shape of the do tensordict.
    """
    def __init__(self, func: FunctionalRelationships, do: TensorDict, submatrix: torch.Tensor) -> None:
        """
        Args:
            func: The unintervened functional relationships
            do: the nodes on which to intervene. If the do has a batch shape, then the functional relationship will be
                broadcast to that batch shape.
            submatrix: the submatrix that the unintervened nodes represent in the larger graph
        """
        # Scalar interventions are rejected: every intervened variable must be at least a vector.
        if not all(val.ndim >= 1 for val in do.values()):
            raise ValueError("Intervention is only supported for at least vector valued interventions")
        if len({val.ndim for val in do.values()}) > 1:
            raise ValueError("Intervention must have the same number of dimensions for all variables")
        # The intervened SEM only models the non-intervened nodes.
        new_shapes = {key: shape for key, shape in func.shapes.items() if key not in do.keys()}
        super().__init__(new_shapes, batch_shape=do.batch_size + func.batch_shape)
        self.func = func
        self.do = do  # dict of key to vectors
        self.submatrix = submatrix
        # Boolean mask (ordered like func.shapes) marking which nodes are intervened on.
        self.do_nodes_mask = torch.tensor(
            [(name in self.do.keys()) for name in self.func.shapes.keys()], dtype=torch.bool
        )
    def pad_intervened_graphs(self, graphs: torch.Tensor) -> torch.Tensor:
        """
        Pad the intervened graph with the unintervened nodes.
        Args:
            graphs: Weighted adjacency matrix, size batch_size_g + (do_func_n, do_func_n)
        Returns:
            A tensor of shape batch_shape_g + (func_n, func_n)
        """
        num_nodes = self.func.tensor_to_td.num_keys
        target_shape = graphs.shape[:-2] + (num_nodes, num_nodes)
        output_graphs = torch.zeros(target_shape, dtype=graphs.dtype, device=graphs.device)
        # Copy the subgraph over the unintervened nodes, then restore the fixed edges from
        # intervened nodes to unintervened ones; edges *into* intervened nodes stay cut (zero).
        assign_submatrix(output_graphs, graphs, ~self.do_nodes_mask, ~self.do_nodes_mask)
        assign_submatrix(output_graphs, self.submatrix, self.do_nodes_mask, ~self.do_nodes_mask)
        return output_graphs
    def forward(self, samples: TensorDict, graphs: torch.Tensor) -> TensorDict:
        """
        Run forward on the underlying functional relationship with the intervened nodes filled in.
        The samples are expected to have a batch shape in order: samples, functions, graphs.
        Args:
            samples: Batched inputs, size batch_size_x + batch_size_f + batch_shape_g + (concatenated_shape).
            graphs: Weighted adjacency matrix, size batch_size_g + (n, n)
        Returns:
            A tensor of shape batch_shape_x + batch_size_f + batch_shape_g + (concatenated_shape)
        """
        # add the expanded intervention values to the samples
        batch_shape_g = graphs.shape[:-2]
        do = self.do
        if do.batch_dims > 0:
            # Append singleton dims so the intervention batch broadcasts over the graph batch.
            do = do[(...,) + (None,) * len(batch_shape_g)]
        expanded_do = do.expand(*samples.batch_size)
        # Shallow clone: underlying tensors are shared, only the TensorDict structure is copied.
        samples_with_do = samples.clone(False).update(expanded_do)
        # create the full graphs
        graphs = self.pad_intervened_graphs(graphs)
        forward = self.func.forward(samples_with_do, graphs)
        # Drop the intervened nodes from the output so only modelled nodes are returned.
        return forward.select(*(key for key in forward.keys() if key not in self.do.keys()), inplace=True)
def assign_submatrix(A: torch.Tensor, B: torch.Tensor, x_mask: torch.Tensor, y_mask: torch.Tensor) -> None:
    """
    Write `B` into the submatrix of `A` selected by `x_mask` (rows) and `y_mask` (columns).
    The matrix `A` is changed in place.
    Args:
        A: tensor with >=2 dimensions `[*b, x, y]`
        B: tensor with dimensions `[*b, x', y']` where `x'<=x` and `y'<=y`
        x_mask: boolean tensor of shape `[*b, x]` indicating the rows of A that are to be updated. The sum
            of x_mask should equal `x'`
        y_mask: boolean tensor of shape `[*b, y]` indicating the columns of A that are to be updated. The sum
            of y_mask should equal `y'`
    """
    # Build a boolean mask that is True exactly on the row/column intersection.
    target = torch.zeros_like(A, dtype=torch.bool)
    target[..., x_mask, :] = True
    target[..., :, ~y_mask] = False
    # Boolean-mask assignment consumes the values of B in row-major order.
    A[target] = B.flatten()
def create_do_functional_relationship(
    interventions: TensorDict, func: FunctionalRelationships, graph: torch.Tensor
) -> tuple[DoFunctionalRelationships, torch.Tensor]:
    """
    Given a set of interventions, `FunctionalRelationships` and a graph, create a `DoFunctionalRelationships` and an intervened graph.
    Args:
        interventions: the nodes and their intervention values
        func: the functional relationship of the unintervened SEM
        graph: the unintervened graph shape: [..., num_nodes, num_nodes]
    Return:
        A tuple with the intervened functional relationship and the intervened graph
    """
    # NOTE(review): this mutates `func` in place by collapsing a singleton batch shape — callers
    # holding a reference to `func` will observe the change; confirm this is intended.
    if func.batch_shape == torch.Size((1,)):
        func.batch_shape = torch.Size()
    if interventions.ndim > 1:
        raise ValueError("Interventions must be at most a single batch of interventions")
    if graph.ndim > 3:
        raise ValueError("Graph must be at most a single batch of graphs")
    if interventions.batch_dims > 0 and len(func.batch_shape) > 0:
        raise ValueError("Cannot intervene on a batch of interventions and a batch of functional relationships")
    node_names = list(func.shapes.keys())
    # Mask (in func.shapes order) of the nodes being intervened on.
    do_nodes_mask = torch.zeros(len(node_names), dtype=torch.bool)
    for i, name in enumerate(node_names):
        if name in interventions.keys():
            do_nodes_mask[i] = 1
    # Subgraph over the unintervened nodes, plus the fixed edges intervened -> unintervened.
    do_graph = graph[..., ~do_nodes_mask, :][..., :, ~do_nodes_mask]
    submatrix = graph[..., do_nodes_mask, :][..., :, ~do_nodes_mask]
    # Expanding graph if interventions or functions are batched
    # NOTE(review): parses as `(do_graph.ndim == 2 and interventions.batch_dims > 0) or batched-func`,
    # so a batched func triggers the unsqueeze regardless of `do_graph.ndim` — confirm intended.
    if do_graph.ndim == 2 and interventions.batch_dims > 0 or len(func.batch_shape) > 0:
        do_graph = do_graph.unsqueeze(0)
        submatrix = submatrix.unsqueeze(0)
    # Expanding interventions if graph is batched and functions are not
    if do_graph.ndim == 3 and interventions.batch_dims == 0 and len(func.batch_shape) == 0:
        interventions = interventions.unsqueeze(0)
    return DoFunctionalRelationships(func, interventions, submatrix), do_graph
|
causica/src/causica/functional_relationships/do_functional_relationships.py/0
|
{
"file_path": "causica/src/causica/functional_relationships/do_functional_relationships.py",
"repo_id": "causica",
"token_count": 2537
}
| 604 |
import logging
from causica.lightning.callbacks import MLFlowSaveConfigCallback
from causica.lightning.cli import LightningCLIWithDefaults
from causica.lightning.data_modules.deci_data_module import DECIDataModule
from causica.lightning.modules.deci_module import DECIModule
if __name__ == "__main__":
    # Entry point: train and then test a DECI model, with the concrete model/data classes
    # selected through the Lightning CLI configuration.
    # Set Azure logging to warning to prevent spam from HTTP requests
    logging.getLogger("azure").setLevel(logging.WARNING)
    cli = LightningCLIWithDefaults(
        DECIModule,
        DECIDataModule,
        run=False,  # no automatic subcommand; fit/test are invoked explicitly below
        save_config_callback=MLFlowSaveConfigCallback,
        subclass_mode_model=True,  # accept any DECIModule subclass from the config
        subclass_mode_data=True,  # accept any DECIDataModule subclass from the config
    )
    # This automatically resumes training if it finds a "last" checkpoint in one of the output directories of the
    # ModelCheckpoint callbacks.
    # See https://lightning.ai/docs/pytorch/stable/common/trainer.html#fit for more information.
    cli.trainer.fit(cli.model, datamodule=cli.datamodule, ckpt_path="last")
    cli.trainer.test(cli.model, datamodule=cli.datamodule)
|
causica/src/causica/lightning/main.py/0
|
{
"file_path": "causica/src/causica/lightning/main.py",
"repo_id": "causica",
"token_count": 369
}
| 605 |
import math

import numpy as np
import torch
def fill_triangular(vec: torch.Tensor, upper: bool = False) -> torch.Tensor:
    """
    Scatter a vector of n(n-1)/2 elements into a strictly triangular matrix.
    Args:
        vec: A tensor of shape (..., n(n-1)/2)
        upper: whether to fill the upper or lower triangle
    Returns:
        An array of shape (..., n, n), where the strictly upper (lower) triangle is filled from vec
        with zeros elsewhere
    """
    num_nodes = num_lower_tri_elements_to_n(vec.shape[-1])
    idxs = torch.triu_indices(num_nodes, num_nodes, offset=1, device=vec.device)
    # Match dtype as well as device so e.g. float64 input is not silently downcast to the
    # default dtype by the assignment below.
    output = torch.zeros(vec.shape[:-1] + (num_nodes, num_nodes), dtype=vec.dtype, device=vec.device)
    output[..., idxs[0, :], idxs[1, :]] = vec
    # The indices address the upper triangle; transpose to obtain the lower one.
    return output if upper else output.transpose(-1, -2)


def unfill_triangular(mat: torch.Tensor, upper: bool = False) -> torch.Tensor:
    """
    Fill a vector of length n(n-1)/2 with elements from the strictly upper(lower) triangle.
    Args:
        mat: A tensor of shape (..., n, n)
        upper: whether to fill from the upper triangle
    Returns:
        A vector of shape (..., n(n-1)/2), filled from the upper triangle
    """
    num_nodes = mat.shape[-1]
    idxs = torch.triu_indices(num_nodes, num_nodes, offset=1, device=mat.device)
    matrix = mat if upper else mat.transpose(-2, -1)
    return matrix[..., idxs[0, :], idxs[1, :]]


def num_lower_tri_elements_to_n(x: int) -> int:
    """
    Calculate the size of the matrix from the number of strictly lower triangular elements.
    We have x = n(n - 1) / 2 for some n
    n² - n - 2x = 0
    so n = (1 + √(1 + 8x)) / 2
    Raises:
        ValueError: if `x` is not n(n-1)/2 for any integer n.
    """
    # math.isqrt is exact for arbitrarily large integers, unlike a float sqrt which can
    # round incorrectly for large x.
    val = (math.isqrt(1 + 8 * x) + 1) // 2
    if val * (val - 1) != 2 * x:
        raise ValueError("Invalid number of lower triangular elements")
    return val
|
causica/src/causica/triangular_transformations.py/0
|
{
"file_path": "causica/src/causica/triangular_transformations.py",
"repo_id": "causica",
"token_count": 699
}
| 606 |
import numpy as np
import torch
from causica.graph.dag_constraint import calculate_dagness
def test_calculate_dagness():
    """Penalty is zero for DAGs, positive for cyclic graphs, and grows with the amount of cyclicity."""
    # A simple DAG (two nodes pointing at a third): penalty must be exactly zero.
    dag = torch.Tensor([[0, 0, 1], [0, 0, 1], [0, 0, 0]])
    assert calculate_dagness(dag) == 0
    # One 3-cycle vs. the same graph with an extra self-loop: both positive, the latter larger.
    dag_one_cycle = torch.Tensor([[0, 1, 0], [0, 0, 1], [1, 0, 0]])
    dag_two_cycle = torch.Tensor([[1, 1, 0], [0, 0, 1], [1, 0, 0]])
    assert calculate_dagness(dag_one_cycle) > 0
    assert calculate_dagness(dag_two_cycle) > 0
    assert calculate_dagness(dag_one_cycle) < calculate_dagness(dag_two_cycle)
    # Two self-loops: expected value 2e - 2, presumably the matrix-exponential penalty
    # trace(exp(A)) - d evaluated at the identity matrix.
    self_loop_non_dag = torch.Tensor([[1, 0], [0, 1]])
    np.testing.assert_almost_equal(
        calculate_dagness(self_loop_non_dag),
        2 * np.exp(1) - 2,
        decimal=5,
    )
|
causica/test/test_dag_constraint.py/0
|
{
"file_path": "causica/test/test_dag_constraint.py",
"repo_id": "causica",
"token_count": 339
}
| 607 |
import math
import torch
from torch import nn
from torch.nn.modules.utils import _pair
from cliffordlayers.nn.functional.cliffordg3conv import clifford_g3convnd
def get_clifford_left_kernel(M, w, flatten=True):
    """
    Obtains the matrix that computes the geometric product from the left.
    When the output is flattened, it can be used to apply a fully connected
    layer on the multivectors.
    Args:
        M (Tensor): Cayley table that defines the geometric relation.
        w (Tensor): Input tensor with shape (o, i, c) where o is the number of output channels,
            i is the number of input channels, and c is the number of blades.
        flatten (bool, optional): If True, the resulting matrix will be reshaped for subsequent
            fully connected operations. Defaults to True.
    """
    n_out, n_in, n_blades = w.size()
    # Contract the weight blades against the Cayley table (left multiplication).
    kernel = torch.einsum("ijk, pqi->jpkq", M, w)
    if not flatten:
        return kernel
    return kernel.reshape(n_out * n_blades, n_in * n_blades)
def get_clifford_right_kernel(M, w, flatten=True):
    """
    Obtains the matrix that computes the geometric product from the right.
    When the output is flattened, it can be used to apply a fully connected
    layer on the multivectors.
    Args:
        M (Tensor): Cayley table that defines the geometric relation.
        w (Tensor): Input tensor with shape (o, i, c) where o is the number of output channels,
            i is the number of input channels, and c is the number of blades.
        flatten (bool, optional): If True, the resulting matrix will be reshaped for subsequent
            fully connected operations. Defaults to True.
    """
    n_out, n_in, n_blades = w.size()
    # Contract the weight blades against the Cayley table (right multiplication).
    kernel = torch.einsum("ijk, pqk->jpiq", M, w)
    if not flatten:
        return kernel
    return kernel.reshape(n_out * n_blades, n_in * n_blades)
class PGAConjugateLinear(nn.Module):
    """
    Linear layer that applies the PGA conjugation to the input.
    Args:
        in_features (int): Number of input features.
        out_features (int): Number of output features.
        algebra (Algebra): Algebra object that defines the geometric product.
        input_blades (tuple): Nonnegative blades of the input multivectors.
        action_blades (tuple, optional): Blades of the action. Defaults to (0, 5, 6, 7, 8, 9, 10, 15),
            which encodes rotation and translation.
    """
    def __init__(
        self,
        in_features,
        out_features,
        algebra,
        input_blades,
        action_blades=(0, 5, 6, 7, 8, 9, 10, 15),
    ):
        super().__init__()
        # PGA requires the degenerate metric (0, 1, 1, 1): e0 squares to zero.
        assert torch.all(algebra.metric == torch.tensor([0, 1, 1, 1]))
        self.input_blades = input_blades
        self.in_features = in_features
        self.out_features = out_features
        self.algebra = algebra
        self.action_blades = action_blades
        self.n_action_blades = len(action_blades)
        # One learnable action (stored on the action blades only) per (out, in) feature pair.
        self._action = nn.Parameter(torch.empty(out_features, in_features, self.n_action_blades))
        # Scalar mixing weight per (out, in) feature pair.
        self.weight = nn.Parameter(torch.empty(out_features, in_features))
        self.embed_e0 = nn.Parameter(torch.zeros(in_features, 1))
        # Conjugation uses the algebra's reverse as the inverse of the (unit-normalized) action.
        self.inverse = algebra.reverse
        self.reset_parameters()
    def reset_parameters(self):
        # Init the rotation parts uniformly.
        torch.nn.init.uniform_(self._action[..., 0], -1, 1)
        torch.nn.init.uniform_(self._action[..., 4:7], -1, 1)
        # Init the translation parts with zeros.
        torch.nn.init.zeros_(self._action[..., 1:4])
        torch.nn.init.zeros_(self._action[..., 7])
        # Normalize each action; the norm must be concentrated in its first component.
        norm = self.algebra.norm(self.algebra.embed(self._action.data, self.action_blades))
        assert torch.allclose(norm[..., 1:], torch.tensor(0.0), atol=1e-3)
        norm = norm[..., :1]
        self._action.data = self._action.data / norm
        torch.nn.init.kaiming_uniform_(self.weight, a=math.sqrt(5))
    @property
    def action(self):
        # The action embedded into the full multivector basis of the algebra.
        return self.algebra.embed(self._action, self.action_blades)
    def forward(self, input):
        M = self.algebra.cayley
        k = self.action
        k_ = self.inverse(k)
        x = self.algebra.embed(input, self.input_blades)
        # Blade index 14 is overwritten with the learned embedding — presumably the
        # homogeneous/e0-related coordinate (see `embed_e0`); confirm against the algebra layout.
        x[..., 14:15] = self.embed_e0
        # x[..., 14:15] = 1
        k_l = get_clifford_left_kernel(M, k, flatten=False)
        k_r = get_clifford_right_kernel(M, k_, flatten=False)
        # Sandwich product k x k^{-1}, mixed by `weight` and summed over input features.
        x = torch.einsum("oi,poqi,qori,bir->bop", self.weight, k_r, k_l, x)
        x = self.algebra.get(x, self.input_blades)
        return x
class MultiVectorAct(nn.Module):
    """
    A module to apply multivector activations to the input.
    Args:
        channels (int): Number of channels in the input.
        algebra: The algebra object that defines the geometric product.
        input_blades (list, tuple): The nonnegative input blades.
        kernel_blades (list, tuple, optional): The blades that will be used to compute the activation. Defaults to all input blades.
        agg (str, optional): The aggregation method to be used. Options include "linear", "sum", and "mean". Defaults to "linear".
    """
    def __init__(self, channels, algebra, input_blades, kernel_blades=None, agg="linear"):
        super().__init__()
        self.algebra = algebra
        self.input_blades = tuple(input_blades)
        if kernel_blades is not None:
            self.kernel_blades = tuple(kernel_blades)
        else:
            # Default: compute the gate from every input blade.
            self.kernel_blades = self.input_blades
        if agg == "linear":
            # Depthwise 1D conv collapsing the kernel blades into one gate value per channel.
            self.conv = nn.Conv1d(channels, channels, kernel_size=len(self.kernel_blades), groups=channels)
        self.agg = agg
    def forward(self, input):
        # Embed into the full algebra so blades can be indexed uniformly.
        v = self.algebra.embed(input, self.input_blades)
        # SiLU-style gating: multiply by a sigmoid of an aggregate over the kernel blades.
        if self.agg == "linear":
            v = v * torch.sigmoid(self.conv(v[..., self.kernel_blades]))
        elif self.agg == "sum":
            v = v * torch.sigmoid(v[..., self.kernel_blades].sum(dim=-1, keepdim=True))
        elif self.agg == "mean":
            v = v * torch.sigmoid(v[..., self.kernel_blades].mean(dim=-1, keepdim=True))
        else:
            raise ValueError(f"Aggregation {self.agg} not implemented.")
        # Project back onto the original input blades.
        v = self.algebra.get(v, self.input_blades)
        return v
class _CliffordG3ConvNd(nn.Module):
"""
A Clifford geometric algebra convolutional layer for N-dimensional fields where the features are vectors in G3.
Args:
in_channels (int): Number of input channels.
out_channels (int): Number of output channels.
kernel_size (int, optional): Size of the convolutional kernel. Defaults to 1.
stride (int, optional): Stride of the convolution operation. Defaults to 1.
padding (int, optional): Padding added to both sides of the input. Defaults to 0.
dilation (int, optional): Dilation rate of the kernel. Defaults to 1.
groups (int, optional): Number of blocked connections from input channels to output channels. Defaults to 1.
transposed (bool, optional): If True, performs a transposed convolution. Defaults to False.
bias (bool, optional): If True, adds a bias term to the output. Defaults to True.
"""
def __init__(
self,
in_channels: int,
out_channels: int,
kernel_size: int = 1,
stride: int = 1,
padding: int = 0,
dilation: int = 1,
groups: int = 1,
transposed: bool = False,
bias: bool = True,
):
super().__init__()
if in_channels % groups != 0:
raise ValueError("in_channels must be divisible by groups")
if out_channels % groups != 0:
raise ValueError("out_channels must be divisible by groups")
self.in_channels = in_channels
self.out_channels = out_channels
self.kernel_size = kernel_size
self.stride = stride
self.padding = padding
self.dilation = dilation
self.groups = groups
if transposed:
self.weights = nn.ParameterList(
[nn.Parameter(torch.empty(in_channels, out_channels // groups, *kernel_size)) for _ in range(4)]
)
else:
self.weights = nn.ParameterList(
[nn.Parameter(torch.empty(out_channels, in_channels // groups, *kernel_size)) for _ in range(4)]
)
if bias:
self.bias = nn.ParameterList([nn.Parameter(torch.empty(out_channels)) for _ in range(3)])
else:
self.register_parameter("bias", None)
self.scale_param = nn.Parameter(torch.Tensor(self.weights[0].shape))
self.zero_kernel = nn.Parameter(torch.zeros(self.weights[0].shape), requires_grad=False)
self.reset_parameters()
def reset_parameters(self):
for weight in self.weights:
nn.init.kaiming_uniform_(weight, a=math.sqrt(5))
nn.init.kaiming_uniform_(self.scale_param, a=math.sqrt(5))
self.weights.append(self.scale_param)
self.weights.append(self.zero_kernel)
if self.bias is not None:
for i, bias in enumerate(self.bias):
fan_in, _ = nn.init._calculate_fan_in_and_fan_out(self.weights[i])
if fan_in != 0:
bound = 1 / math.sqrt(fan_in)
nn.init.uniform_(bias, -bound, bound)
def extra_repr(self):
s = "{in_channels}, {out_channels}, kernel_size={kernel_size}" ", stride={stride}"
if self.padding != (0,) * len(self.padding):
s += ", padding={padding}"
if self.dilation != (1,) * len(self.dilation):
s += ", dilation={dilation}"
if self.groups != 1:
s += ", groups={groups}"
if self.bias is None:
s += ", bias=False"
return s.format(**self.__dict__)
class CliffordG3Conv2d(_CliffordG3ConvNd):
    """
    2D convolutional layer where the features are vectors in G3.
    Args:
        in_channels (int): Number of input channels.
        out_channels (int): Number of output channels.
        kernel_size (int, optional): Size of the convolutional kernel. Defaults to 1.
        stride (int, optional): Stride of the convolution operation. Defaults to 1.
        padding (int or str, optional): Padding added to both sides of the input or padding mode. Defaults to 0.
        dilation (int, optional): Dilation rate of the kernel. Defaults to 1.
        groups (int, optional): Number of blocked connections from input channels to output channels. Defaults to 1.
        bias (bool, optional): If True, adds a bias term to the output. Defaults to False.
    """
    def __init__(
        self,
        in_channels: int,
        out_channels: int,
        kernel_size: int = 1,
        stride: int = 1,
        padding: int = 0,
        dilation: int = 1,
        groups: int = 1,
        bias: bool = False,
    ):
        # Normalize scalar hyperparameters to 2-tuples; string padding modes are kept as-is.
        kernel_size_ = _pair(kernel_size)
        stride_ = _pair(stride)
        padding_ = padding if isinstance(padding, str) else _pair(padding)
        dilation_ = _pair(dilation)
        super().__init__(
            in_channels=in_channels,
            out_channels=out_channels,
            kernel_size=kernel_size_,
            stride=stride_,
            padding=padding_,
            dilation=dilation_,
            groups=groups,
            transposed=False,
            bias=bias,
        )
    def forward(self, input):
        # Stack the three G3 vector components along the channel dim: (B, 3*C, H, W).
        x = torch.cat([input[..., 0], input[..., 1], input[..., 2]], dim=1)
        x = clifford_g3convnd(
            x,
            self.weights,
            self.bias,
            self.stride,
            self.padding,
            self.dilation,
            self.groups,
        )
        # Split the channel dim back into the three vector components.
        e_0 = x[:, : self.out_channels, :, :]
        e_1 = x[:, self.out_channels : self.out_channels * 2, :, :]
        e_2 = x[:, self.out_channels * 2 : self.out_channels * 3, :, :]
        return torch.stack([e_0, e_1, e_2], dim=-1)
class CliffordG3ConvTranspose2d(_CliffordG3ConvNd):
    """
    2D transposed convolutional layer where the features are vectors in G3.
    Args:
        in_channels (int): Number of input channels.
        out_channels (int): Number of output channels.
        kernel_size (int, optional): Size of the convolutional kernel. Defaults to 1.
        stride (int, optional): Stride of the convolution operation. Defaults to 1.
        padding (int or str, optional): Padding added to both sides of the input or padding mode. Defaults to 0.
        dilation (int, optional): Dilation rate of the kernel. Defaults to 1.
        groups (int, optional): Number of blocked connections from input channels to output channels. Defaults to 1.
        bias (bool, optional): If True, adds a bias term to the output. Defaults to False.
    """
    def __init__(
        self,
        in_channels: int,
        out_channels: int,
        kernel_size: int = 1,
        stride: int = 1,
        padding: int = 0,
        dilation: int = 1,
        groups: int = 1,
        bias: bool = False,
    ):
        # Normalize scalar hyperparameters to 2-tuples; string padding modes are kept as-is.
        kernel_size_ = _pair(kernel_size)
        stride_ = _pair(stride)
        padding_ = padding if isinstance(padding, str) else _pair(padding)
        dilation_ = _pair(dilation)
        super().__init__(
            in_channels=in_channels,
            out_channels=out_channels,
            kernel_size=kernel_size_,
            stride=stride_,
            padding=padding_,
            dilation=dilation_,
            groups=groups,
            transposed=True,
            bias=bias,
        )
    def forward(self, input):
        # Stack the three G3 vector components along the channel dim: (B, 3*C, H, W).
        x = torch.cat([input[..., 0], input[..., 1], input[..., 2]], dim=1)
        x = clifford_g3convnd(
            x,
            self.weights,
            self.bias,
            self.stride,
            self.padding,
            self.dilation,
            self.groups,
            transposed=True,
        )
        # Split the channel dim back into the three vector components.
        e_0 = x[:, : self.out_channels, :, :]
        e_1 = x[:, self.out_channels : self.out_channels * 2, :, :]
        e_2 = x[:, self.out_channels * 2 : self.out_channels * 3, :, :]
        return torch.stack([e_0, e_1, e_2], dim=-1)
class CliffordG3LinearVSiLU(nn.Module):
    """
    Vector SiLU for G3 features: each vector is gated by a sigmoid of a learned
    linear combination of its three components.
    Args:
        channels (int): Number of channels in the input.
    """

    def __init__(self, channels):
        super().__init__()
        # Depthwise (groups=channels) conv over the trailing component axis collapses
        # the three vector components into a single gate value per channel.
        self.conv = nn.Conv3d(channels, channels, (1, 1, 3), groups=channels)

    def forward(self, input):
        gate = torch.sigmoid(self.conv(input))
        return input * gate
class CliffordG3SumVSiLU(nn.Module):
    """
    Vector SiLU for G3 features: each vector is gated by a sigmoid of the sum
    of its components.
    """

    def __init__(self):
        super().__init__()

    def forward(self, input):
        gate = torch.sigmoid(input.sum(-1, keepdim=True))
        return gate * input
class CliffordG3MeanVSiLU(nn.Module):
    """
    Vector SiLU for G3 features: each vector is gated by a sigmoid of the mean
    of its components.
    """

    def __init__(self):
        super().__init__()

    def forward(self, input):
        gate = torch.sigmoid(input.mean(-1, keepdim=True))
        return gate * input
class CliffordG3GroupNorm(nn.Module):
    """
    A module that applies group normalization to vectors in G3.
    Args:
        num_groups (int): Number of groups to normalize over.
        num_features (int): Number of features in the input.
        num_blades (int): Number of blades in the input.
        scale_norm (bool, optional): If True, the output is scaled by the norm of the input. Defaults to False.
    """

    def __init__(self, num_groups, num_features, num_blades, scale_norm=False):
        super().__init__()
        # Per-feature affine parameters: a scalar weight and a per-blade bias.
        self.weight = nn.Parameter(torch.ones(num_features))
        self.bias = nn.Parameter(torch.zeros(num_features, num_blades))
        self.num_groups = num_groups
        self.scale_norm = scale_norm
        self.num_blades = num_blades
        self.num_features = num_features

    def forward(self, x):
        # x: (N, C, *D, I) where I is the number of blades; C must be divisible by num_groups.
        N, C, *D, I = x.size()
        G = self.num_groups
        assert C % G == 0
        # Fold the spatial/feature dims per group and center each group over its members.
        x = x.view(N, G, -1, I)
        mean = x.mean(-2, keepdim=True)
        x = x - mean
        if self.scale_norm:
            # Use the documented `keepdim` kwarg (consistent with the `mean` call above)
            # instead of the numpy-style `keepdims` alias used previously.
            norm = x.norm(dim=-1, keepdim=True).mean(dim=-2, keepdim=True)
            x = x / norm
        # Reshape so the per-feature affine parameters broadcast correctly.
        x = x.view(len(x), self.num_features, -1, self.num_blades)
        return (x * self.weight[None, :, None, None] + self.bias[None, :, None]).view(N, C, *D, I)
|
cliffordlayers/cliffordlayers/nn/modules/gcan.py/0
|
{
"file_path": "cliffordlayers/cliffordlayers/nn/modules/gcan.py",
"repo_id": "cliffordlayers",
"token_count": 7238
}
| 608 |
# Functions
::: cliffordlayers.nn.functional.batchnorm
::: cliffordlayers.nn.functional.groupnorm
|
cliffordlayers/docs/reference/functional.md/0
|
{
"file_path": "cliffordlayers/docs/reference/functional.md",
"repo_id": "cliffordlayers",
"token_count": 33
}
| 609 |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from typing import Tuple
import torch
from cliffordlayers.nn.functional.groupnorm import (
clifford_group_norm,
complex_group_norm,
)
from cliffordlayers.nn.modules.groupnorm import (
CliffordGroupNorm1d,
CliffordGroupNorm2d,
CliffordGroupNorm3d,
ComplexGroupNorm1d,
)
from cliffordlayers.signature import CliffordSignature
def test_clifford_instance_norm1d_vs_complex_instance_norm():
    """Test Clifford1d groupnorm function against complex groupnorm function using num_groups=1 and g = [-1]."""
    # Trailing dim of size 2 holds the (real, imag) pair == the two blades of Cl(-1).
    x = torch.randn(4, 16, 8, 2)
    x_norm_clifford = clifford_group_norm(
        x,
        CliffordSignature(
            [
                -1,
            ]
        ).n_blades,
        num_groups=1,
    )
    x_norm_complex = complex_group_norm(
        torch.view_as_complex(x),
        num_groups=1,
    )
    # The Clifford result (real view) must match the complex implementation exactly.
    torch.testing.assert_close(x_norm_clifford, torch.view_as_real(x_norm_complex))
def test_clifford_layer_norm1d_vs_complex_layer_norm():
    """Test Clifford1d groupnorm function against complex groupnorm function using num_groups=channels and g = [-1]."""
    # num_groups == channels makes group norm act as a per-channel (layer-style) norm.
    channels = 16
    x = torch.randn(4, channels, 8, 2)
    x_norm_clifford = clifford_group_norm(
        x,
        CliffordSignature(
            [
                -1,
            ]
        ).n_blades,
        num_groups=channels,
    )
    x_norm_complex = complex_group_norm(
        torch.view_as_complex(x),
        num_groups=channels,
    )
    torch.testing.assert_close(x_norm_clifford, torch.view_as_real(x_norm_complex))
def test_clifford_groupnorm1d_vs_complex_groupnorm_scaled():
    """Test Clifford1d groupnorm function against complex groupnorm function using num_groups=2 and g = [-1],
    where an affine transformation is applied.
    """
    channels = 16
    num_groups = 2
    x = torch.randn(4, channels, 8, 2)
    # Affine parameters: weight (blades, blades, C/groups), bias (blades, C/groups).
    w = torch.randn(2, 2, int(channels / num_groups))
    b = torch.randn(2, int(channels / num_groups))
    x_norm_clifford = clifford_group_norm(
        x,
        CliffordSignature(
            [
                -1,
            ]
        ).n_blades,
        num_groups=num_groups,
        weight=w,
        bias=b,
    )
    x_norm_complex = complex_group_norm(
        torch.view_as_complex(x),
        num_groups=num_groups,
        weight=w,
        bias=b,
    )
    torch.testing.assert_close(x_norm_clifford, torch.view_as_real(x_norm_complex))
def test_clifford_groupnorm1d_vs_complex_groupnorm_scaled_validation():
    """Test Clifford1d groupnorm function against complex groupnorm function in the validation setting using num_groups=2 and g = [-1],
    where an affine transformation is applied.
    """
    channels = 16
    num_groups = 2
    x = torch.randn(4, channels, 8, 2)
    w = torch.randn(2, 2, int(channels / num_groups))
    b = torch.randn(2, int(channels / num_groups))
    # training=False: evaluation path (running statistics are not updated).
    x_norm_clifford = clifford_group_norm(
        x,
        CliffordSignature(
            [
                -1,
            ]
        ).n_blades,
        num_groups=num_groups,
        weight=w,
        bias=b,
        training=False,
    )
    x_norm_complex = complex_group_norm(
        torch.view_as_complex(x),
        num_groups=num_groups,
        weight=w,
        bias=b,
        training=False,
    )
    torch.testing.assert_close(x_norm_clifford, torch.view_as_real(x_norm_complex))
def test_complex_groupnorm_valid():
    """Test complex group norm implementation for num_groups=8:
    validation setting where running_mean and running_cov are provided
    is tested against training setting where exactly this running_mean and running_cov should be calculated.
    """
    channels = 16
    num_groups = 8
    x = torch.randn(1, channels, 64, 2)
    B, C, *D, I = x.shape
    # Now reshape x as done in the group norm routine and calculate mean and covariance accordingly.
    x_r = x.view(1, int(B * C / num_groups), num_groups, *D, I)
    B, C, *D, I = x_r.shape
    B_dim, C_dim, *D_dims, I_dim = range(len(x_r.shape))
    shape = 1, C, *([1] * (x_r.dim() - 3))
    mean = x_r.mean(dim=(B_dim, *D_dims))
    x_mean = x_r - mean.reshape(*shape, I)
    # Flatten all statistics dims so the covariance is (C, I, I).
    X = x_mean.permute(C_dim, I_dim, B_dim, *D_dims).flatten(2, -1)
    cov = torch.matmul(X, X.transpose(-1, -2)) / X.shape[-1]
    assert mean.shape == (int(channels / num_groups), 2)
    assert cov.shape == (int(channels / num_groups), 2, 2)
    # Feeding the statistics computed above in eval mode must reproduce the training-mode output.
    x_norm_valid = complex_group_norm(
        torch.view_as_complex(x),
        num_groups=num_groups,
        running_mean=mean.permute(1, 0),
        running_cov=cov.permute(1, 2, 0),
        training=False,
    )
    x_norm = complex_group_norm(
        torch.view_as_complex(x),
        num_groups=num_groups,
    )
    torch.testing.assert_close(x_norm, x_norm_valid)
def get_mean_cov(x: torch.Tensor, num_groups: int) -> Tuple[torch.Tensor, torch.Tensor]:
    """Getting mean and covariance tensor for arbitrary Clifford algebras.
    Args:
        x (torch.Tensor): Input tensor of shape `(B, C, *D, I)` where I is the blade of the algebra.
        num_groups (int): Number of groups.
    Returns:
        Tuple[torch.Tensor, torch.Tensor]: Mean and covariance tensors of shapes `(I, C/num_groups)` and `(I, I, C/num_groups)`.
    """
    B, C, *D, I = x.shape
    # Reshape exactly as the group norm routine does: fold batch and group dims together.
    grouped = x.view(1, int(B * C / num_groups), num_groups, *D, I)
    B, C, *D, I = grouped.shape
    B_dim, C_dim, *D_dims, I_dim = range(grouped.dim())
    # Mean over the batch and all statistics dims, leaving (C, I).
    mean = grouped.mean(dim=(B_dim, *D_dims))
    centered = grouped - mean.reshape(1, C, *([1] * (grouped.dim() - 3)), I)
    # Flatten the statistics dims and form the per-channel blade covariance (C, I, I).
    flat = centered.permute(C_dim, I_dim, B_dim, *D_dims).flatten(2, -1)
    cov = flat @ flat.transpose(-1, -2) / flat.shape[-1]
    return mean, cov
def test_clifford_groupnorm1d_valid():
    """Test Clifford1d group norm implementation for num_groups=8 and g=[1]:
    validation setting where running_mean and running_cov are provided
    is tested against training setting where exactly this running_mean and running_cov should be calculated.
    """
    channels = 16
    num_groups = 8
    # Cl(1) has 2 blades, hence the trailing dim of size 2.
    x = torch.randn(1, channels, 64, 2)
    mean, cov = get_mean_cov(x, num_groups)
    assert mean.shape == (int(channels / num_groups), 2)
    assert cov.shape == (int(channels / num_groups), 2, 2)
    # Eval mode with the externally computed statistics must equal training-mode normalization.
    x_norm_valid = clifford_group_norm(
        x,
        CliffordSignature([1]).n_blades,
        num_groups=num_groups,
        running_mean=mean.permute(1, 0),
        running_cov=cov.permute(1, 2, 0),
        training=False,
    )
    x_norm = clifford_group_norm(
        x,
        CliffordSignature([1]).n_blades,
        num_groups=num_groups,
    )
    torch.testing.assert_close(x_norm, x_norm_valid)
def test_clifford_groupnorm2d_valid():
    """Test Clifford2d group norm implementation for num_groups=4 and g=[1, 1]:
    validation setting where running_mean and running_cov are provided
    is tested against training setting where exactly this running_mean and running_cov should be calculated.
    """
    channels = 32
    num_groups = 4
    # Cl(1, 1) has 4 blades.
    x = torch.randn(1, channels, 64, 64, 4)
    mean, cov = get_mean_cov(x, num_groups)
    assert mean.shape == (int(channels / num_groups), 4)
    assert cov.shape == (int(channels / num_groups), 4, 4)
    x_norm_valid = clifford_group_norm(
        x,
        CliffordSignature([1, 1]).n_blades,
        num_groups=num_groups,
        running_mean=mean.permute(1, 0),
        running_cov=cov.permute(1, 2, 0),
        training=False,
    )
    x_norm = clifford_group_norm(
        x,
        CliffordSignature([1, 1]).n_blades,
        num_groups=num_groups,
    )
    torch.testing.assert_close(x_norm, x_norm_valid)
def test_clifford_groupnorm3d_valid():
    """Test Clifford3d group norm implementation for num_groups=4 and g=[1, 1, 1]:
    validation setting where running_mean and running_cov are provided
    is tested against training setting where exactly this running_mean and running_cov should be calculated.
    """
    channels = 32
    num_groups = 4
    # Cl(1, 1, 1) has 8 blades.
    x = torch.randn(1, channels, 32, 32, 32, 8)
    mean, cov = get_mean_cov(x, num_groups)
    assert mean.shape == (int(channels / num_groups), 8)
    assert cov.shape == (int(channels / num_groups), 8, 8)
    x_norm_valid = clifford_group_norm(
        x,
        CliffordSignature([1, 1, 1]).n_blades,
        num_groups=num_groups,
        running_mean=mean.permute(1, 0),
        running_cov=cov.permute(1, 2, 0),
        training=False,
    )
    x_norm = clifford_group_norm(
        x,
        CliffordSignature([1, 1, 1]).n_blades,
        num_groups=num_groups,
    )
    torch.testing.assert_close(x_norm, x_norm_valid)
def test_modules_clifford_groupnorm1d_vs_complex_instancenorm1d():
    """Test Clifford1d groupnorm module against complex groupnorm module using num_groups=1 and g = [-1]."""
    inp = torch.randn(4, 16, 8, 2)
    # The [-1] signature reduces to complex numbers, so the two modules must agree.
    expected = ComplexGroupNorm1d(channels=16, num_groups=1)(torch.view_as_complex(inp))
    actual = CliffordGroupNorm1d([-1], channels=16, num_groups=1)(inp)
    torch.testing.assert_close(actual, torch.view_as_real(expected))
def test_modules_clifford_layernorm1d_vs_complex_layernorm1d():
    """Test Clifford1d groupnorm module against complex groupnorm module using num_groups=num_channels and g = [-1]."""
    # NOTE(review): num_groups == channels is conventionally instance norm rather
    # than layer norm — confirm the intended naming of this test.
    inp = torch.randn(4, 16, 8, 2)
    expected = ComplexGroupNorm1d(channels=16, num_groups=16)(torch.view_as_complex(inp))
    actual = CliffordGroupNorm1d([-1], channels=16, num_groups=16)(inp)
    torch.testing.assert_close(actual, torch.view_as_real(expected))
def test_modules_clifford_groupnorm1d_vs_complex_groupnorm1d():
    """Test Clifford1d groupnorm module against complex groupnorm module using num_groups=2 and g = [-1]."""
    inp = torch.randn(4, 16, 8, 2)
    # The [-1] signature reduces to complex numbers, so the two modules must agree.
    expected = ComplexGroupNorm1d(channels=16, num_groups=2)(torch.view_as_complex(inp))
    actual = CliffordGroupNorm1d([-1], channels=16, num_groups=2)(inp)
    torch.testing.assert_close(actual, torch.view_as_real(expected))
def test_module_clifford_groupnorm2d():
    """Test Clifford2d groupnorm module for correct shapes using num_groups=2 and g = [-1, -1]."""
    inp = torch.randn(4, 16, 64, 64, 4)
    norm = CliffordGroupNorm2d([-1, -1], num_groups=2, channels=16)
    # Normalization must preserve the input's shape.
    assert norm(inp).shape == inp.shape
def test_module_clifford_groupnorm3d():
    """Test Clifford3d groupnorm module for correct shapes using num_groups=8 and g = [-1, -1, -1]."""
    x = torch.randn(4, 16, 64, 64, 64, 8)
    clifford_norm = CliffordGroupNorm3d(
        [-1, -1, -1],
        num_groups=8,
        channels=16,
    )
    x_norm_clifford = clifford_norm(x)
    # Normalization must preserve the input's shape.
    assert x.shape == x_norm_clifford.shape
|
cliffordlayers/tests/test_clifford_groupnorm.py/0
|
{
"file_path": "cliffordlayers/tests/test_clifford_groupnorm.py",
"repo_id": "cliffordlayers",
"token_count": 4931
}
| 610 |
#!/usr/bin/env/python
"""
Usage:
CGVAE.py [options]
Options:
-h --help Show this screen
--dataset NAME Dataset name: zinc, qm9, cep
--config-file FILE Hyperparameter configuration file path (in JSON format)
--config CONFIG Hyperparameter configuration dictionary (in JSON format)
--log_dir NAME log dir name
--data_dir NAME data dir name
--restore FILE File to restore weights from.
--freeze-graph-model Freeze weights of graph model components
"""
from typing import Sequence, Any
from docopt import docopt
from collections import defaultdict, deque
import numpy as np
import tensorflow as tf
import sys, traceback
import pdb
import json
import os
from GGNN_core import ChemModel
import utils
from utils import *
import pickle
import random
from numpy import linalg as LA
from rdkit import Chem
from copy import deepcopy
from rdkit.Chem import QED
import os
import time
from data_augmentation import *
'''
Comments provide the expected tensor shapes where helpful.
Key to symbols in comments:
---------------------------
[...]: a tensor
; ; : a list
b: batch size
e: number of edge types (3)
es: maximum number of BFS transitions in this batch
v: number of vertices per graph in this batch
h: GNN hidden size
'''
class DenseGGNNChemModel(ChemModel):
    def __init__(self, args):
        """Build the model; all setup is delegated to the ChemModel base class."""
        super().__init__(args)
    @classmethod
    def default_params(cls):
        """Return the default hyperparameter dictionary for this model.

        Extends the base-class defaults with CGVAE-specific settings.
        NOTE(review): this method reads a module-level ``dataset`` global
        (presumably set from the docopt arguments before the class is used) —
        confirm it is always defined at call time.
        """
        params = dict(super().default_params())
        params.update({
            'task_sample_ratios': {},
            'use_edge_bias': True,  # whether use edge bias in gnn
            'clamp_gradient_norm': 1.0,
            'out_layer_dropout_keep_prob': 1.0,
            'tie_fwd_bkwd': True,
            'task_ids': [0],  # id of property prediction
            'random_seed': 0,  # fixed for reproducibility
            'batch_size': 8 if dataset=='zinc' or dataset=='cep' else 64,
            "qed_trade_off_lambda": 10,
            'prior_learning_rate': 0.05,
            'stop_criterion': 0.01,
            'num_epochs': 3 if dataset=='zinc' or dataset=='cep' else 10,
            'epoch_to_generate': 3 if dataset=='zinc' or dataset=='cep' else 10,
            'number_of_generation': 30000,
            'optimization_step': 0,
            'maximum_distance': 50,
            "use_argmax_generation": False,  # use random sampling or argmax during generation
            'residual_connection_on': True,  # whether residual connection is on
            'residual_connections': {  # For iteration i, specify list of layers whose output is added as an input
                2: [0],
                4: [0, 2],
                6: [0, 2, 4],
                8: [0, 2, 4, 6],
                10: [0, 2, 4, 6, 8],
                12: [0, 2, 4, 6, 8, 10],
                14: [0, 2, 4, 6, 8, 10, 12],
            },
            'num_timesteps': 12,  # gnn propagation step
            'hidden_size': 100,
            "kl_trade_off_lambda": 0.3,  # kl tradeoff
            'learning_rate': 0.001,
            'graph_state_dropout_keep_prob': 1,
            "compensate_num": 1,  # how many atoms to be added during generation
            'train_file': 'data/molecules_train_%s.json' % dataset,
            'valid_file': 'data/molecules_valid_%s.json' % dataset,
            'try_different_starting': True,
            "num_different_starting": 6,
            'generation': False,  # only for generation
            'use_graph': True,  # use gnn
            "label_one_hot": False,  # one hot label or not
            "multi_bfs_path": False,  # whether sample several BFS paths for each molecule
            "bfs_path_count": 30,
            "path_random_order": False,  # False: canonical order, True: random order
            "sample_transition": False,  # whether use transition sampling
            'edge_weight_dropout_keep_prob': 1,
            'check_overlap_edge': False,
            "truncate_distance": 10,
        })
        return params
    def prepare_specific_graph_model(self) -> None:
        """Create every placeholder and trainable weight used by the encoder/decoder GNNs.

        Shape key (see module header): b=batch, e=edge types, es=max BFS
        transitions in the batch, v=vertices, h=hidden size.
        """
        h_dim = self.params['hidden_size']
        # Decoder node state = [latent z ; node-symbol embedding ; focus bit].
        expanded_h_dim=self.params['hidden_size']+self.params['hidden_size'] + 1 # 1 for focus bit
        self.placeholders['graph_state_keep_prob'] = tf.placeholder(tf.float32, None, name='graph_state_keep_prob')
        self.placeholders['edge_weight_dropout_keep_prob'] = tf.placeholder(tf.float32, None, name='edge_weight_dropout_keep_prob')
        self.placeholders['initial_node_representation'] = tf.placeholder(tf.float32,
                                                                          [None, None, self.params['hidden_size']],
                                                                          name='node_features') # padded node symbols
        # mask out invalid node
        self.placeholders['node_mask'] = tf.placeholder(tf.float32, [None, None], name='node_mask') # [b x v]
        self.placeholders['num_vertices'] = tf.placeholder(tf.int32, ())
        # adj for encoder
        self.placeholders['adjacency_matrix'] = tf.placeholder(tf.float32,
                                                               [None, self.num_edge_types, None, None], name="adjacency_matrix") # [b, e, v, v]
        # labels for node symbol prediction
        self.placeholders['node_symbols'] = tf.placeholder(tf.float32, [None, None, self.params['num_symbols']]) # [b, v, num_symbols]
        # node symbols used to enhance latent representations
        self.placeholders['latent_node_symbols'] = tf.placeholder(tf.float32,
                                                                  [None, None, self.params['hidden_size']], name='latent_node_symbol') # [b, v, h]
        # mask out cross entropies in decoder
        self.placeholders['iteration_mask']=tf.placeholder(tf.float32, [None, None]) # [b, es]
        # adj matrices used in decoder
        self.placeholders['incre_adj_mat']=tf.placeholder(tf.float32, [None, None, self.num_edge_types, None, None], name='incre_adj_mat') # [b, es, e, v, v]
        # distance
        self.placeholders['distance_to_others']=tf.placeholder(tf.int32, [None, None, None], name='distance_to_others') # [b, es, v]
        # maximum iteration number of this batch
        self.placeholders['max_iteration_num']=tf.placeholder(tf.int32, [], name='max_iteration_num') # number
        # node number in focus at each iteration step
        self.placeholders['node_sequence']=tf.placeholder(tf.float32, [None, None, None], name='node_sequence') # [b, es, v]
        # mask out invalid edge types at each iteration step
        self.placeholders['edge_type_masks']=tf.placeholder(tf.float32, [None, None, self.num_edge_types, None], name='edge_type_masks') # [b, es, e, v]
        # ground truth edge type labels at each iteration step
        self.placeholders['edge_type_labels']=tf.placeholder(tf.float32, [None, None, self.num_edge_types, None], name='edge_type_labels') # [b, es, e, v]
        # mask out invalid edge at each iteration step
        self.placeholders['edge_masks']=tf.placeholder(tf.float32, [None, None, None], name='edge_masks') # [b, es, v]
        # ground truth edge labels at each iteration step
        self.placeholders['edge_labels']=tf.placeholder(tf.float32, [None, None, None], name='edge_labels') # [b, es, v]
        # ground truth labels for whether it stops at each iteration step
        self.placeholders['local_stop']=tf.placeholder(tf.float32, [None, None], name='local_stop') # [b, es]
        # z_prior sampled from standard normal distribution
        self.placeholders['z_prior']=tf.placeholder(tf.float32, [None, None, self.params['hidden_size']], name='z_prior') # the prior of z sampled from normal distribution
        # put in front of kl latent loss
        self.placeholders['kl_trade_off_lambda']=tf.placeholder(tf.float32, [], name='kl_trade_off_lambda') # number
        # overlapped edge features
        self.placeholders['overlapped_edge_features']=tf.placeholder(tf.int32, [None, None, None], name='overlapped_edge_features') # [b, es, v]
        # weights for encoder and decoder GNN.
        if self.params["residual_connection_on"]:
            # weights for encoder and decoder GNN. Different weights for each iteration
            for scope in ['_encoder', '_decoder']:
                if scope == '_encoder':
                    new_h_dim=h_dim
                else:
                    new_h_dim=expanded_h_dim
                for iter_idx in range(self.params['num_timesteps']):
                    with tf.variable_scope("gru_scope"+scope+str(iter_idx), reuse=False):
                        self.weights['edge_weights'+scope+str(iter_idx)] = tf.Variable(glorot_init([self.num_edge_types, new_h_dim, new_h_dim]))
                        if self.params['use_edge_bias']:
                            self.weights['edge_biases'+scope+str(iter_idx)] = tf.Variable(np.zeros([self.num_edge_types, 1, new_h_dim]).astype(np.float32))
                        cell = tf.contrib.rnn.GRUCell(new_h_dim)
                        cell = tf.nn.rnn_cell.DropoutWrapper(cell,
                                                             state_keep_prob=self.placeholders['graph_state_keep_prob'])
                        self.weights['node_gru'+scope+str(iter_idx)] = cell
        else:
            # Shared weights across all propagation steps.
            for scope in ['_encoder', '_decoder']:
                if scope == '_encoder':
                    new_h_dim=h_dim
                else:
                    new_h_dim=expanded_h_dim
                self.weights['edge_weights'+scope] = tf.Variable(glorot_init([self.num_edge_types, new_h_dim, new_h_dim]))
                if self.params['use_edge_bias']:
                    self.weights['edge_biases'+scope] = tf.Variable(np.zeros([self.num_edge_types, 1, new_h_dim]).astype(np.float32))
                with tf.variable_scope("gru_scope"+scope):
                    cell = tf.contrib.rnn.GRUCell(new_h_dim)
                    cell = tf.nn.rnn_cell.DropoutWrapper(cell,
                                                         state_keep_prob=self.placeholders['graph_state_keep_prob'])
                    self.weights['node_gru'+scope] = cell
        # weights for calculating mean and variance
        self.weights['mean_weights'] = tf.Variable(glorot_init([h_dim, h_dim]))
        self.weights['mean_biases'] = tf.Variable(np.zeros([1, h_dim]).astype(np.float32))
        self.weights['variance_weights'] = tf.Variable(glorot_init([h_dim, h_dim]))
        self.weights['variance_biases'] = tf.Variable(np.zeros([1, h_dim]).astype(np.float32))
        # The weights for generating node symbol logits
        self.weights['node_symbol_weights'] = tf.Variable(glorot_init([h_dim, self.params['num_symbols']]))
        self.weights['node_symbol_biases'] = tf.Variable(np.zeros([1, self.params['num_symbols']]).astype(np.float32))
        # Edge features concatenate 6 blocks of width expanded_h_dim each.
        feature_dimension=6*expanded_h_dim
        # record the total number of features (number of concatenated blocks)
        self.params["feature_dimension"] = 6
        # weights for generating edge type logits
        for i in range(self.num_edge_types):
            self.weights['edge_type_%d' % i] = tf.Variable(glorot_init([feature_dimension, feature_dimension]))
            self.weights['edge_type_biases_%d' % i] = tf.Variable(np.zeros([1, feature_dimension]).astype(np.float32))
            self.weights['edge_type_output_%d' % i] = tf.Variable(glorot_init([feature_dimension, 1]))
        # weights for generating edge logits
        self.weights['edge_iteration'] = tf.Variable(glorot_init([feature_dimension, feature_dimension]))
        self.weights['edge_iteration_biases'] = tf.Variable(np.zeros([1, feature_dimension]).astype(np.float32))
        self.weights['edge_iteration_output'] = tf.Variable(glorot_init([feature_dimension, 1]))
        # Weights for the stop node
        self.weights["stop_node"] = tf.Variable(glorot_init([1, expanded_h_dim]))
        # Weight for distance embedding
        self.weights['distance_embedding'] = tf.Variable(glorot_init([self.params['maximum_distance'], expanded_h_dim]))
        # Weight for overlapped edge feature
        self.weights["overlapped_edge_weight"] = tf.Variable(glorot_init([2, expanded_h_dim]))
        # weights for linear projection on qed prediction input
        self.weights['qed_weights'] = tf.Variable(glorot_init([h_dim, h_dim]))
        self.weights['qed_biases'] = tf.Variable(np.zeros([1, h_dim]).astype(np.float32))
        # use node embeddings
        self.weights["node_embedding"]= tf.Variable(glorot_init([self.params["num_symbols"], h_dim]))
        # graph state mask
        self.ops['graph_state_mask']= tf.expand_dims(self.placeholders['node_mask'], 2)
# transform one hot vector to dense embedding vectors
def get_node_embedding_state(self, one_hot_state):
node_nums=tf.argmax(one_hot_state, axis=2)
return tf.nn.embedding_lookup(self.weights["node_embedding"], node_nums) * self.ops['graph_state_mask']
    def compute_final_node_representations_with_residual(self, h, adj, scope_name): # scope_name: _encoder or _decoder
        """Run the GNN with per-iteration weights and residual connections.

        h: initial node representations; adj: adjacency tensors per edge type.
        Different GNN parameters are used for the encoder and the decoder
        (selected via scope_name). Returns the final node states [b, v, h_dim].
        """
        v = self.placeholders['num_vertices']
        # _decoder uses a larger latent space because concat of symbol and latent representation
        if scope_name=="_decoder":
            h_dim = self.params['hidden_size'] + self.params['hidden_size'] + 1
        else:
            h_dim = self.params['hidden_size']
        h = tf.reshape(h, [-1, h_dim]) # [b*v, h]
        # record all hidden states at each iteration
        all_hidden_states=[h]
        for iter_idx in range(self.params['num_timesteps']):
            with tf.variable_scope("gru_scope"+scope_name+str(iter_idx), reuse=None) as g_scope:
                for edge_type in range(self.num_edge_types):
                    # the message passed from this vertex to other vertices
                    m = tf.matmul(h, self.weights['edge_weights'+scope_name+str(iter_idx)][edge_type]) # [b*v, h]
                    if self.params['use_edge_bias']:
                        m += self.weights['edge_biases'+scope_name+str(iter_idx)][edge_type] # [b, v, h]
                    m = tf.reshape(m, [-1, v, h_dim]) # [b, v, h]
                    # collect the messages from other vertices to each vertex
                    if edge_type == 0:
                        acts = tf.matmul(adj[edge_type], m)
                    else:
                        acts += tf.matmul(adj[edge_type], m)
                # all messages collected for each node
                acts = tf.reshape(acts, [-1, h_dim]) # [b*v, h]
                # add residual connection here
                layer_residual_connections = self.params['residual_connections'].get(iter_idx)
                if layer_residual_connections is None:
                    layer_residual_states = []
                else:
                    layer_residual_states = [all_hidden_states[residual_layer_idx]
                                             for residual_layer_idx in layer_residual_connections]
                # concat current hidden states with residual states
                acts= tf.concat([acts] + layer_residual_states, axis=1) # [b, (1+num residual connection)* h]
                # feed msg inputs and hidden states to GRU
                h = self.weights['node_gru'+scope_name+str(iter_idx)](acts, h)[1] # [b*v, h]
                # record the new hidden states
                all_hidden_states.append(h)
        last_h = tf.reshape(all_hidden_states[-1], [-1, v, h_dim])
        return last_h
    def compute_final_node_representations_without_residual(self, h, adj, edge_weights, edge_biases, node_gru, gru_scope_name):
        """Run the GNN with weights shared across all propagation steps.

        h: initial node representations; adj: adjacency tensors per edge type.
        Different GNN parameters are passed in for the encoder and the decoder.
        Returns the final node states [b, v, h_dim].

        NOTE(review): the decoder width here is 2*hidden_size, without the +1
        focus bit used by the residual variant — confirm this is intentional.
        """
        v = self.placeholders['num_vertices']
        if gru_scope_name=="gru_scope_decoder":
            h_dim = self.params['hidden_size'] + self.params['hidden_size']
        else:
            h_dim = self.params['hidden_size']
        h = tf.reshape(h, [-1, h_dim])
        with tf.variable_scope(gru_scope_name) as scope:
            for i in range(self.params['num_timesteps']):
                if i > 0:
                    # Reuse the same GRU variables after the first step.
                    tf.get_variable_scope().reuse_variables()
                for edge_type in range(self.num_edge_types):
                    m = tf.matmul(h, tf.nn.dropout(edge_weights[edge_type],
                                                   keep_prob=self.placeholders['edge_weight_dropout_keep_prob'])) # [b*v, h]
                    if self.params['use_edge_bias']:
                        m += edge_biases[edge_type] # [b, v, h]
                    m = tf.reshape(m, [-1, v, h_dim]) # [b, v, h]
                    if edge_type == 0:
                        acts = tf.matmul(adj[edge_type], m)
                    else:
                        acts += tf.matmul(adj[edge_type], m)
                acts = tf.reshape(acts, [-1, h_dim]) # [b*v, h]
                h = node_gru(acts, h)[1] # [b*v, h]
            last_h = tf.reshape(h, [-1, v, h_dim])
        return last_h
def compute_mean_and_logvariance(self):
h_dim = self.params['hidden_size']
reshped_last_h=tf.reshape(self.ops['final_node_representations'], [-1, h_dim])
mean=tf.matmul(reshped_last_h, self.weights['mean_weights']) + self.weights['mean_biases']
logvariance=tf.matmul(reshped_last_h, self.weights['variance_weights']) + self.weights['variance_biases']
return mean, logvariance
def sample_with_mean_and_logvariance(self):
v = self.placeholders['num_vertices']
h_dim = self.params['hidden_size']
# Sample from normal distribution
z_prior = tf.reshape(self.placeholders['z_prior'], [-1, h_dim])
# Train: sample from u, Sigma. Generation: sample from 0,1
z_sampled = tf.cond(self.placeholders['is_generative'], lambda: z_prior, # standard normal
lambda: tf.add(self.ops['mean'], tf.multiply(tf.sqrt(tf.exp(self.ops['logvariance'])), z_prior))) # non-standard normal
# filter
z_sampled = tf.reshape(z_sampled, [-1, v, h_dim]) * self.ops['graph_state_mask']
return z_sampled
def fully_connected(self, input, hidden_weight, hidden_bias, output_weight):
output=tf.nn.relu(tf.matmul(input, hidden_weight) + hidden_bias)
output=tf.matmul(output, output_weight)
return output
    def generate_cross_entropy(self, idx, cross_entropy_losses, edge_predictions, edge_type_predictions):
        """One decoding step of the tf.while_loop in construct_logit_matrices.

        Slices the step-``idx`` data from the incremental placeholders, runs
        the decoder GNN, computes edge / edge-type / stop logits and their
        cross-entropy loss, and writes the results into the TensorArrays.
        Returns the loop state (idx+1, losses, edge preds, edge-type preds).
        """
        v = self.placeholders['num_vertices']
        h_dim = self.params['hidden_size']
        num_symbols = self.params['num_symbols']
        batch_size = tf.shape(self.placeholders['initial_node_representation'])[0]
        # Use latent representation as decoder GNN'input
        filtered_z_sampled = self.ops["initial_repre_for_decoder"] # [b, v, h+h]
        # data needed in this iteration
        incre_adj_mat = self.placeholders['incre_adj_mat'][:,idx,:,:, :] # [b, e, v, v]
        distance_to_others = self.placeholders['distance_to_others'][:, idx, :] # [b,v]
        overlapped_edge_features = self.placeholders['overlapped_edge_features'][:, idx, :] # [b,v]
        node_sequence = self.placeholders['node_sequence'][:, idx, :] # [b, v]
        node_sequence = tf.expand_dims(node_sequence, axis=2) # [b,v,1]
        edge_type_masks = self.placeholders['edge_type_masks'][:, idx, :, :] # [b, e, v]
        # make invalid locations to be very small before using softmax function
        edge_type_masks = edge_type_masks * LARGE_NUMBER - LARGE_NUMBER
        edge_type_labels = self.placeholders['edge_type_labels'][:, idx, :, :] # [b, e, v]
        edge_masks=self.placeholders['edge_masks'][:, idx, :] # [b, v]
        # make invalid locations to be very small before using softmax function
        edge_masks = edge_masks * LARGE_NUMBER - LARGE_NUMBER
        edge_labels = self.placeholders['edge_labels'][:, idx, :] # [b, v]
        local_stop = self.placeholders['local_stop'][:, idx] # [b]
        # concat the hidden states with the node in focus
        filtered_z_sampled = tf.concat([filtered_z_sampled, node_sequence], axis=2) # [b, v, h + h + 1]
        # Decoder GNN
        if self.params["use_graph"]:
            if self.params["residual_connection_on"]:
                new_filtered_z_sampled = self.compute_final_node_representations_with_residual(filtered_z_sampled,
                                            tf.transpose(incre_adj_mat, [1, 0, 2, 3]),
                                            "_decoder") # [b, v, h + h]
            else:
                new_filtered_z_sampled = self.compute_final_node_representations_without_residual(filtered_z_sampled,
                                            tf.transpose(incre_adj_mat, [1, 0, 2, 3]),
                                            self.weights['edge_weights_decoder'],
                                            self.weights['edge_biases_decoder'],
                                            self.weights['node_gru_decoder'], "gru_scope_decoder") # [b, v, h + h]
        else:
            new_filtered_z_sampled = filtered_z_sampled
        # Filter nonexist nodes
        new_filtered_z_sampled=new_filtered_z_sampled * self.ops['graph_state_mask']
        # Take out the node in focus
        node_in_focus = tf.reduce_sum(node_sequence * new_filtered_z_sampled, axis=1)# [b, h + h]
        # edge pair representation
        edge_repr=tf.concat(\
            [tf.tile(tf.expand_dims(node_in_focus, 1), [1,v,1]), new_filtered_z_sampled], axis=2) # [b, v, 2*(h+h)]
        #combine edge repre with local and global repr
        local_graph_repr_before_expansion = tf.reduce_sum(new_filtered_z_sampled, axis=1) / \
                                            tf.reduce_sum(self.placeholders['node_mask'], axis=1, keep_dims=True) # [b, h + h]
        local_graph_repr = tf.expand_dims(local_graph_repr_before_expansion, 1)
        local_graph_repr = tf.tile(local_graph_repr, [1,v,1]) # [b, v, h+h]
        global_graph_repr_before_expansion = tf.reduce_sum(filtered_z_sampled, axis=1) / \
                                             tf.reduce_sum(self.placeholders['node_mask'], axis=1, keep_dims=True)
        global_graph_repr = tf.expand_dims(global_graph_repr_before_expansion, 1)
        global_graph_repr = tf.tile(global_graph_repr, [1,v,1]) # [b, v, h+h]
        # distance representation
        distance_repr = tf.nn.embedding_lookup(self.weights['distance_embedding'], distance_to_others) # [b, v, h+h]
        # overlapped edge feature representation
        overlapped_edge_repr = tf.nn.embedding_lookup(self.weights['overlapped_edge_weight'], overlapped_edge_features) # [b, v, h+h]
        # concat and reshape.
        combined_edge_repr = tf.concat([edge_repr, local_graph_repr,
                                        global_graph_repr, distance_repr, overlapped_edge_repr], axis=2)
        combined_edge_repr = tf.reshape(combined_edge_repr, [-1, self.params["feature_dimension"]*(h_dim + h_dim + 1)])
        # Calculate edge logits
        edge_logits=self.fully_connected(combined_edge_repr, self.weights['edge_iteration'],
                                         self.weights['edge_iteration_biases'], self.weights['edge_iteration_output'])
        edge_logits=tf.reshape(edge_logits, [-1, v]) # [b, v]
        # filter invalid terms
        edge_logits=edge_logits + edge_masks
        # Calculate whether it will stop at this step
        # prepare the data
        expanded_stop_node = tf.tile(self.weights['stop_node'], [batch_size, 1]) # [b, h + h]
        distance_to_stop_node = tf.nn.embedding_lookup(self.weights['distance_embedding'], tf.tile([0], [batch_size])) # [b, h + h]
        overlap_edge_stop_node = tf.nn.embedding_lookup(self.weights['overlapped_edge_weight'], tf.tile([0], [batch_size])) # [b, h + h]
        combined_stop_node_repr = tf.concat([node_in_focus, expanded_stop_node, local_graph_repr_before_expansion,
                                             global_graph_repr_before_expansion, distance_to_stop_node, overlap_edge_stop_node], axis=1) # [b, 6 * (h + h)]
        # logits for stop node
        stop_logits = self.fully_connected(combined_stop_node_repr,
                                           self.weights['edge_iteration'], self.weights['edge_iteration_biases'],
                                           self.weights['edge_iteration_output']) #[b, 1]
        edge_logits = tf.concat([edge_logits, stop_logits], axis=1) # [b, v + 1]
        # Calculate edge type logits
        edge_type_logits = []
        for i in range(self.num_edge_types):
            edge_type_logit = self.fully_connected(combined_edge_repr,
                                                   self.weights['edge_type_%d' % i], self.weights['edge_type_biases_%d' % i],
                                                   self.weights['edge_type_output_%d' % i]) #[b * v, 1]
            edge_type_logits.append(tf.reshape(edge_type_logit, [-1, 1, v])) # [b, 1, v]
        edge_type_logits = tf.concat(edge_type_logits, axis=1) # [b, e, v]
        # filter invalid items
        edge_type_logits = edge_type_logits + edge_type_masks # [b, e, v]
        # softmax over edge type axis
        edge_type_probs = tf.nn.softmax(edge_type_logits, 1) # [b, e, v]
        # edge labels
        edge_labels = tf.concat([edge_labels,tf.expand_dims(local_stop, 1)], axis=1) # [b, v + 1]
        # softmax for edge
        edge_loss =- tf.reduce_sum(tf.log(tf.nn.softmax(edge_logits) + SMALL_NUMBER) * edge_labels, axis=1)
        # softmax for edge type
        edge_type_loss =- edge_type_labels * tf.log(edge_type_probs + SMALL_NUMBER) # [b, e, v]
        edge_type_loss = tf.reduce_sum(edge_type_loss, axis=[1, 2]) # [b]
        # total loss
        iteration_loss = edge_loss + edge_type_loss
        cross_entropy_losses = cross_entropy_losses.write(idx, iteration_loss)
        edge_predictions = edge_predictions.write(idx, tf.nn.softmax(edge_logits))
        edge_type_predictions = edge_type_predictions.write(idx, edge_type_probs)
        return (idx+1, cross_entropy_losses, edge_predictions, edge_type_predictions)
    def construct_logit_matrices(self):
        """Build the decoder graph: per-step losses, predictions, and node-symbol logits.

        Drives generate_cross_entropy through a tf.while_loop over the BFS
        transitions; stores results in self.ops. The index-0 reads of the
        prediction TensorArrays provide the first-step predictions used at
        generation time.
        """
        v = self.placeholders['num_vertices']
        batch_size=tf.shape(self.placeholders['initial_node_representation'])[0]
        h_dim = self.params['hidden_size']
        # Initial state: embedding
        latent_node_state= self.get_node_embedding_state(self.placeholders["latent_node_symbols"])
        # concat z_sampled with node symbols
        filtered_z_sampled = tf.concat([self.ops['z_sampled'],
                                        latent_node_state], axis=2) # [b, v, h + h]
        self.ops["initial_repre_for_decoder"] = filtered_z_sampled
        # The tensor array used to collect the cross entropy losses at each step
        cross_entropy_losses = tf.TensorArray(dtype=tf.float32, size=self.placeholders['max_iteration_num'])
        edge_predictions= tf.TensorArray(dtype=tf.float32, size=self.placeholders['max_iteration_num'])
        edge_type_predictions = tf.TensorArray(dtype=tf.float32, size=self.placeholders['max_iteration_num'])
        idx_final, cross_entropy_losses_final, edge_predictions_final,edge_type_predictions_final=\
            tf.while_loop(lambda idx, cross_entropy_losses,edge_predictions,edge_type_predictions: idx < self.placeholders['max_iteration_num'],
                          self.generate_cross_entropy,
                          (tf.constant(0), cross_entropy_losses,edge_predictions,edge_type_predictions,))
        # record the predictions for generation
        self.ops['edge_predictions'] = edge_predictions_final.read(0)
        self.ops['edge_type_predictions'] = edge_type_predictions_final.read(0)
        # final cross entropy losses
        cross_entropy_losses_final = cross_entropy_losses_final.stack()
        self.ops['cross_entropy_losses'] = tf.transpose(cross_entropy_losses_final, [1,0]) # [b, es]
        # Logits for node symbols
        self.ops['node_symbol_logits']=tf.reshape(tf.matmul(tf.reshape(self.ops['z_sampled'],[-1, h_dim]), self.weights['node_symbol_weights']) +
                                                  self.weights['node_symbol_biases'], [-1, v, self.params['num_symbols']])
    def construct_loss(self):
        """Assemble the total training loss.

        Combines the edge reconstruction loss, node-symbol loss, KL divergence
        (weighted by kl_trade_off_lambda), and the property (QED) regression
        loss (weighted by qed_trade_off_lambda). Also creates the regression
        MLPs per task and the gradient of QED w.r.t. z used for optimization.
        Returns the scalar mean loss.
        """
        v = self.placeholders['num_vertices']
        h_dim = self.params['hidden_size']
        kl_trade_off_lambda =self.placeholders['kl_trade_off_lambda']
        # Edge loss
        self.ops["edge_loss"] = tf.reduce_sum(self.ops['cross_entropy_losses'] * self.placeholders['iteration_mask'], axis=1)
        # KL loss
        kl_loss = 1 + self.ops['logvariance'] - tf.square(self.ops['mean']) - tf.exp(self.ops['logvariance'])
        kl_loss = tf.reshape(kl_loss, [-1, v, h_dim]) * self.ops['graph_state_mask']
        self.ops['kl_loss'] = -0.5 * tf.reduce_sum(kl_loss, [1,2])
        # Node symbol loss
        self.ops['node_symbol_prob'] = tf.nn.softmax(self.ops['node_symbol_logits'])
        self.ops['node_symbol_loss'] = -tf.reduce_sum(tf.log(self.ops['node_symbol_prob'] + SMALL_NUMBER) *
                                                      self.placeholders['node_symbols'], axis=[1,2])
        # Add in the loss for calculating QED
        for (internal_id, task_id) in enumerate(self.params['task_ids']):
            with tf.variable_scope("out_layer_task%i" % task_id):
                with tf.variable_scope("regression_gate"):
                    self.weights['regression_gate_task%i' % task_id] = MLP(self.params['hidden_size'], 1, [],
                                                                           self.placeholders['out_layer_dropout_keep_prob'])
                with tf.variable_scope("regression"):
                    self.weights['regression_transform_task%i' % task_id] = MLP(self.params['hidden_size'], 1, [],
                                                                                self.placeholders['out_layer_dropout_keep_prob'])
                normalized_z_sampled=tf.nn.l2_normalize(self.ops['z_sampled'], 2)
                self.ops['qed_computed_values']=computed_values = self.gated_regression(normalized_z_sampled,
                                                                                       self.weights['regression_gate_task%i' % task_id],
                                                                                       self.weights['regression_transform_task%i' % task_id], self.params["hidden_size"],
                                                                                       self.weights['qed_weights'], self.weights['qed_biases'],
                                                                                       self.placeholders['num_vertices'], self.placeholders['node_mask'])
                diff = computed_values - self.placeholders['target_values'][internal_id,:] # [b]
                task_target_mask = self.placeholders['target_mask'][internal_id,:]
                task_target_num = tf.reduce_sum(task_target_mask) + SMALL_NUMBER
                diff = diff * task_target_mask # Mask out unused values [b]
                self.ops['accuracy_task%i' % task_id] = tf.reduce_sum(tf.abs(diff)) / task_target_num
                task_loss = tf.reduce_sum(0.5 * tf.square(diff)) / task_target_num # number
                # Normalise loss to account for fewer task-specific examples in batch:
                task_loss = task_loss * (1.0 / (self.params['task_sample_ratios'].get(task_id) or 1.0))
                self.ops['qed_loss'].append(task_loss)
                if task_id ==0: # Assume it is the QED score
                    z_sampled_shape=tf.shape(self.ops['z_sampled'])
                    flattened_z_sampled=tf.reshape(self.ops['z_sampled'], [z_sampled_shape[0], -1])
                    self.ops['l2_loss'] = 0.01* tf.reduce_sum(flattened_z_sampled * flattened_z_sampled, axis=1) /2
                    # Calculate the derivative with respect to QED + l2 loss
                    self.ops['derivative_z_sampled'] = tf.gradients(self.ops['qed_computed_values'] -
                                                                    self.ops['l2_loss'],self.ops['z_sampled'])
        self.ops['total_qed_loss'] = tf.reduce_sum(self.ops['qed_loss']) # number
        self.ops['mean_edge_loss'] = tf.reduce_mean(self.ops["edge_loss"]) # record the mean edge loss
        self.ops['mean_node_symbol_loss'] = tf.reduce_mean(self.ops["node_symbol_loss"])
        self.ops['mean_kl_loss'] = tf.reduce_mean(kl_trade_off_lambda *self.ops['kl_loss'])
        self.ops['mean_total_qed_loss'] = self.params["qed_trade_off_lambda"]*self.ops['total_qed_loss']
        return tf.reduce_mean(self.ops["edge_loss"] + self.ops['node_symbol_loss'] + \
                              kl_trade_off_lambda *self.ops['kl_loss'])\
                              + self.params["qed_trade_off_lambda"]*self.ops['total_qed_loss']
def gated_regression(self, last_h, regression_gate, regression_transform, hidden_size, projection_weight, projection_bias, v, mask):
# last_h: [b x v x h]
last_h = tf.reshape(last_h, [-1, hidden_size]) # [b*v, h]
# linear projection on last_h
last_h = tf.nn.relu(tf.matmul(last_h, projection_weight)+projection_bias) # [b*v, h]
# same as last_h
gate_input = last_h
# linear projection and combine
gated_outputs = tf.nn.sigmoid(regression_gate(gate_input)) * tf.nn.tanh(regression_transform(last_h)) # [b*v, 1]
gated_outputs = tf.reshape(gated_outputs, [-1, v]) # [b, v]
masked_gated_outputs = gated_outputs * mask # [b x v]
output = tf.reduce_sum(masked_gated_outputs, axis = 1) # [b]
output=tf.sigmoid(output)
return output
    def calculate_incremental_results(self, raw_data, bucket_sizes, file_name):
        """Precompute the incremental (BFS) generation traces for each molecule.

        For every data point, one or more BFS starting indices are chosen
        (canonical vs random order, single vs multiple paths, per params) and
        construct_incremental_graph produces the per-step decoder supervision.
        Returns (incremental_results, new_raw_data), aligned element-wise.

        NOTE(review): construct_incremental_graph is called with the
        module-level ``dataset`` global while to_graph uses
        self.params["dataset"] — confirm these always agree.
        """
        incremental_results=[]
        # copy the raw_data if more than 1 BFS path is added
        new_raw_data=[]
        for idx, d in enumerate(raw_data):
            # Use canonical order or random order here. canonical order starts from index 0. random order starts from random nodes
            if not self.params["path_random_order"]:
                # Use several different starting index if using multi BFS path
                if self.params["multi_bfs_path"]:
                    list_of_starting_idx= list(range(self.params["bfs_path_count"]))
                else:
                    list_of_starting_idx=[0] # the index 0
            else:
                # get the node length for this molecule
                node_length=len(d["node_features"])
                if self.params["multi_bfs_path"]:
                    list_of_starting_idx= np.random.choice(node_length, self.params["bfs_path_count"], replace=True) #randomly choose several
                else:
                    list_of_starting_idx= [random.choice(list(range(node_length)))] # randomly choose one
            for list_idx, starting_idx in enumerate(list_of_starting_idx):
                # choose a bucket
                chosen_bucket_idx = np.argmax(bucket_sizes > max([v for e in d['graph']
                                                                 for v in [e[0], e[2]]]))
                chosen_bucket_size = bucket_sizes[chosen_bucket_idx]
                # Calculate incremental results without master node
                nodes_no_master, edges_no_master = to_graph(d['smiles'], self.params["dataset"])
                incremental_adj_mat,distance_to_others,node_sequence,edge_type_masks,edge_type_labels,local_stop, edge_masks, edge_labels, overlapped_edge_features=\
                    construct_incremental_graph(dataset, edges_no_master, chosen_bucket_size,
                                                len(nodes_no_master), nodes_no_master, self.params, initial_idx=starting_idx)
                if self.params["sample_transition"] and list_idx > 0:
                    # Merge this path's counts into the previous entry for the same molecule.
                    incremental_results[-1]=[x+y for x, y in zip(incremental_results[-1], [incremental_adj_mat,distance_to_others,
                                                                                          node_sequence,edge_type_masks,edge_type_labels,local_stop, edge_masks, edge_labels, overlapped_edge_features])]
                else:
                    incremental_results.append([incremental_adj_mat, distance_to_others, node_sequence, edge_type_masks,
                                                edge_type_labels, local_stop, edge_masks, edge_labels, overlapped_edge_features])
                    # copy the raw_data here
                    new_raw_data.append(d)
            if idx % 50 == 0:
                print('finish calculating %d incremental matrices' % idx, end="\r")
        return incremental_results, new_raw_data
# ----- Data preprocessing and chunking into minibatches:
    def process_raw_graphs(self, raw_data, is_training_data, file_name, bucket_sizes=None):
        """Convert raw graph dicts into bucketed, minibatch-ready examples.

        Graphs are grouped into buckets by padded size so each minibatch only
        pads up to its bucket's size. Returns a tuple
        (bucketed, bucket_sizes, bucket_at_step) where `bucket_at_step` lists
        which bucket to draw a full batch from at each step.
        """
        if bucket_sizes is None:
            bucket_sizes = dataset_info(self.params["dataset"])["bucket_sizes"]
        # Per-graph step-by-step (incremental) generation supervision signals.
        incremental_results, raw_data=self.calculate_incremental_results(raw_data, bucket_sizes, file_name)
        bucketed = defaultdict(list)
        x_dim = len(raw_data[0]["node_features"][0])
        for d, (incremental_adj_mat,distance_to_others,node_sequence,edge_type_masks,edge_type_labels,local_stop, edge_masks, edge_labels, overlapped_edge_features)\
            in zip(raw_data, incremental_results):
            # choose a bucket: the smallest bucket size exceeding every node index used by an edge
            chosen_bucket_idx = np.argmax(bucket_sizes > max([v for e in d['graph']
                                                              for v in [e[0], e[2]]]))
            chosen_bucket_size = bucket_sizes[chosen_bucket_idx]
            # total number of nodes in this data point
            n_active_nodes = len(d["node_features"])
            bucketed[chosen_bucket_idx].append({
                'adj_mat': graph_to_adj_mat(d['graph'], chosen_bucket_size, self.num_edge_types, self.params['tie_fwd_bkwd']),
                'incre_adj_mat': incremental_adj_mat,
                'distance_to_others': distance_to_others,
                'overlapped_edge_features': overlapped_edge_features,
                'node_sequence': node_sequence,
                'edge_type_masks': edge_type_masks,
                'edge_type_labels': edge_type_labels,
                'edge_masks': edge_masks,
                'edge_labels': edge_labels,
                'local_stop': local_stop,
                'number_iteration': len(local_stop),
                # Node features padded with all-zero rows up to the bucket size.
                'init': d["node_features"] + [[0 for _ in range(x_dim)] for __ in
                                              range(chosen_bucket_size - n_active_nodes)],
                'labels': [d["targets"][task_id][0] for task_id in self.params['task_ids']],
                'mask': [1. for _ in range(n_active_nodes) ] + [0. for _ in range(chosen_bucket_size - n_active_nodes)]
            })
        if is_training_data:
            for (bucket_idx, bucket) in bucketed.items():
                np.random.shuffle(bucket)
                # Optionally hide part of each task's labels (set to None) for sub-sampling.
                for task_id in self.params['task_ids']:
                    task_sample_ratio = self.params['task_sample_ratios'].get(str(task_id))
                    if task_sample_ratio is not None:
                        ex_to_sample = int(len(bucket) * task_sample_ratio)
                        for ex_id in range(ex_to_sample, len(bucket)):
                            bucket[ex_id]['labels'][task_id] = None
        # One entry per full batch available in each bucket, then flattened.
        bucket_at_step = [[bucket_idx for _ in range(len(bucket_data) // self.params['batch_size'])]
                          for bucket_idx, bucket_data in bucketed.items()]
        bucket_at_step = [x for y in bucket_at_step for x in y]
        return (bucketed, bucket_sizes, bucket_at_step)
def pad_annotations(self, annotations):
return np.pad(annotations,
pad_width=[[0, 0], [0, 0], [0, self.params['hidden_size'] - self.params["num_symbols"]]],
mode='constant')
    def make_batch(self, elements, maximum_vertice_num):
        """Collate a list of bucketed examples into one dense batch dict.

        Per-iteration (incremental generation) tensors are padded with zeros up
        to the largest iteration count in the batch so they stack into
        rectangular arrays; `iteration_mask` marks which steps are real.
        """
        # get maximum number of iterations in this batch. used to control while_loop
        max_iteration_num=-1
        for d in elements:
            max_iteration_num=max(d['number_iteration'], max_iteration_num)
        batch_data = {'adj_mat': [], 'init': [], 'labels': [], 'edge_type_masks':[], 'edge_type_labels':[], 'edge_masks':[],
                      'edge_labels':[],'node_mask': [], 'task_masks': [], 'node_sequence':[],
                      'iteration_mask': [], 'local_stop': [], 'incre_adj_mat': [], 'distance_to_others': [],
                      'max_iteration_num': max_iteration_num, 'overlapped_edge_features': []}
        for d in elements:
            # sparse to dense for saving memory
            incre_adj_mat = incre_adj_mat_to_dense(d['incre_adj_mat'], self.num_edge_types, maximum_vertice_num)
            distance_to_others = distance_to_others_dense(d['distance_to_others'], maximum_vertice_num)
            overlapped_edge_features = overlapped_edge_features_to_dense(d['overlapped_edge_features'], maximum_vertice_num)
            node_sequence = node_sequence_to_dense(d['node_sequence'],maximum_vertice_num)
            edge_type_masks = edge_type_masks_to_dense(d['edge_type_masks'], maximum_vertice_num,self.num_edge_types)
            edge_type_labels = edge_type_labels_to_dense(d['edge_type_labels'], maximum_vertice_num,self.num_edge_types)
            edge_masks = edge_masks_to_dense(d['edge_masks'], maximum_vertice_num)
            edge_labels = edge_labels_to_dense(d['edge_labels'], maximum_vertice_num)
            batch_data['adj_mat'].append(d['adj_mat'])
            batch_data['init'].append(d['init'])
            batch_data['node_mask'].append(d['mask'])
            # Pad every per-iteration tensor with zero entries up to max_iteration_num
            # so all examples in the batch have the same number of steps.
            batch_data['incre_adj_mat'].append(incre_adj_mat +
                            [np.zeros((self.num_edge_types, maximum_vertice_num,maximum_vertice_num))
                            for _ in range(max_iteration_num-d['number_iteration'])])
            batch_data['distance_to_others'].append(distance_to_others +
                            [np.zeros((maximum_vertice_num))
                            for _ in range(max_iteration_num-d['number_iteration'])])
            batch_data['overlapped_edge_features'].append(overlapped_edge_features +
                            [np.zeros((maximum_vertice_num))
                            for _ in range(max_iteration_num-d['number_iteration'])])
            batch_data['node_sequence'].append(node_sequence +
                            [np.zeros((maximum_vertice_num))
                            for _ in range(max_iteration_num-d['number_iteration'])])
            batch_data['edge_type_masks'].append(edge_type_masks +
                            [np.zeros((self.num_edge_types, maximum_vertice_num))
                            for _ in range(max_iteration_num-d['number_iteration'])])
            batch_data['edge_masks'].append(edge_masks +
                            [np.zeros((maximum_vertice_num))
                            for _ in range(max_iteration_num-d['number_iteration'])])
            batch_data['edge_type_labels'].append(edge_type_labels +
                            [np.zeros((self.num_edge_types, maximum_vertice_num))
                            for _ in range(max_iteration_num-d['number_iteration'])])
            batch_data['edge_labels'].append(edge_labels +
                            [np.zeros((maximum_vertice_num))
                            for _ in range(max_iteration_num-d['number_iteration'])])
            batch_data['iteration_mask'].append([1 for _ in range(d['number_iteration'])]+
                            [0 for _ in range(max_iteration_num-d['number_iteration'])])
            batch_data['local_stop'].append([int(s) for s in d["local_stop"]]+
                            [0 for _ in range(max_iteration_num-d['number_iteration'])])
            # Labels hidden by task sub-sampling are None: zero them out and mask them.
            target_task_values = []
            target_task_mask = []
            for target_val in d['labels']:
                if target_val is None: # This is one of the examples we didn't sample...
                    target_task_values.append(0.)
                    target_task_mask.append(0.)
                else:
                    target_task_values.append(target_val)
                    target_task_mask.append(1.)
            batch_data['labels'].append(target_task_values)
            batch_data['task_masks'].append(target_task_mask)
        return batch_data
    def get_dynamic_feed_dict(self, elements, latent_node_symbol, incre_adj_mat, num_vertices,
                        distance_to_others, overlapped_edge_dense, node_sequence, edge_type_masks, edge_masks, random_normal_states):
        """Build a TF feed dict for one generation step (dropout off, generative mode).

        When `incre_adj_mat` is None, all incremental inputs are replaced with
        minimal zero placeholders — used when only node-symbol predictions or
        latent-space gradients are required.
        """
        if incre_adj_mat is None:
            # Minimal all-zero placeholders for the incremental inputs.
            incre_adj_mat=np.zeros((1, 1, self.num_edge_types, 1, 1))
            distance_to_others=np.zeros((1,1,1))
            overlapped_edge_dense=np.zeros((1,1,1))
            node_sequence=np.zeros((1,1,1))
            edge_type_masks=np.zeros((1,1,self.num_edge_types,1))
            edge_masks=np.zeros((1,1,1))
            latent_node_symbol=np.zeros((1,1,self.params["num_symbols"]))
        return {
            self.placeholders['z_prior']: random_normal_states, # [1, v, h]
            self.placeholders['incre_adj_mat']: incre_adj_mat, # [1, 1, e, v, v]
            self.placeholders['num_vertices']: num_vertices, # v
            self.placeholders['initial_node_representation']: \
                self.pad_annotations([elements['init']]),
            self.placeholders['node_symbols']: [elements['init']],
            self.placeholders['latent_node_symbols']: self.pad_annotations(latent_node_symbol),
            self.placeholders['adjacency_matrix']: [elements['adj_mat']],
            self.placeholders['node_mask']: [elements['mask']],
            # Keep-probabilities of 1 disable dropout at generation time.
            self.placeholders['graph_state_keep_prob']: 1,
            self.placeholders['edge_weight_dropout_keep_prob']: 1,
            self.placeholders['iteration_mask']: [[1]],
            self.placeholders['is_generative']: True,
            self.placeholders['out_layer_dropout_keep_prob'] : 1.0,
            self.placeholders['distance_to_others'] : distance_to_others, # [1, 1,v]
            self.placeholders['overlapped_edge_features']: overlapped_edge_dense,
            self.placeholders['max_iteration_num']: 1,
            self.placeholders['node_sequence']: node_sequence, #[1, 1, v]
            self.placeholders['edge_type_masks']: edge_type_masks, #[1, 1, e, v]
            self.placeholders['edge_masks']: edge_masks, # [1, 1, v]
        }
def get_node_symbol(self, batch_feed_dict):
fetch_list = [self.ops['node_symbol_prob']]
result = self.sess.run(fetch_list, feed_dict=batch_feed_dict)
return result[0]
def node_symbol_one_hot(self, sampled_node_symbol, real_n_vertices, max_n_vertices):
one_hot_representations=[]
for idx in range(max_n_vertices):
representation = [0] * self.params["num_symbols"]
if idx < real_n_vertices:
atom_type=sampled_node_symbol[idx]
representation[atom_type]=1
one_hot_representations.append(representation)
return one_hot_representations
    def search_and_generate_molecule(self, initial_idx, valences,
                                     sampled_node_symbol, real_n_vertices, random_normal_states,
                                     elements, max_n_vertices):
        """Grow one molecule edge-by-edge via breadth-first search.

        Starting from `initial_idx`, repeatedly asks the decoder for an edge
        (or the stop signal) out of the node in focus, respecting the remaining
        `valences`. Returns (molecule, total_log_prob); the molecule may be
        None if RDKit cannot parse the final SMILES round-trip.
        """
        # New molecule
        new_mol = Chem.MolFromSmiles('')
        new_mol = Chem.rdchem.RWMol(new_mol)
        # Add atoms
        add_atoms(new_mol, sampled_node_symbol, self.params["dataset"])
        # Breadth first search over the molecule
        queue=deque([initial_idx])
        # color 0: have not found 1: in the queue 2: searched already
        color = [0] * max_n_vertices
        color[initial_idx] = 1
        # Empty adj list at the beginning
        incre_adj_list=defaultdict(list)
        # record the log probabilities at each step
        total_log_prob=0
        while len(queue) > 0:
            node_in_focus = queue.popleft()
            # iterate until the stop node is selected
            while True:
                # Prepare data for one iteration based on the graph state
                edge_type_mask_sparse, edge_mask_sparse = generate_mask(valences, incre_adj_list, color, real_n_vertices, node_in_focus, self.params["check_overlap_edge"], new_mol)
                edge_type_mask = edge_type_masks_to_dense([edge_type_mask_sparse], max_n_vertices, self.num_edge_types) # [1, e, v]
                edge_mask = edge_masks_to_dense([edge_mask_sparse],max_n_vertices) # [1, v]
                node_sequence = node_sequence_to_dense([node_in_focus], max_n_vertices) # [1, v]
                distance_to_others_sparse = bfs_distance(node_in_focus, incre_adj_list)
                distance_to_others = distance_to_others_dense([distance_to_others_sparse],max_n_vertices) # [1, v]
                overlapped_edge_sparse = get_overlapped_edge_feature(edge_mask_sparse, color, new_mol)
                overlapped_edge_dense = overlapped_edge_features_to_dense([overlapped_edge_sparse],max_n_vertices) # [1, v]
                incre_adj_mat = incre_adj_mat_to_dense([incre_adj_list],
                                                       self.num_edge_types, max_n_vertices) # [1, e, v, v]
                sampled_node_symbol_one_hot = self.node_symbol_one_hot(sampled_node_symbol, real_n_vertices, max_n_vertices)
                # get feed_dict
                feed_dict=self.get_dynamic_feed_dict(elements, [sampled_node_symbol_one_hot],
                                                     [incre_adj_mat], max_n_vertices, [distance_to_others], [overlapped_edge_dense],
                                                     [node_sequence], [edge_type_mask], [edge_mask], random_normal_states)
                # fetch nn predictions
                fetch_list = [self.ops['edge_predictions'], self.ops['edge_type_predictions']]
                edge_probs, edge_type_probs = self.sess.run(fetch_list, feed_dict=feed_dict)
                # select an edge: either sample from the predicted distribution or take the argmax
                if not self.params["use_argmax_generation"]:
                    neighbor=np.random.choice(np.arange(max_n_vertices+1), p=edge_probs[0])
                else:
                    neighbor=np.argmax(edge_probs[0])
                # update log prob
                total_log_prob+=np.log(edge_probs[0][neighbor]+SMALL_NUMBER)
                # stop it if stop node is picked (index max_n_vertices is the virtual stop node)
                if neighbor == max_n_vertices:
                    break
                # or choose an edge type
                if not self.params["use_argmax_generation"]:
                    bond=np.random.choice(np.arange(self.num_edge_types),p=edge_type_probs[0, :, neighbor])
                else:
                    bond=np.argmax(edge_type_probs[0, :, neighbor])
                # update log prob
                total_log_prob+=np.log(edge_type_probs[0, :, neighbor][bond]+SMALL_NUMBER)
                #update valences (edge type index e represents a bond of order e+1)
                valences[node_in_focus] -= (bond+1)
                valences[neighbor] -= (bond+1)
                #add the bond
                new_mol.AddBond(int(node_in_focus), int(neighbor), number_to_bond[bond])
                # add the edge to increment adj list
                incre_adj_list[node_in_focus].append((neighbor, bond))
                incre_adj_list[neighbor].append((node_in_focus, bond))
                # Explore neighbor nodes
                if color[neighbor]==0:
                    queue.append(neighbor)
                    color[neighbor]=1
            color[node_in_focus]=2 # explored
        # Remove unconnected node
        remove_extra_nodes(new_mol)
        # Round-trip through SMILES to canonicalize/sanitize; may return None.
        new_mol=Chem.MolFromSmiles(Chem.MolToSmiles(new_mol))
        return new_mol, total_log_prob
def gradient_ascent(self, random_normal_states, derivative_z_sampled):
return random_normal_states + self.params['prior_learning_rate'] * derivative_z_sampled
# optimization in latent space. generate one molecule for each optimization step
    def optimization_over_prior(self, random_normal_states, num_vertices, generated_all_similes, elements, count):
        """Gradient-ascent in latent space, generating one molecule per step.

        Moves the prior sample along the derivative of the learned objective
        with respect to the latent states, generating and recording a molecule
        before the first step and after each subsequent one. Returns the final
        latent states.
        """
        # record how many optimization steps are taken
        step=0
        # generate a new molecule
        self.generate_graph_with_state(random_normal_states, num_vertices, generated_all_similes, elements, step, count)
        fetch_list = [self.ops['derivative_z_sampled'], self.ops['qed_computed_values'], self.ops['l2_loss']]
        for _ in range(self.params['optimization_step']):
            # get current qed and derivative
            batch_feed_dict=self.get_dynamic_feed_dict(elements, None, None, num_vertices, None,
                                None, None, None, None,
                                random_normal_states)
            derivative_z_sampled, qed_computed_values, l2_loss= self.sess.run(fetch_list, feed_dict=batch_feed_dict)
            # update the states
            random_normal_states=self.gradient_ascent(random_normal_states,
                                                      derivative_z_sampled[0])
            # generate a new molecule
            step+=1
            self.generate_graph_with_state(random_normal_states, num_vertices,
                                           generated_all_similes, elements, step, count)
        return random_normal_states
def generate_graph_with_state(self, random_normal_states, num_vertices,
generated_all_similes, elements, step, count):
# Get back node symbol predictions
# Prepare dict
node_symbol_batch_feed_dict=self.get_dynamic_feed_dict(elements, None, None,
num_vertices, None, None, None, None, None, random_normal_states)
# Get predicted node probs
predicted_node_symbol_prob=self.get_node_symbol(node_symbol_batch_feed_dict)
# Node numbers for each graph
real_length=get_graph_length([elements['mask']])[0] # [valid_node_number]
# Sample node symbols
sampled_node_symbol=sample_node_symbol(predicted_node_symbol_prob, [real_length], self.params["dataset"])[0] # [v]
# Maximum valences for each node
valences=get_initial_valence(sampled_node_symbol, self.params["dataset"]) # [v]
# randomly pick the starting point or use zero
if not self.params["path_random_order"]:
# Try different starting points
if self.params["try_different_starting"]:
#starting_point=list(range(self.params["num_different_starting"]))
starting_point=random.sample(range(real_length),
min(self.params["num_different_starting"], real_length))
else:
starting_point=[0]
else:
if self.params["try_different_starting"]:
starting_point=random.sample(range(real_length),
min(self.params["num_different_starting"], real_length))
else:
starting_point=[random.choice(list(range(real_length)))] # randomly choose one
# record all molecules from different starting points
all_mol=[]
for idx in starting_point:
# generate a new molecule
new_mol, total_log_prob=self.search_and_generate_molecule(idx, np.copy(valences),
sampled_node_symbol, real_length,
random_normal_states, elements, num_vertices)
# record the molecule with largest number of shapes
if dataset=='qm9' and new_mol is not None:
all_mol.append((np.sum(shape_count(self.params["dataset"], True,
[Chem.MolToSmiles(new_mol)])[1]), total_log_prob, new_mol))
# record the molecule with largest number of pentagon and hexagonal for zinc and cep
elif dataset=='zinc' and new_mol is not None:
counts=shape_count(self.params["dataset"], True,[Chem.MolToSmiles(new_mol)])
all_mol.append((0.5 * counts[1][2]+ counts[1][3], total_log_prob, new_mol))
elif dataset=='cep' and new_mol is not None:
all_mol.append((np.sum(shape_count(self.params["dataset"], True,
[Chem.MolToSmiles(new_mol)])[1][2:]), total_log_prob, new_mol))
# select one out
best_mol = select_best(all_mol)
# nothing generated
if best_mol is None:
return
# visualize it
make_dir('visualization_%s' % dataset)
visualize_mol('visualization_%s/%d_%d.png' % (dataset, count, step), best_mol)
# record the best molecule
generated_all_similes.append(Chem.MolToSmiles(best_mol))
dump('generated_smiles_%s' % (dataset), generated_all_similes)
print("Real QED value")
print(QED.qed(best_mol))
if len(generated_all_similes) >= self.params['number_of_generation']:
print("generation done")
exit(0)
def compensate_node_length(self, elements, bucket_size):
maximum_length=bucket_size+self.params["compensate_num"]
real_length=get_graph_length([elements['mask']])[0]+self.params["compensate_num"]
elements['mask']=[1]*real_length + [0]*(maximum_length-real_length)
elements['init']=np.zeros((maximum_length, self.params["num_symbols"]))
elements['adj_mat']=np.zeros((self.num_edge_types, maximum_length, maximum_length))
return maximum_length
    def generate_new_graphs(self, data):
        """Generate molecules for every example in `data` via latent-space optimization.

        Iterates over the (shuffled) bucket schedule, enlarges each example's
        node budget, samples an initial latent state from the prior and runs
        `optimization_over_prior` on it.
        """
        # bucketed: data organized by bucket
        (bucketed, bucket_sizes, bucket_at_step) = data
        bucket_counters = defaultdict(int)
        # all generated similes
        generated_all_similes=[]
        # counter
        count = 0
        # shuffle the lengths
        np.random.shuffle(bucket_at_step)
        for step in range(len(bucket_at_step)):
            bucket = bucket_at_step[step] # bucket number
            # data index
            start_idx = bucket_counters[bucket] * self.params['batch_size']
            end_idx = (bucket_counters[bucket] + 1) * self.params['batch_size']
            # batch data
            elements_batch = bucketed[bucket][start_idx:end_idx]
            for elements in elements_batch:
                # compensate for the length during generation
                # (this is a result that BFS may not make use of all candidate nodes during generation)
                maximum_length=self.compensate_node_length(elements, bucket_sizes[bucket])
                # initial state sampled from the standard-normal prior
                random_normal_states=generate_std_normal(1, maximum_length,\
                                                         self.params['hidden_size']) # [1, v, h]
                random_normal_states = self.optimization_over_prior(random_normal_states,
                                                                    maximum_length, generated_all_similes,elements, count)
                count+=1
            bucket_counters[bucket] += 1
    def make_minibatch_iterator(self, data, is_training: bool):
        """Yield TF feed dicts for one pass over the bucketed data.

        Training mode shuffles both the bucket schedule and the contents of
        each bucket and enables dropout; evaluation keeps the order and sets
        all keep-probabilities to 1.
        """
        (bucketed, bucket_sizes, bucket_at_step) = data
        if is_training:
            np.random.shuffle(bucket_at_step)
            for _, bucketed_data in bucketed.items():
                np.random.shuffle(bucketed_data)
        bucket_counters = defaultdict(int)
        dropout_keep_prob = self.params['graph_state_dropout_keep_prob'] if is_training else 1.
        edge_dropout_keep_prob = self.params['edge_weight_dropout_keep_prob'] if is_training else 1.
        for step in range(len(bucket_at_step)):
            bucket = bucket_at_step[step]
            # Slice out the next contiguous batch for this bucket.
            start_idx = bucket_counters[bucket] * self.params['batch_size']
            end_idx = (bucket_counters[bucket] + 1) * self.params['batch_size']
            elements = bucketed[bucket][start_idx:end_idx]
            batch_data = self.make_batch(elements, bucket_sizes[bucket])
            num_graphs = len(batch_data['init'])
            initial_representations = batch_data['init']
            initial_representations = self.pad_annotations(initial_representations)
            batch_feed_dict = {
                self.placeholders['initial_node_representation']: initial_representations,
                self.placeholders['node_symbols']: batch_data['init'],
                self.placeholders['latent_node_symbols']: initial_representations,
                # Targets/masks are transposed to [task, graph] layout.
                self.placeholders['target_values']: np.transpose(batch_data['labels'], axes=[1,0]),
                self.placeholders['target_mask']: np.transpose(batch_data['task_masks'], axes=[1, 0]),
                self.placeholders['num_graphs']: num_graphs,
                self.placeholders['num_vertices']: bucket_sizes[bucket],
                self.placeholders['adjacency_matrix']: batch_data['adj_mat'],
                self.placeholders['node_mask']: batch_data['node_mask'],
                self.placeholders['graph_state_keep_prob']: dropout_keep_prob,
                self.placeholders['edge_weight_dropout_keep_prob']: edge_dropout_keep_prob,
                self.placeholders['iteration_mask']: batch_data['iteration_mask'],
                self.placeholders['incre_adj_mat']: batch_data['incre_adj_mat'],
                self.placeholders['distance_to_others']: batch_data['distance_to_others'],
                self.placeholders['node_sequence']: batch_data['node_sequence'],
                self.placeholders['edge_type_masks']: batch_data['edge_type_masks'],
                self.placeholders['edge_type_labels']: batch_data['edge_type_labels'],
                self.placeholders['edge_masks']: batch_data['edge_masks'],
                self.placeholders['edge_labels']: batch_data['edge_labels'],
                self.placeholders['local_stop']: batch_data['local_stop'],
                self.placeholders['max_iteration_num']: batch_data['max_iteration_num'],
                self.placeholders['kl_trade_off_lambda']: self.params['kl_trade_off_lambda'],
                self.placeholders['overlapped_edge_features']: batch_data['overlapped_edge_features']
            }
            bucket_counters[bucket] += 1
            yield batch_feed_dict
if __name__ == "__main__":
    # Parse command-line arguments according to the module docstring (docopt).
    args = docopt(__doc__)
    # Module-level name used elsewhere when this file runs as a script.
    dataset = args.get('--dataset')
    try:
        model = DenseGGNNChemModel(args)
        evaluation = False  # flip to True to run the evaluation path instead of training
        if evaluation:
            model.example_evaluation()
        else:
            model.train()
    except Exception:
        # Print the traceback, then drop into a post-mortem debugger at the
        # point of failure. A bare `except:` would also trap SystemExit and
        # KeyboardInterrupt, so catch only Exception.
        traceback.print_exc()
        pdb.post_mortem(sys.exc_info()[2])
|
constrained-graph-variational-autoencoder/CGVAE.py/0
|
{
"file_path": "constrained-graph-variational-autoencoder/CGVAE.py",
"repo_id": "constrained-graph-variational-autoencoder",
"token_count": 32095
}
| 611 |
# Base image bundles FastAPI served by Uvicorn workers under Gunicorn.
FROM tiangolo/uvicorn-gunicorn-fastapi:python3.9
# Port the server listens on inside the container.
ENV PORT 8080
# Import path of the ASGI application ("module:attribute").
ENV APP_MODULE app.api:app
ENV LOG_LEVEL debug
# Number of Gunicorn worker processes.
ENV WEB_CONCURRENCY 2
# Install Python dependencies first so this layer is cached across code changes.
COPY ./requirements.txt ./requirements.txt
RUN pip install -r requirements.txt
# Download the spaCy model selected in the cookiecutter template.
RUN spacy download {{cookiecutter.spacy_model}}
# Copy the application code last.
COPY ./app /app/app
|
cookiecutter-spacy-fastapi/{{cookiecutter.project_slug}}/Dockerfile/0
|
{
"file_path": "cookiecutter-spacy-fastapi/{{cookiecutter.project_slug}}/Dockerfile",
"repo_id": "cookiecutter-spacy-fastapi",
"token_count": 113
}
| 612 |
# Sharing Updatable Models (SUM) on Blockchain
(formerly Decentralized & Collaborative AI on Blockchain)
<img src="./assets/logo.gif?raw=true" width=500 alt="Animated logo for the project. A neural network appears on a block. The nodes change color until finally converging. The block slides away on a chain and the process restarts on the next blank block.">
<!-- Put horizontally since build status badges are normally horizontal. -->
| [Demo][demo-folder] | [Simulation][simulation-folder] | Security |
|:-:|:-:|:-:|
| [](https://github.com/microsoft/0xDeCA10B/actions/workflows/demo-test.yml) | [](https://github.com/microsoft/0xDeCA10B/actions/workflows/simulation-test.yml) | [](https://dev.azure.com/maluuba/0xDeCA10B/_build/latest?definitionId=118&branchName=main) |
**Sharing Updatable Models (SUM) on Blockchain** is a framework to host and train publicly available machine learning models.
Ideally, using a model to get a prediction is free.
Adding data consists of validation by three steps as described below.
<img src="./assets/architecture_flow.png?raw=true" width=500 alt="Picture of a someone sending data to the addData method in CollaborativeTrainer which sends data to the 3 main components as further described next.">
1. The **IncentiveMechanism** validates the request to add data, for instance, in some cases a "stake" or deposit is required. In some cases, the incentive mechanism can also be triggered later to provide users with payments or virtual "karma" points.
2. The **DataHandler** stores data and meta-data on the blockchain. This ensures that it is accessible for all future uses, not limited to this smart contract.
3. The machine learning **model** is updated according to predefined training algorithms. In addition to adding data, anyone can query the model for predictions **for free**.
The basics of the framework can be found in our [blog post][blog1].
A demo of one incentive mechanism can be found [here][demo].
More details can be found in the [initial paper][overview-paper] describing the framework, accepted to Blockchain-2019, The IEEE International Conference on Blockchain.
This repository contains:
* [Demos][demo-folder] showcasing some proof of concept systems using the Ethereum blockchain. There is a locally deployable test blockchain and demo dashboard to interact with smart contracts written in Solidity.
* [Simulation tools][simulation-folder] written in Python to quickly see how models and incentive mechanisms would work when deployed.
<img src="./assets/aka.ms 0xDeCA10B QR.png?raw=true" width=250 alt="Picture of a QR code with aka.ms/0xDeCA10B written in the middle.">
# FAQ/Concerns
## Aren't smart contracts just for simple code?
There are many options.
We can restrict the framework to simple models: Perceptron, Naive Bayes, Nearest Centroid, etc.
We can also combine off-chain computation with on-chain computation in a few ways such as:
* encoding off-chain to a higher-dimensional representation and just having the final layers of the model fine-tuned on-chain,
* using secure multiparty computation, or
* using external APIs, or as they are called in the blockchain space, oracles, to train and run the model
We can also use algorithms that do not require all models parameters to be updated (e.g. Perceptron).
We hope to inspire more research in efficient ways to update more complex models.
Some of those proposals are not in the true spirit of this system which is to share models completely publicly but for some applications they may be suitable.
At least the data would be shared so others can still use it to train their own models.
## Will transaction fees be too high?
Fees in Ethereum are low enough for simple models: a few cents as of July 2019.
Simple machine learning models are good for many applications.
As described in the previous answer, there are ways to keep transactions simple.
Fees are decreasing: Ethereum is switching to proof of stake.
Other blockchains may have lower or possibly no fees.
## What about storing models off-chain?
Storing the model parameters off-chain, e.g. using IPFS, is an option but many of the popular solutions do not have robust mirroring to ensure that the model will still be available if a node goes down.
One of the major goals of this project is to share models and improve their availability, the easiest way to do that now is to have the model stored and trained in a smart contract.
We're happy to make improvements! If you do know of a solution that would be cheaper and more robust than storing models on a blockchain like Ethereum then let us know by filing an issue!
## What if I just spam bad data?
This depends on the incentive mechanism (IM) chosen but essentially, you will lose a lot of money.
Others will notice the model is performing badly or does not work as expected and then stop contributing to it.
Depending on the IM, such as in Deposit, Refund, and Take: Self-Assessment, others that already submitted "good" data will gladly take your deposits without submitting any more data.
Furthermore, people can easily automatically correct your data using techniques from unsupervised learning such as clustering.
They can then use the data offline for their own private model or even deploy a new collection system using that model.
## What if no one gives bad data, then no one can profit?
That’s great!
This system will work as a source for quality data and models.
People will contribute data to help improve the machine learning models they use in their daily life.
Profit depends on the incentive mechanism (IM).
Yes, in Deposit, Refund, and Take: Self-Assessment, the contributors will not profit and should be able to claim back their own deposits.
In the Prediction Market based mechanism, contributors can still get rewarded by the original provider of the bounty and test set.
# Learn More
## Papers
More details can be found in our initial paper, [Decentralized & Collaborative AI on Blockchain][overview-paper], which describes the framework, accepted to Blockchain-2019, The IEEE International Conference on Blockchain.
An analysis of several machine learning models with the self-assessment incentive mechanism can be found in our second paper, [Analysis of Models for Decentralized and Collaborative AI on Blockchain][self-assessment-analysis-paper], which was accepted to [The 2020 International Conference on Blockchain](http://blockchain1000.org/2020/).
# Contributing
This project welcomes contributions and suggestions. Most contributions require you to agree to a
Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us
the rights to use your contribution. For details, visit https://cla.microsoft.com.
When you submit a pull request, a CLA-bot will automatically determine whether you need to provide
a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions
provided by the bot. You will only need to do this once across all repos using our CLA.
This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or
contact [[email protected]](mailto:[email protected]) with any additional questions or comments.
[demo-folder]: demo/
[simulation-folder]: simulation/
[demo]: https://aka.ms/0xDeCA10B-demo
[blog1]: https://aka.ms/0xDeCA10B-blog1
[overview-paper]: https://aka.ms/0xDeCA10B-paper
[self-assessment-analysis-paper]: https://arxiv.org/abs/2009.06756
|
0xDeCA10B/README.md/0
|
{
"file_path": "0xDeCA10B/README.md",
"repo_id": "0xDeCA10B",
"token_count": 1993
}
| 0 |
#!/bin/bash
# Abort on the first failing command.
set -e
# Default to development environment.
export NODE_ENVIRONMENT=${NODE_ENVIRONMENT:-development}
# Compile and deploy the smart contracts with Truffle.
truffle compile
truffle migrate
# Start the React development server for the demo client.
react-scripts start
|
0xDeCA10B/demo/client/deploy_client.sh/0
|
{
"file_path": "0xDeCA10B/demo/client/deploy_client.sh",
"repo_id": "0xDeCA10B",
"token_count": 59
}
| 1 |
import Button from '@material-ui/core/Button'
import Container from '@material-ui/core/Container'
import Grid from '@material-ui/core/Grid'
import InputLabel from '@material-ui/core/InputLabel'
import Link from '@material-ui/core/Link'
import MenuItem from '@material-ui/core/MenuItem'
import Paper from '@material-ui/core/Paper'
import Select from '@material-ui/core/Select'
import { withStyles } from '@material-ui/core/styles'
import Table from '@material-ui/core/Table'
import TableBody from '@material-ui/core/TableBody'
import TableCell from '@material-ui/core/TableCell'
import TableHead from '@material-ui/core/TableHead'
import TableRow from '@material-ui/core/TableRow'
import TextField from '@material-ui/core/TextField'
import Tooltip from '@material-ui/core/Tooltip'
import Typography from '@material-ui/core/Typography'
import update from 'immutability-helper'
import { withSnackbar } from 'notistack'
import PropTypes from 'prop-types'
import React from 'react'
import Dropzone from 'react-dropzone'
import CollaborativeTrainer64 from '../contracts/compiled/CollaborativeTrainer64.json'
import DataHandler64 from '../contracts/compiled/DataHandler64.json'
import Points64 from '../contracts/compiled/Points64.json'
import Stakeable64 from '../contracts/compiled/Stakeable64.json'
import { Encoder } from '../encoding/encoder'
import { getNetworkType, getWeb3 } from '../getWeb3'
import { ModelDeployer } from '../ml-models/deploy-model'
import { ModelInformation } from '../storage/data-store'
import { DataStoreFactory } from '../storage/data-store-factory'
import { BASE_TITLE } from '../title'
import { checkStorages, renderStorageSelector } from './storageSelector'
// JSS style rules for the AddModel page, parameterized by the Material-UI theme.
const styles = theme => ({
	root: {
		...theme.mixins.gutters(),
		paddingTop: theme.spacing(2),
		paddingBottom: theme.spacing(2),
		marginTop: theme.spacing(2),
		marginBottom: theme.spacing(2),
	},
	form: {
		display: 'flex',
		flex: 1,
		flexDirection: 'column'
	},
	button: {
		marginTop: 20,
		alignSelf: 'start',
	},
	selectorLabel: {
		marginTop: 8,
	},
	selector: {
		paddingTop: theme.spacing(1),
		marginBottom: 8,
	},
	numberTextField: {
		// Some of the labels are long so we need long input boxes to show the entire label nicely.
		width: 300,
	},
	// Paper inside the drag-and-drop model upload zone.
	dropPaper: {
		...theme.mixins.gutters(),
		cursor: 'pointer',
		paddingTop: theme.spacing(2),
		paddingBottom: theme.spacing(2),
	},
	table: {
		wordBreak: 'break-word',
	},
})
class AddModel extends React.Component {
	constructor(props) {
		// Set up the deployment form's initial state, storage back-ends and
		// method bindings.
		super(props)
		this.classes = props.classes
		// web3 is initialized asynchronously in componentDidMount/setupWeb3.
		this.web3 = null
		// Default to local storage for storing original data.
		const storageType = localStorage.getItem('storageType') || 'local'
		this.storages = DataStoreFactory.getAll()
		this.state = {
			name: "",
			description: "",
			// NOTE(review): presumably the fixed-point scale for model weights — confirm against ModelDeployer.
			toFloat: 1E9,
			modelType: 'Classifier64',
			modelFileName: undefined,
			encoder: Encoder.None,
			incentiveMechanism: 'Points64',
			refundTimeWaitTimeS: 0,
			ownerClaimWaitTimeS: 0,
			anyAddressClaimWaitTimeS: 0,
			costWeight: 1E15,
			// Transaction hash and deployed address of each contract, filled in
			// during deployment via saveTransactionHash/saveAddress.
			deploymentInfo: {
				dataHandler: {
					transactionHash: undefined,
					address: undefined,
				},
				incentiveMechanism: {
					transactionHash: undefined,
					address: undefined,
				},
				model: {
					transactionHash: undefined,
					address: undefined,
				},
				main: {
					transactionHash: undefined,
					address: undefined,
				},
			},
			storageType,
			permittedStorageTypes: [],
		}
		// Bind callbacks so they keep `this` when passed as event handlers.
		this.notify = this.notify.bind(this)
		this.dismissNotification = this.dismissNotification.bind(this)
		this.saveAddress = this.saveAddress.bind(this)
		this.saveTransactionHash = this.saveTransactionHash.bind(this)
		this.save = this.save.bind(this)
		this.handleInputChange = this.handleInputChange.bind(this)
		this.processUploadedModel = this.processUploadedModel.bind(this)
	}
componentDidMount = async () => {
document.title = `Add Model - ${BASE_TITLE}`
checkStorages(this.storages).then(permittedStorageTypes => {
permittedStorageTypes.push('none')
this.setState({ permittedStorageTypes })
})
window.ethereum.on('chainChanged', _ => {
this.setupWeb3()
})
this.setupWeb3()
}
async setupWeb3() {
try {
this.web3 = await getWeb3()
this.deployer = new ModelDeployer(this.web3)
this.setState({ networkType: await getNetworkType() })
} catch (error) {
this.notify("Failed to load web3, accounts, or contract. Check console for details.", { variant: 'error' })
console.error(error)
}
}
notify(...args) {
return this.props.enqueueSnackbar(...args)
}
dismissNotification(...args) {
return this.props.closeSnackbar(...args)
}
saveTransactionHash(key, transactionHash) {
this.setState({ deploymentInfo: update(this.state.deploymentInfo, { [key]: { transactionHash: { $set: transactionHash } } }) })
}
saveAddress(key, address) {
this.setState({ deploymentInfo: update(this.state.deploymentInfo, { [key]: { address: { $set: address } } }) })
}
handleInputChange(event) {
const target = event.target
let value = target.type === "checkbox" ? target.checked : target.value
if (event.target.type === 'number') {
value = parseInt(value)
}
const name = target.name
let valid = true
if (['costWeight', 'refundTimeWaitTimeS', 'ownerClaimWaitTimeS', 'anyAddressClaimWaitTimeS'].indexOf(name) >= 0) {
if (value < 0) {
this.notify(`The value for ${name} must be at least 0`, { variant: 'error' })
valid = false
}
}
if (valid) {
this.setState({
[name]: value
}, _ => {
if (name === 'storageType') {
localStorage.setItem(name, value)
}
})
}
}
processUploadedModel(acceptedFiles) {
this.setState({ model: undefined, modelFileName: undefined }, _ => {
if (acceptedFiles.length !== 1) {
this.notify("Please only provide one file", { variant: 'error' })
return
}
const reader = new FileReader()
const file = acceptedFiles[0]
reader.onabort = () => console.error("File reading was aborted.")
reader.onerror = () => console.error("File reading has failed.")
reader.onload = () => {
try {
const binaryStr = reader.result
const model = JSON.parse(binaryStr)
if (!(model.type in ModelDeployer.modelTypes)) {
this.notify(`The "type" of the model must be one of ${JSON.stringify(Object.keys(ModelDeployer.modelTypes))}`, { variant: 'error' })
} else {
this.setState({
model, modelFileName: file.path,
encoder: model.encoder || this.state.encoder,
})
}
} catch (err) {
console.error(`Error reading "${file.path}".`)
console.error(err)
this.notify(`There was an error reading ${file.path}. See the console for details.`, { variant: 'error' })
}
}
reader.readAsBinaryString(file)
})
}
getDisabledReason() {
if (this.state.deploymentInfo.main.address !== undefined) {
return "Already deployed"
}
if (this.state.model === undefined) {
return "A model file must be uploaded"
}
if (!(this.state.refundTimeWaitTimeS <= this.state.ownerClaimWaitTimeS)) {
return "The owner wait time must greater than or equal to the refund/reward wait time"
}
if (!(this.state.ownerClaimWaitTimeS <= this.state.anyAddressClaimWaitTimeS)) {
return "The full deposit take wait time greather than or equal to the owner wait time"
}
if (this.state.costWeight < 0) {
return "The deposit wait must be greater than or equal to 0"
}
return null
}
render() {
let disableReason = this.getDisabledReason()
return (
<Container>
<Paper className={this.classes.root} elevation={1}>
<Typography variant="h5" component="h3">
Add your model
</Typography>
<Typography component="p">
Provide the information for the model and then deploy it to a blockchain.
You can hover over (or long press for touch screens) certain items to get more details.
</Typography>
<Typography component="p">
If you want to use a model that is already deployed, then you can add its information <Link href='/addDeployedModel'>here</Link>.
</Typography>
<Typography component="p">
⚠ WARNING When you click/tap on the SAVE button, transactions will be created for you to approve in your browser's tool (e.g. MetaMask).
If the transactions are approved, you might be sending data to a public dencentralized blockchain not controlled by Microsoft.
Before approving, you should understand the implications of interacting with a public blockchain.
You can learn more <Link href='/about' target='_blank'>here</Link>.
</Typography>
<form className={this.classes.container} noValidate autoComplete="off">
<div className={this.classes.form}>
<TextField
name="name"
label="Model name"
inputProps={{ 'aria-label': "Model name" }}
margin="normal"
onChange={this.handleInputChange}
/>
<TextField
name="description"
label="Model description"
inputProps={{ 'aria-label': "Model description" }}
margin="normal"
onChange={this.handleInputChange}
/>
{/* Encoder */}
<Typography variant="h6" component="h6">
Encoder
</Typography>
<Typography component="p">
An encoder is the method that is used to convert the input (text, image, etc.) into a machine readable format.
</Typography>
<Select className={this.classes.selector}
onChange={this.handleInputChange}
value={this.state.encoder}
inputProps={{
name: 'encoder',
}}
>
<Tooltip value={Encoder.None} placement="top-start"
title="No transformation will be applied (except for whatever is required to send the data to the contract such as converting to hexadecimal)">
<MenuItem>None (for raw integer data)</MenuItem>
</Tooltip>
<Tooltip value={Encoder.Mult1E9Round} placement="top-start"
title="Each number will be multiplied by 10^9 and then rounded since smart contracts use integers instead of decimal numbers">
<MenuItem>Multiply by 10^9, then round (for raw decimal numbers)</MenuItem>
</Tooltip>
<Tooltip value={Encoder.MurmurHash3} placement="top-start"
title="Convert each word to a 32-bit number using MurmurHash3. Separates word using spaces.">
<MenuItem>MurmurHash3 (for text with sparse models)</MenuItem>
</Tooltip>
<Tooltip value={Encoder.ImdbVocab} placement="top-start"
title="Convert each word in English text to a number using the 1000 most frequent words in the IMDB review dataset">
<MenuItem>IMDB vocab (for a limited set of English text)</MenuItem>
</Tooltip>
<Tooltip value={Encoder.USE} placement="top-start"
title="Use the Universal Sentence Encoder to convert English text to a vector of numbers">
<MenuItem>Universal Sentence Encoder (for English text with dense models)</MenuItem>
</Tooltip>
<Tooltip value={Encoder.MobileNetV2} placement="top-start"
title="Use MobileNetV2 to convert images to a vector of numbers">
<MenuItem>MobileNetV2 (for images with dense models)</MenuItem>
</Tooltip>
</Select>
{/* Model */}
{/* Don't need to ask for the model type since there is only one option and in the future, it should be inferred from the provided file.
<InputLabel className={this.classes.selectorLabel} htmlFor="model-type">Model type</InputLabel>
<Select className={this.classes.selector}
onChange={this.handleInputChange}
value={this.state.modelType}
inputProps={{
name: 'modelType',
}}
>
<MenuItem value={"Classifier64"}>Classifier64</MenuItem>
</Select> */}
<Typography variant="h6" component="h6">
Model
</Typography>
<Typography component="p">
Provide a file containing the model's information.
The syntax for the file can be found <Link href='https://github.com/microsoft/0xDeCA10B/wiki/Models#model-files' target='_blank'>here</Link>.
</Typography>
<Dropzone onDrop={this.processUploadedModel}>
{({ getRootProps, getInputProps }) => (
<Paper {...getRootProps()} className={this.classes.dropPaper}>
<input {...getInputProps()} />
<Typography component="p">
Drag and drop a model file here, or click to select a file
{this.state.modelFileName && ` (using ${this.state.modelFileName})`}
</Typography>
</Paper>
)}
</Dropzone>
{/* Incentive Mechanism */}
<Tooltip placement="top-start"
title={"The system that will be used to determine rewards for data that is determined to be \"good\"."}>
<InputLabel className={this.classes.selectorLabel} htmlFor="incentiveMechanism">Incentive mechanism (IM)</InputLabel>
</Tooltip>
<Select className={this.classes.selector}
onChange={this.handleInputChange}
value={this.state.incentiveMechanism}
inputProps={{
name: 'incentiveMechanism',
}}
>
<Tooltip value="Points64" placement="top-start"
title="Collect and earn points. No deposits required.">
<MenuItem>Points</MenuItem>
</Tooltip>
<Tooltip value="Stakeable64" placement="top-start"
title="Stake a deposit when giving data. Contributors have the possibility to earn rewards by taking the deposits of others.">
<MenuItem>Stakeable</MenuItem>
</Tooltip>
</Select>
{this.state.incentiveMechanism === 'Points64' &&
this.renderPointsOptions()
}
{this.state.incentiveMechanism === 'Stakeable64' &&
this.renderStakeableOptions()
}
{/* Storage */}
<Typography variant="h6" component="h6">
Model Meta-data Storage
</Typography>
<Typography component="p">
When you click the save button below, you will be prompted to store your model on a blockchain.
In the next selection dropdown, you can choose if you want to store meta-data for this model so that you can easily find it using this demo website.
</Typography>
<div className={this.classes.selector}>
{renderStorageSelector("Where to store the supplied meta-data about this model like its address",
this.state.storageType, this.handleInputChange, this.state.permittedStorageTypes)}
</div>
{this.state.networkType === 'main' && <Typography component="p">
{"⚠ You are currently set up to deploy to a main network. Please consider deploying to a test network before deploying to a main network. "}
</Typography>}
{disableReason !== null && <Typography component="p">
⚠ {disableReason}
</Typography>}
<Button className={this.classes.button} variant="outlined" color="primary" onClick={this.save}
disabled={disableReason !== null}
>
Save
</Button>
</div>
</form>
</Paper>
<Paper className={this.classes.root} elevation={1}>
<Typography component="h3">
Deployment Status
</Typography>
<Table className={this.classes.table} aria-label="Deployment Information Table">
<TableHead>
<TableRow>
<TableCell>Contract</TableCell>
<TableCell>Transaction Hash</TableCell>
<TableCell>Address</TableCell>
</TableRow>
</TableHead>
<TableBody>
<TableRow>
<TableCell component="th">Data Handler</TableCell>
<TableCell>{this.state.deploymentInfo.dataHandler.transactionHash}</TableCell>
<TableCell>{this.state.deploymentInfo.dataHandler.address}</TableCell>
</TableRow>
<TableRow>
<TableCell component="th">Incentive Mechanism</TableCell>
<TableCell>{this.state.deploymentInfo.incentiveMechanism.transactionHash}</TableCell>
<TableCell>{this.state.deploymentInfo.incentiveMechanism.address}</TableCell>
</TableRow>
<TableRow>
<TableCell component="th">Model</TableCell>
<TableCell>{this.state.deploymentInfo.model.transactionHash}</TableCell>
<TableCell>{this.state.deploymentInfo.model.address}</TableCell>
</TableRow>
<TableRow>
<TableCell component="th">Main Entry Point</TableCell>
<TableCell>{this.state.deploymentInfo.main.transactionHash}</TableCell>
<TableCell>{this.state.deploymentInfo.main.address}</TableCell>
</TableRow>
</TableBody>
</Table>
</Paper>
</Container>
)
}
renderCommonImOptions() {
return <Grid container spacing={2}>
<Grid item xs={12} sm={6}>
<Tooltip placement="top-start"
title={"The amount of time that anyone must wait after submitting data before requesting a refund and to verify data you claim is correct. \
This is also the amount of time that anyone must wait before reporting another account's data as incorrect."}>
<TextField name="refundTimeWaitTimeS" label="Refund/reward wait time (seconds)"
inputProps={{ 'aria-label': "Refund wait time in seconds" }}
className={this.classes.numberTextField}
value={this.state.refundTimeWaitTimeS}
type="number"
margin="normal"
onChange={this.handleInputChange} />
</Tooltip>
</Grid>
<Grid item xs={12} sm={6}>
<Tooltip placement="top-start"
title={"The amount of time that the \"owner\" of the smart contracts must wait before taking another account's full deposit given with their data contribution"}>
<TextField name="ownerClaimWaitTimeS" label="Full deposit take wait time for owner (seconds)"
inputProps={{ 'aria-label': "Owner claim wait time in seconds" }}
className={this.classes.numberTextField}
value={this.state.ownerClaimWaitTimeS}
type="number"
margin="normal"
onChange={this.handleInputChange} />
</Tooltip>
</Grid>
<Grid item xs={12} sm={6}>
<Tooltip placement="top-start"
title={"The amount of time that anyone must wait before taking another account's full deposit given with their data contribution"}>
<TextField name="anyAddressClaimWaitTimeS" label="Full deposit take wait time (seconds)"
inputProps={{ 'aria-label': "Any address claim wait time in seconds" }}
className={this.classes.numberTextField}
value={this.state.anyAddressClaimWaitTimeS}
type="number"
margin="normal"
onChange={this.handleInputChange} />
</Tooltip>
</Grid>
</Grid>
}
renderStakeableOptions() {
return <div>
{this.renderCommonImOptions()}
<Grid container spacing={2}>
<Grid item xs={12} sm={12}>
<Tooltip placement="top-start"
title={"A multiplicative factor to the required deposit. \
Setting this to 0 will mean that no deposit is required but will allow you to stil use the IM to track \"good\" and \"bad\" contributions."}>
<TextField name="costWeight" label="Deposit weight (in wei)"
inputProps={{ 'aria-label': "Deposit weight in wei" }}
className={this.classes.numberTextField}
value={this.state.costWeight}
type="number"
margin="normal"
onChange={this.handleInputChange} />
</Tooltip>
</Grid>
</Grid>
</div>
}
renderPointsOptions() {
return <div>
<Typography component="p">
No deposits will be required.
</Typography>
{this.renderCommonImOptions()}
</div>
}
async save() {
// TODO Keep track of contract addresses of whatever has been deployed so far so that the process can be recovered.
const { name, description, model, modelType, encoder } = this.state
const modelInfo = new ModelInformation({ name, description, modelType, encoder })
// Validate
if (!name) {
this.notify("Please provide a name", { variant: 'error' })
return
}
if (modelType === undefined || model === undefined) {
this.notify("You must select model type and provide a model file", { variant: 'error' })
return
}
this.web3.eth.getAccounts(async (err, accounts) => {
if (err) {
throw err
}
const account = accounts[0]
// Deploy the model first since it is more likely something will go wrong with deploying it compared to the other contracts.
const model = await this.deployer.deployModel(this.state.model, {
account,
toFloat: this.state.toFloat,
notify: this.notify, dismissNotification: this.dismissNotification,
saveTransactionHash: this.saveTransactionHash, saveAddress: this.saveAddress,
})
const [dataHandler, incentiveMechanism] = await Promise.all([
this.deployDataHandler(account),
this.deployIncentiveMechanism(account),
])
const mainContract = await this.deployMainEntryPoint(account, dataHandler, incentiveMechanism, model)
modelInfo.address = mainContract.options.address
if (this.state.storageType !== 'none') {
// Save to a database.
const storage = this.storages[this.state.storageType]
storage.saveModelInformation(modelInfo).then(() => {
// Redirect
const redirectWaitS = 5
this.notify(`Saved. Will redirect in ${redirectWaitS} seconds.`, { variant: 'success' })
setTimeout(_ => {
this.props.history.push(`/model?address=${mainContract.options.address}&metaDataLocation=${this.state.storageType}`)
}, redirectWaitS * 1000)
}).catch(err => {
console.error(err)
if (err.response && err.response.data && err.response.data.message) {
console.error(err.response.data.message)
}
this.notify("There was an error saving the model information. Check the console for details.",
{ variant: 'error' })
})
}
})
}
async deployIncentiveMechanism(account) {
let contractInfo, notification, args
const { incentiveMechanism,
refundTimeWaitTimeS, ownerClaimWaitTimeS, anyAddressClaimWaitTimeS,
costWeight } = this.state
switch (incentiveMechanism) {
case 'Points64':
contractInfo = Points64
args = [refundTimeWaitTimeS, ownerClaimWaitTimeS, anyAddressClaimWaitTimeS]
break
case 'Stakeable64':
contractInfo = Stakeable64
args = [refundTimeWaitTimeS, ownerClaimWaitTimeS, anyAddressClaimWaitTimeS, costWeight]
break
default:
// Should not happen.
this.notify(`Unrecognized incentive mechanism: "${incentiveMechanism}"`, { variant: 'error' })
throw new Error(`Unrecognized incentive mechanism: "${incentiveMechanism}"`)
}
const imContract = new this.web3.eth.Contract(contractInfo.abi, {
from: account,
})
const pleaseAcceptKey = this.notify("Please accept the prompt to deploy the incentive mechanism contract")
const result = imContract.deploy({
data: contractInfo.bytecode,
arguments: args,
}).send({
}).on('transactionHash', transactionHash => {
this.dismissNotification(pleaseAcceptKey)
notification = this.notify(`Submitted the incentive mechanism with transaction hash: ${transactionHash}. Please wait for a deployment confirmation.`)
this.saveTransactionHash('incentiveMechanism', transactionHash)
}).on('receipt', receipt => {
if (notification !== undefined) {
this.dismissNotification(notification)
}
this.notify(`The incentive mechanism contract has been deployed to ${receipt.contractAddress}`, { variant: 'success' })
this.saveAddress('incentiveMechanism', receipt.contractAddress)
}).on('error', err => {
this.dismissNotification(pleaseAcceptKey)
console.error(err)
this.notify("Error deploying the incentive mechanism", { variant: 'error' })
throw err
})
return result
}
async deployDataHandler(account) {
const pleaseAcceptKey = this.notify("Please accept the prompt to deploy the data handler")
let notification
const dataHandlerContract = new this.web3.eth.Contract(DataHandler64.abi, {
from: account,
})
return dataHandlerContract.deploy({
data: DataHandler64.bytecode,
}).send({
}).on('transactionHash', transactionHash => {
this.dismissNotification(pleaseAcceptKey)
notification = this.notify(`Submitted the data handler with transaction hash: ${transactionHash}. Please wait for a deployment confirmation.`)
this.saveTransactionHash('dataHandler', transactionHash)
}).on('receipt', receipt => {
if (notification !== undefined) {
this.dismissNotification(notification)
}
this.notify(`The data handler contract has been deployed to ${receipt.contractAddress}`, { variant: 'success' })
this.saveAddress('dataHandler', receipt.contractAddress)
}).on('error', err => {
this.dismissNotification(pleaseAcceptKey)
console.error(err)
this.notify("Error deploying the data handler", { variant: 'error' })
throw err
})
}
async deployMainEntryPoint(account, dataHandler, incentiveMechanism, model) {
const pleaseAcceptKey = this.notify("Please accept the prompt to deploy the main entry point contact")
let notification
const collaborativeTrainer64Contract = new this.web3.eth.Contract(CollaborativeTrainer64.abi, {
from: account,
})
return collaborativeTrainer64Contract.deploy({
data: CollaborativeTrainer64.bytecode,
arguments: [
this.state.name, this.state.description, this.state.encoder,
dataHandler.options.address, incentiveMechanism.options.address, model.options.address
],
}).send({
}).on('transactionHash', transactionHash => {
this.dismissNotification(pleaseAcceptKey)
notification = this.notify(`Submitted the main entry point with transaction hash: ${transactionHash}. Please wait for a deployment confirmation.`)
this.saveTransactionHash('main', transactionHash)
}).on('receipt', receipt => {
if (notification !== undefined) {
this.dismissNotification(notification)
}
this.notify(`The main entry point contract has been deployed to ${receipt.contractAddress}`, { variant: 'success' })
this.saveAddress('main', receipt.contractAddress)
}).on('error', err => {
this.dismissNotification(pleaseAcceptKey)
console.error(err)
this.notify(`Error deploying the main entry point contract`, { variant: 'error' })
throw err
}).then(newContractInstance => {
notification = this.notify(`Please accept the next 3 transactions to transfer ownership of the components to the main entry point contract`)
return Promise.all([
dataHandler.methods.transferOwnership(newContractInstance.options.address).send(),
incentiveMechanism.methods.transferOwnership(newContractInstance.options.address).send(),
model.methods.transferOwnership(newContractInstance.options.address).send(),
]).then(_ => {
this.dismissNotification(notification)
return newContractInstance
})
})
}
}
AddModel.propTypes = {
classes: PropTypes.object.isRequired,
}
export default withSnackbar(withStyles(styles)(AddModel))
|
0xDeCA10B/demo/client/src/components/addModel.js/0
|
{
"file_path": "0xDeCA10B/demo/client/src/components/addModel.js",
"repo_id": "0xDeCA10B",
"token_count": 10671
}
| 2 |
pragma solidity ^0.6;
import "../../../lib/Math.sol";
import "../../../lib/SafeMath.sol";
import "../../../lib/SignedSafeMath.sol";
import {IncentiveMechanism, IncentiveMechanism64} from "./IncentiveMechanism.sol";
import {Ownable} from "../ownership/Ownable.sol";
/**
* A base class for contracts that want to accept deposits to incentivise good contributions of information.
*/
contract Stakeable is Ownable, IncentiveMechanism {
using SafeMath for uint256;
/**
* A refund has been issued.
*/
event Refund(
/**
* The recipient of the refund which is the one who originally submitted the data contribution.
*/
address recipient,
/**
* The amount refunded.
*/
uint amount
);
/**
* An award for reporting data has been issued.
*/
event Report(
/**
* The one who submitted the report.
*/
address recipient,
/**
* The amount awarded.
*/
uint amount
);
/**
* Multiplicative factor for the cost calculation.
*/
uint public costWeight;
/**
* The last time that data was updated in seconds since the epoch.
*/
uint public lastUpdateTimeS;
constructor(
// Parameters in chronological order.
uint32 _refundWaitTimeS,
uint32 _ownerClaimWaitTimeS,
uint32 _anyAddressClaimWaitTimeS,
uint80 _costWeight
) Ownable() IncentiveMechanism(_refundWaitTimeS, _ownerClaimWaitTimeS, _anyAddressClaimWaitTimeS) public {
require(_refundWaitTimeS <= _ownerClaimWaitTimeS, "Owner claim wait time must be at least the refund wait time.");
require(_ownerClaimWaitTimeS <= _anyAddressClaimWaitTimeS, "Owner claim wait time must be less than the any address claim wait time.");
costWeight = _costWeight;
lastUpdateTimeS = now; // solium-disable-line security/no-block-members
}
/**
* @return The amount of wei required to add data now.
*
* Note that since this method uses `now` which depends on the last block time,
* when testing, the output of this function may not change over time unless blocks are created.
* @dev see also `getNextAddDataCost(uint)`
*/
function getNextAddDataCost() public override view returns (uint) {
return getNextAddDataCost(now); // solium-disable-line security/no-block-members
}
/**
* @param currentTimeS The current time in seconds since the epoch.
*
* @return The amount of wei required to add data at `currentTimeS`.
*/
function getNextAddDataCost(uint currentTimeS) public override view returns (uint) {
if (costWeight == 0) {
return 0;
}
// Value sent is in wei (1E18 wei = 1 ether).
require(lastUpdateTimeS <= currentTimeS, "The last update time is after the current time.");
// No SafeMath check needed because already done above.
uint divisor = currentTimeS - lastUpdateTimeS;
if (divisor == 0) {
divisor = 1;
} else {
divisor = Math.sqrt(divisor);
// TODO Check that sqrt is "safe".
}
return costWeight.mul(1 hours).div(divisor);
}
}
contract Stakeable64 is IncentiveMechanism64, Stakeable {
using SafeMath for uint256;
using SignedSafeMath for int256;
constructor(
uint32 _refundWaitTimeS,
uint32 _ownerClaimWaitTimeS,
uint32 _anyAddressClaimWaitTimeS,
uint80 _costWeight
) Stakeable(_refundWaitTimeS, _ownerClaimWaitTimeS, _anyAddressClaimWaitTimeS, _costWeight) public {
// solium-disable-previous-line no-empty-blocks
}
function getNextAddDataCost(int64[] memory /* data */, uint64 /* classification */)
public override view
returns (uint) {
// Do not consider the data.
return getNextAddDataCost();
}
function handleAddData(uint msgValue, int64[] memory data, uint64 classification) public override onlyOwner returns (uint cost) {
cost = getNextAddDataCost(data, classification);
require(msgValue >= cost, "Didn't pay enough for the deposit.");
lastUpdateTimeS = now; // solium-disable-line security/no-block-members
totalSubmitted = totalSubmitted.add(1);
}
function handleRefund(
address submitter,
int64[] memory /* data */, uint64 classification,
uint addedTime,
uint claimableAmount, bool claimedBySubmitter,
uint64 prediction,
uint /* numClaims */)
public override onlyOwner
returns (uint refundAmount) {
refundAmount = claimableAmount;
// Make sure deposit can be taken.
require(!claimedBySubmitter, "Deposit already claimed by submitter.");
require(refundAmount > 0, "There is no reward left to claim.");
require(now - addedTime >= refundWaitTimeS, "Not enough time has passed."); // solium-disable-line security/no-block-members
require(prediction == classification, "The model doesn't agree with your contribution.");
addressStats[submitter].numValid += 1;
totalGoodDataCount = totalGoodDataCount.add(1);
emit Refund(submitter, refundAmount);
}
function handleReport(
address reporter,
int64[] memory /* data */, uint64 classification,
uint addedTime, address originalAuthor,
uint initialDeposit, uint claimableAmount, bool claimedByReporter,
uint64 prediction,
uint /* numClaims */)
public override onlyOwner
returns (uint rewardAmount) {
// Make sure deposit can be taken.
require(claimableAmount > 0, "There is no reward left to claim.");
uint timeSinceAddedS = now - addedTime; // solium-disable-line security/no-block-members
if (timeSinceAddedS >= ownerClaimWaitTimeS && reporter == owner) {
rewardAmount = claimableAmount;
} else if (timeSinceAddedS >= anyAddressClaimWaitTimeS) {
// Enough time has passed, give the entire remaining deposit to the reporter.
rewardAmount = claimableAmount;
} else {
// Don't allow someone to claim back their own deposit if their data was wrong.
// They can still claim it from another address but they will have had to have sent good data from that address.
require(reporter != originalAuthor, "Cannot take your own deposit.");
require(!claimedByReporter, "Deposit already claimed by reporter.");
require(timeSinceAddedS >= refundWaitTimeS, "Not enough time has passed.");
require(prediction != classification, "The model should not agree with the contribution.");
uint numGoodForReporter = addressStats[reporter].numValid;
require(numGoodForReporter > 0, "The sender has not sent any good data.");
// Weight the reward by the proportion of good data sent (maybe square the resulting value).
// One nice reason to do this is to discourage someone from adding bad data through one address
// and then just using another address to get their full deposit back.
rewardAmount = initialDeposit.mul(numGoodForReporter).div(totalGoodDataCount);
if (rewardAmount == 0 || rewardAmount > claimableAmount) {
// There is too little left to divide up. Just give everything to this reporter.
rewardAmount = claimableAmount;
}
}
emit Report(reporter, rewardAmount);
}
}
|
0xDeCA10B/demo/client/src/contracts/incentive/Stakeable.sol/0
|
{
"file_path": "0xDeCA10B/demo/client/src/contracts/incentive/Stakeable.sol",
"repo_id": "0xDeCA10B",
"token_count": 2817
}
| 3 |
import Web3 from 'web3'
import { Contract } from 'web3-eth-contract'
import DensePerceptron from '../contracts/compiled/DensePerceptron.json'
import NaiveBayesClassifier from '../contracts/compiled/NaiveBayesClassifier.json'
import NearestCentroidClassifier from '../contracts/compiled/NearestCentroidClassifier.json'
import SparseNearestCentroidClassifier from '../contracts/compiled/SparseNearestCentroidClassifier.json'
import SparsePerceptron from '../contracts/compiled/SparsePerceptron.json'
import { convertData, convertNum } from '../float-utils'
import { DensePerceptronModel, Model, NaiveBayesModel, NearestCentroidModel, SparseNearestCentroidModel, SparsePerceptronModel } from './model-interfaces'
export class ModelDeployer {
/**
* The default value for toFloat.
*/
private static readonly toFloat = 1E9
/**
* Block gas limit by most miners as of October 2019.
*/
public readonly gasLimit = 8.9E6
static readonly modelTypes: any = {
'naive bayes': NaiveBayesClassifier,
'nearest centroid classifier': NearestCentroidClassifier,
'dense nearest centroid classifier': NearestCentroidClassifier,
'sparse nearest centroid classifier': SparseNearestCentroidClassifier,
'perceptron': DensePerceptron,
'dense perceptron': DensePerceptron,
'sparse perceptron': SparsePerceptron,
}
constructor(private web3: Web3) {
}
async deployNaiveBayes(model: NaiveBayesModel, options: any): Promise<Contract> {
const { account, toFloat,
notify, dismissNotification,
saveTransactionHash, saveAddress,
initialChunkSize = 150, chunkSize = 350,
} = options
const defaultSmoothingFactor = 1
const { classifications, classCounts, featureCounts, totalNumFeatures } = model
const smoothingFactor = convertNum(model.smoothingFactor || defaultSmoothingFactor, this.web3, toFloat)
const ContractInfo = ModelDeployer.modelTypes[model.type]
const contract = new this.web3.eth.Contract(ContractInfo.abi, undefined, { from: account })
const pleaseAcceptKey = notify(`Please accept the prompt to deploy the Naive Bayes classifier`)
return contract.deploy({
data: ContractInfo.bytecode,
arguments: [[classifications[0]], [classCounts[0]], [featureCounts[0].slice(0, initialChunkSize)], totalNumFeatures, smoothingFactor]
}).send({
from: account,
gas: this.gasLimit,
}).on('transactionHash', transactionHash => {
dismissNotification(pleaseAcceptKey)
notify(`Submitted the model with transaction hash: ${transactionHash}. Please wait for a deployment confirmation.`)
saveTransactionHash('model', transactionHash)
}).on('error', err => {
dismissNotification(pleaseAcceptKey)
notify("Error deploying the model", { variant: 'error' })
console.error(err)
}).then(async newContractInstance => {
const addClassPromises = []
for (let i = 1; i < classifications.length; ++i) {
addClassPromises.push(new Promise((resolve, reject) => {
const notification = notify(`Please accept the prompt to create the "${classifications[i]}" class`)
newContractInstance.methods.addClass(
classCounts[i], featureCounts[i].slice(0, initialChunkSize), classifications[i]
).send({
from: account,
// Block gas limit by most miners as of October 2019.
gas: this.gasLimit,
}).on('transactionHash', () => {
dismissNotification(notification)
}).on('error', (err: any) => {
dismissNotification(notification)
notify(`Error creating the "${classifications[i]}" class`, { variant: 'error' })
reject(err)
}).then(resolve)
}))
}
return Promise.all(addClassPromises).then(async _ => {
// Add remaining feature counts.
for (let classification = 0; classification < classifications.length; ++classification) {
for (let j = initialChunkSize; j < featureCounts[classification].length; j += chunkSize) {
const notification = notify(`Please accept the prompt to upload the features [${j},${Math.min(j + chunkSize, featureCounts[classification].length)}) for the "${classifications[classification]}" class`)
await newContractInstance.methods.initializeCounts(
featureCounts[classification].slice(j, j + chunkSize), classification).send()
.on('transactionHash', () => {
dismissNotification(notification)
})
.on('error', (err: any) => {
dismissNotification(notification)
notify(`Error setting feature indices for [${j},${Math.min(j + chunkSize, featureCounts[classification].length)}) for the "${classifications[classification]}" class`, { variant: 'error' })
throw err
})
}
}
notify(`The model contract has been deployed to ${newContractInstance.options.address}`, { variant: 'success' })
saveAddress('model', newContractInstance.options.address)
return newContractInstance
})
})
}
async deployNearestCentroidClassifier(model: NearestCentroidModel | SparseNearestCentroidModel, options: any): Promise<Contract> {
const { account, toFloat,
notify, dismissNotification,
saveTransactionHash, saveAddress,
initialChunkSize = 200, chunkSize = 250,
} = options
const classifications: string[] = []
const centroids: number[][] | number[][][] = []
const dataCounts: number[] = []
let numDimensions = null
for (const [classification, centroidInfo] of Object.entries(model.centroids)) {
classifications.push(classification)
dataCounts.push(centroidInfo.dataCount)
if (Array.isArray(centroidInfo.centroid) && model.type !== 'sparse nearest centroid classifier') {
centroids.push(convertData(centroidInfo.centroid, this.web3, toFloat))
if (numDimensions === null) {
numDimensions = centroidInfo.centroid.length
} else {
if (centroidInfo.centroid.length !== numDimensions) {
throw new Error(`Found a centroid with ${centroidInfo.centroid.length} dimensions. Expected: ${numDimensions}.`)
}
}
} else {
const sparseCentroid: number[][] = []
// `centroidInfo.centroid` could be an array or dict.
for (const [featureIndexKey, value] of Object.entries(centroidInfo.centroid)) {
const featureIndex = parseInt(featureIndexKey)
sparseCentroid.push([featureIndex, convertNum(value, this.web3, toFloat)])
}
centroids.push(sparseCentroid as any)
}
}
const ContractInfo = ModelDeployer.modelTypes[model.type]
const contract = new this.web3.eth.Contract(ContractInfo.abi, undefined, { from: account })
const pleaseAcceptKey = notify("Please accept the prompt to deploy the first class for the Nearest Centroid classifier")
return contract.deploy({
data: ContractInfo.bytecode,
arguments: [[classifications[0]], [centroids[0].slice(0, initialChunkSize)], [dataCounts[0]]],
}).send({
from: account,
// Block gas limit by most miners as of October 2019.
gas: this.gasLimit,
}).on('transactionHash', transactionHash => {
dismissNotification(pleaseAcceptKey)
notify(`Submitted the model with transaction hash: ${transactionHash}. Please wait for a deployment confirmation.`)
saveTransactionHash('model', transactionHash)
}).on('error', err => {
dismissNotification(pleaseAcceptKey)
notify("Error deploying the model", { variant: 'error' })
console.error(err)
}).then(async newContractInstance => {
// Set up each class.
const addClassPromises = []
for (let i = 1; i < classifications.length; ++i) {
addClassPromises.push(new Promise((resolve, reject) => {
const notification = notify(`Please accept the prompt to create the "${classifications[i]}" class`)
newContractInstance.methods.addClass(centroids[i].slice(0, initialChunkSize), classifications[i], dataCounts[i]).send({
from: account,
// Block gas limit by most miners as of October 2019.
gas: this.gasLimit,
}).on('transactionHash', () => {
dismissNotification(notification)
}).on('error', (err: any) => {
dismissNotification(notification)
notify(`Error creating the "${classifications[i]}" class`, { variant: 'error' })
reject(err)
}).then(resolve)
}))
}
await Promise.all(addClassPromises)
// Extend each class.
for (let classification = 0; classification < classifications.length; ++classification) {
for (let j = initialChunkSize; j < centroids[classification].length; j += chunkSize) {
const notification = notify(`Please accept the prompt to upload the values for dimensions [${j},${Math.min(j + chunkSize, centroids[classification].length)}) for the "${classifications[classification]}" class`)
// Not parallel since order matters.
await newContractInstance.methods.extendCentroid(
centroids[classification].slice(j, j + chunkSize), classification).send()
.on('transactionHash', () => {
dismissNotification(notification)
})
.on('error', (err: any) => {
dismissNotification(notification)
notify(`Error setting feature indices for [${j},${Math.min(j + chunkSize, centroids[classification].length)}) for the "${classifications[classification]}" class`, { variant: 'error' })
throw err
})
}
}
notify(`The model contract has been deployed to ${newContractInstance.options.address}`, { variant: 'success' })
saveAddress('model', newContractInstance.options.address)
return newContractInstance
})
}
async deployPerceptron(model: DensePerceptronModel | SparsePerceptronModel, options: any): Promise<Contract> {
const { account, toFloat,
notify, dismissNotification,
saveTransactionHash, saveAddress,
chunkSize = 350,
} = options
const defaultLearningRate = 0.5
const { classifications, featureIndices } = model
let weightsArray: any[] = []
const sparseWeights: any[][] = []
if ('sparseWeights' in model) {
const sparseModel = model as SparsePerceptronModel
if (typeof sparseModel.sparseWeights === 'object' && sparseModel.sparseWeights !== null) {
for (const [featureIndexKey, weight] of Object.entries(sparseModel.sparseWeights)) {
const featureIndex = parseInt(featureIndexKey, 10)
sparseWeights.push([featureIndex, convertNum(weight, this.web3, toFloat)])
}
}
}
if (model.weights !== undefined && model.weights !== null && Array.isArray(model.weights)) {
weightsArray = convertData(model.weights, this.web3, toFloat)
}
const intercept = convertNum(model.intercept, this.web3, toFloat)
const learningRate = convertNum(model.learningRate || defaultLearningRate, this.web3, toFloat)
if (featureIndices !== undefined && featureIndices.length !== weightsArray.length + sparseWeights.length) {
return Promise.reject("The number of features must match the number of weights.")
}
const ContractInfo = ModelDeployer.modelTypes[model.type]
const contract = new this.web3.eth.Contract(ContractInfo.abi, undefined, { from: account })
const pleaseAcceptKey = notify(`Please accept the prompt to deploy the Perceptron classifier with the first ${Math.min(weightsArray.length, chunkSize)} weights`)
return contract.deploy({
data: ContractInfo.bytecode,
arguments: [classifications, weightsArray.slice(0, chunkSize), intercept, learningRate],
}).send({
from: account,
gas: this.gasLimit,
}).on('transactionHash', transactionHash => {
dismissNotification(pleaseAcceptKey)
notify(`Submitted the model with transaction hash: ${transactionHash}. Please wait for a deployment confirmation.`)
saveTransactionHash('model', transactionHash)
}).on('error', err => {
dismissNotification(pleaseAcceptKey)
notify("Error deploying the model", { variant: 'error' })
console.error(err)
}).then(async newContractInstance => {
// Add remaining weights.
for (let i = chunkSize; i < weightsArray.length; i += chunkSize) {
// Not parallel since order matters for the dense model.
// Even for the sparse model, it nice not to be bombarded with many notifications that can look out of order.
let transaction: any
if (model.type === 'dense perceptron' || model.type === 'perceptron') {
transaction = newContractInstance.methods.initializeWeights(weightsArray.slice(i, i + chunkSize))
} else if (model.type === 'sparse perceptron') {
transaction = newContractInstance.methods.initializeWeights(i, weightsArray.slice(i, i + chunkSize))
} else {
throw new Error(`Unrecognized model type: "${model.type}"`)
}
// Subtract 1 from the count because the first chunk has already been uploaded.
const notification = notify(`Please accept the prompt to upload classifier weights [${i},${Math.min(i + chunkSize, weightsArray.length)}) (${i / chunkSize}/${Math.ceil(weightsArray.length / chunkSize) - 1})`)
await transaction.send({
from: account,
gas: this.gasLimit,
}).on('transactionHash', () => {
dismissNotification(notification)
}).on('error', (err: any) => {
dismissNotification(notification)
notify(`Error setting weights classifier weights [${i},${Math.min(i + chunkSize, weightsArray.length)})`, { variant: 'error' })
console.error(err)
})
}
if (featureIndices !== undefined) {
// Add feature indices to use.
for (let i = 0; i < featureIndices.length; i += chunkSize) {
const notification = notify(`Please accept the prompt to upload the feature indices [${i},${Math.min(i + chunkSize, featureIndices.length)})`)
await newContractInstance.methods.addFeatureIndices(featureIndices.slice(i, i + chunkSize)).send({
from: account,
gas: this.gasLimit,
}).on('transactionHash', () => {
dismissNotification(notification)
}).on('error', (err: any) => {
dismissNotification(notification)
notify(`Error setting feature indices for [${i},${Math.min(i + chunkSize, featureIndices.length)})`, { variant: 'error' })
console.error(err)
})
}
}
const sparseWeightsChunkSize = Math.round(chunkSize / 2)
for (let i = 0; i < sparseWeights.length; i += sparseWeightsChunkSize) {
const notification = notify(`Please accept the prompt to upload sparse classifier weights [${i},${Math.min(i + sparseWeightsChunkSize, sparseWeights.length)}) out of ${sparseWeights.length}`)
await newContractInstance.methods.initializeSparseWeights(sparseWeights.slice(i, i + sparseWeightsChunkSize)).send({
from: account,
gas: this.gasLimit,
}).on('transactionHash', () => {
dismissNotification(notification)
}).on('error', (err: any) => {
dismissNotification(notification)
notify(`Error setting sparse classifier weights [${i},${Math.min(i + sparseWeightsChunkSize, sparseWeights.length)}) out of ${sparseWeights.length}`, { variant: 'error' })
throw err
})
}
notify(`The model contract has been deployed to ${newContractInstance.options.address}`, { variant: 'success' })
saveAddress('model', newContractInstance.options.address)
return newContractInstance
})
}
/**
* @returns The contract for the model, an instance of `Classifier64`
* along with the the total amount of gas used to deploy the model.
*/
async deployModel(model: Model, options: any): Promise<Contract> {
if (options.toFloat === undefined) {
options.toFloat = ModelDeployer.toFloat
}
const noop: () => void = () => { return }
if (options.notify === undefined) {
options.notify = noop
}
if (options.dismissNotification === undefined) {
options.dismissNotification = noop
}
if (options.saveAddress === undefined) {
options.saveAddress = noop
}
if (options.saveTransactionHash === undefined) {
options.saveTransactionHash = noop
}
switch (model.type.toLocaleLowerCase('en')) {
case 'dense perceptron':
case 'sparse perceptron':
case 'perceptron':
if ('sparseWeights' in model) {
return this.deployPerceptron(model as SparsePerceptronModel, options)
} else {
return this.deployPerceptron(model as DensePerceptronModel, options)
}
case 'naive bayes':
return this.deployNaiveBayes(model as NaiveBayesModel, options)
case 'dense nearest centroid classifier':
case 'nearest centroid classifier':
return this.deployNearestCentroidClassifier(model as NearestCentroidModel, options)
case 'sparse nearest centroid classifier':
return this.deployNearestCentroidClassifier(model as SparseNearestCentroidModel, options)
default:
// Should not happen.
throw new Error(`Unrecognized model type: "${model.type}"`)
}
}
}
|
0xDeCA10B/demo/client/src/ml-models/deploy-model.ts/0
|
{
"file_path": "0xDeCA10B/demo/client/src/ml-models/deploy-model.ts",
"repo_id": "0xDeCA10B",
"token_count": 5722
}
| 4 |
import config from './config'
export class OnlineSafetyValidator {
	// Validation is on only when the environment variable is present and set to 'true'.
	private enabled: boolean =
		process.env.REACT_APP_ENABLE_ONLINE_SAFETY !== undefined
		&& process.env.REACT_APP_ENABLE_ONLINE_SAFETY.toLocaleLowerCase('en') === 'true'

	// Keys for the contracts that have been manually verified as safe to display.
	private verified: Set<string>

	constructor() {
		const verifiedKeys = config.verified.map(entry => this.normalize(entry.network, entry.address))
		this.verified = new Set(verifiedKeys)
	}

	private normalize(networkType: string, address: string): string {
		// Assume addresses are valid and do not have any '-'s.
		return `${networkType}-${address.toLocaleLowerCase('en')}`
	}

	/**
	 * @returns `true` if validation is enabled, `false` otherwise.
	 */
	isEnabled(): boolean {
		return this.enabled
	}

	/**
	 * @param networkType The type of the network.
	 * @param address The address of the smart contract.
	 * @returns `true` if the information in the smart contract (name, description, classifications, etc.) can be displayed.
	 */
	isPermitted(networkType: string, address: string): boolean {
		if (!this.enabled) {
			// Everything is permitted.
			return true
		}
		return this.verified.has(this.normalize(networkType, address))
	}
}
|
0xDeCA10B/demo/client/src/safety/validator.ts/0
|
{
"file_path": "0xDeCA10B/demo/client/src/safety/validator.ts",
"repo_id": "0xDeCA10B",
"token_count": 367
}
| 5 |
const CollaborativeTrainer64 = artifacts.require("./CollaborativeTrainer64")
const DataHandler64 = artifacts.require("./data/DataHandler64")
const Classifier = artifacts.require("./classification/SparsePerceptron")
const Stakeable64 = artifacts.require("./incentive/Stakeable64")
const { convertData, convertNum } = require('../../../src/float-utils-node')
const { deployModel } = require('../../../src/ml-models/deploy-model-node')
contract('CollaborativeTrainer with Perceptron', function (accounts) {
	// Fixed-point conversion factor for floats stored on-chain.
	const toFloat = 1E9

	// Incentive mechanism timing parameters (seconds).
	const refundTimeS = 1
	const ownerClaimWaitTimeS = 2
	const anyAddressClaimWaitTimeS = 3

	const classifications = ["Negative", "Positive"]
	const weights = convertData([0, 5, -1], web3, toFloat)
	const intercept = convertNum(0, web3, toFloat)
	const learningRate = convertNum(1, web3, toFloat)

	let dataHandler, incentiveMechanism, classifier, instance

	// Convert a BN (or an already-plain number) to a JavaScript number.
	function parseBN(num) {
		if (web3.utils.isBN(num)) {
			return num.toNumber()
		} else {
			assert.typeOf(num, 'number')
			return num
		}
	}

	// Convert a fixed-point BN back to a float.
	function parseFloatBN(bn) {
		assert(web3.utils.isBN(bn), `${bn} is not a BN`)
		// Can't divide first since a BN can only be an integer.
		return bn.toNumber() / toFloat
	}

	before("deploy", function () {
		// Weight for deposit cost in wei.
		const costWeight = 1E12
		console.log(`Deploying DataHandler.`)
		return DataHandler64.new().then(d => {
			dataHandler = d
			console.log(`  Deployed data handler to ${dataHandler.address}.`)
			return Stakeable64.new(
				refundTimeS,
				ownerClaimWaitTimeS,
				anyAddressClaimWaitTimeS,
				costWeight
			).then(inc => {
				incentiveMechanism = inc
				console.log(`  Deployed incentive mechanism to ${incentiveMechanism.address}.`)
				console.log(`Deploying classifier with ${weights.length} weights.`)
				return Classifier.new(classifications, weights, intercept, learningRate).then(m => {
					classifier = m
					console.log(`  Deployed classifier to ${classifier.address}.`)
					console.log(`Deploying collaborative trainer.`)
					return CollaborativeTrainer64.new(
						"name", "description", "encoder",
						dataHandler.address,
						incentiveMechanism.address,
						classifier.address
					).then(i => {
						instance = i
						console.log(`  Deployed collaborative trainer to ${i.address}.`)
						// The trainer orchestrates the other contracts, so it must own them.
						return Promise.all([
							dataHandler.transferOwnership(instance.address),
							incentiveMechanism.transferOwnership(instance.address),
							classifier.transferOwnership(instance.address),
						])
					})
				})
			})
		})
	})

	it("...should get last update time", function () {
		return incentiveMechanism.lastUpdateTimeS().then(parseBN).then(lastUpdateTimeS => {
			assert.isAtLeast(lastUpdateTimeS, 2)
		})
	})

	it("...should get first weight", function () {
		return classifier.weights(0).then(parseFloatBN).then((firstWeight) => {
			assert.equal(firstWeight, parseFloatBN(weights[0]), `First weight is wrong ${firstWeight} != ${weights[0]}.`)
		})
	})

	it("...should get the classifications", function () {
		const expectedClassifications = classifications
		return classifier.getNumClassifications().then(numClassifications => {
			assert.equal(numClassifications, expectedClassifications.length, "Number of classifications is wrong.")
			const promises = expectedClassifications.map((expectedClassification, i) => {
				return classifier.classifications(i)
			})
			return Promise.all(promises).then(results => {
				assert.deepEqual(results, expectedClassifications, "Wrong classifications.")
			})
		})
	})

	it("...should predict the classification 0", function () {
		return instance.classifier()
			.then(Classifier.at)
			.then(m => m.predict([2]))
			.then(prediction => {
				assert.equal(prediction, 0, "Wrong classification.")
			})
	})

	it("...should predict the classification 1", function () {
		return instance.classifier()
			.then(Classifier.at)
			.then(m => m.predict([0, 1]))
			.then(prediction => {
				assert.equal(prediction, 1, "Wrong classification.")
			})
	})

	it("...should get the cost", function () {
		return instance.incentiveMechanism()
			.then(Stakeable64.at)
			.then(inc => inc.getNextAddDataCost())
			.then((cost) => {
				assert(cost.gtn(0), "Cost should be positive.")
			})
	})

	it("...should add data", function () {
		return instance.incentiveMechanism()
			.then(Stakeable64.at)
			.then(inc => inc.getNextAddDataCost())
			.then(cost => {
				assert(cost.gtn(0), "Cost should be positive.")
				return instance.addData([0, 1], 0, { from: accounts[0], value: cost }).then((result) => {
					// Training on ([0, 1], 0) should decrement both touched weights by the learning rate.
					return classifier.weights(0).then(parseFloatBN).then(result => {
						assert.equal(result, parseFloatBN(weights[0]) - parseFloatBN(learningRate), "First weight is wrong.")
						assert.equal(result, -1, "First weight is wrong.")
					}).then(() => {
						return classifier.weights(1).then(parseFloatBN).then(result => {
							assert.equal(result, parseFloatBN(weights[1]) - parseFloatBN(learningRate), "Second weight is wrong.")
							assert.equal(result, 4, "Second weight is wrong.")
						})
					}).then(() => {
						return classifier.predict([0]).then((result) => {
							assert.equal(result, 0, "Wrong classification.")
						})
					}).then(() => {
						return classifier.predict([1]).then((result) => {
							assert.equal(result, 1, "Wrong classification.")
						})
					}).then(() => {
						// Feature indices beyond the stored weights should be ignored by the sparse model.
						// Pick a large number.
						var weightsLength = 2 ** 20
						var requiredSampleLength = 60
						var sample1 = Array(requiredSampleLength).fill(weightsLength + 1, 0)
						sample1[0] = 0
						var sample2 = Array(requiredSampleLength).fill(weightsLength + 1, 0)
						sample2[0] = 1
						var data = [sample1, sample2]
						return classifier.evaluateBatch(data, [0, 1]).then(result => {
							assert.equal(result, 2, "Wrong number correct.")
						})
					})
				})
			})
	})

	it("...should not refund", function () {
		// The sample was never added, so the refund must revert.
		return instance.refund([0, 2], 1, 3).then(() => {
			assert.fail("Shouldn't be allowed to refund.")
		}).catch(err => {
			// We can't test the error message yet.
			assert.equal(err, "Error: Returned error: VM Exception while processing transaction: revert Data not found. -- Reason given: Data not found..")
		})
	})

	it("...should refund", function (done) {
		assert.isAtLeast(accounts.length, 2, "At least 2 accounts are required.")
		var contributor = accounts[0]
		var badContributor = accounts[1]
		instance.incentiveMechanism()
			.then(Stakeable64.at)
			.then(inc => inc.getNextAddDataCost()).then((maxCost) => {
				return web3.eth.getBalance(contributor).then(balanceBeforeAdd => {
					balanceBeforeAdd = web3.utils.toBN(balanceBeforeAdd)
					// contributor adds good data so that they can get a refund and take someone else's deposit later.
					return instance.addData([0, 1], 1, { from: contributor, value: maxCost }).then((result) => {
						var addedTime = new Date().getTime()
						var e = result.logs.filter(e => e.event == 'AddData')[0]
						assert.exists(e, "AddData event not found.")
						var time = e.args.t
						var cost = e.args.cost
						assert(web3.utils.isBN(cost))
						assert(cost.lte(maxCost))
						return web3.eth.getBalance(contributor).then(balanceAfterAdd => {
							balanceAfterAdd = web3.utils.toBN(balanceAfterAdd)
							// Ideally balanceAfterAdd = balanceBeforeAdd - cost
							// but because of transaction fees:
							// balanceAfterAdd = balanceBeforeAdd - cost - fee < balanceBeforeAdd - cost
							// balanceAfterAdd < balanceBeforeAdd - cost
							assert(balanceAfterAdd.lte(balanceBeforeAdd.sub(cost)))
							// badContributor adds bad data.
							return instance.incentiveMechanism()
								.then(Stakeable64.at)
								.then(inc => inc.getNextAddDataCost())
								.then((result) => {
									var badAddCost = result
									return instance.addData([0, 1], 0, { from: badContributor, value: badAddCost }).then((result) => {
										const badAddedTime = new Date().getTime()
										var e = result.logs.filter(e => e.event == 'AddData')[0]
										assert.exists(e, "AddData event not found.")
										var badTime = e.args.t
										setTimeout(_ => {
											return instance.refund([0, 1], 1, time, { from: contributor }).then(result => {
												return web3.eth.getBalance(contributor).then(balanceAfterRefund => {
													balanceAfterRefund = web3.utils.toBN(balanceAfterRefund)
													// Ideally balanceAfterRefund = balanceAfterAdd + cost
													// but because of transaction fees:
													// balanceAfterRefund < balanceAfterAdd + cost
													assert(balanceAfterRefund.lte(balanceAfterAdd.add(cost)))
													// Ideally balanceAfterRefund > balanceAfterAdd
													// but we can't be sure because of transaction fees.
													return dataHandler.getClaimableAmount([0, 1], 1, time, contributor).then(result => {
														assert.equal(result, 0)
														return dataHandler.hasClaimed([0, 1], 1, time, contributor, contributor).then(hasClaimed => {
															assert.equal(hasClaimed, true)
															// Now that good data has been verified, badContributor's deposit can be taken.
															setTimeout(_ => {
																return instance.report([0, 1], 0, badTime, badContributor).then(_ => {
																	return dataHandler.getClaimableAmount([0, 1], 0, badTime, badContributor).then(result => {
																		assert.equal(result, 0)
																		return dataHandler.hasClaimed([0, 1], 0, badTime, badContributor, contributor).then(hasClaimed => {
																			assert.equal(hasClaimed, true)
																			done()
																		})
																	})
																})
															},
																// Delay enough time to allow a refund.
																refundTimeS * 1000 - (new Date().getTime() - badAddedTime))
														})
													})
												})
											})
										},
											// Delay enough time to allow a refund.
											refundTimeS * 1000 - (new Date().getTime() - addedTime))
									})
								})
						})
					})
				})
			})
	})

	it("...should initializeSparseWeights", async function () {
		const model = {
			type: 'sparse perceptron',
			classifications,
			weights: [0, 5, -1, 2.1, -1.4],
			sparseWeights: { '8': 9.1, '12': -7.3 },
			intercept: 2,
			learningRate: 0.5,
		}
		const { classifierContract } = await deployModel(model, web3, { toFloat })
		assert.equal(await classifierContract.intercept().then(parseFloatBN), model.intercept)
		assert.equal(await classifierContract.learningRate().then(parseFloatBN), model.learningRate)
		// Iterate over the array's length (a bare `i < model.weights` comparison would never run the loop).
		for (let i = 0; i < model.weights.length; ++i) {
			assert.equal(await classifierContract.weights(i).then(parseFloatBN), model.weights[i])
		}
		for (const [featureIndex, weight] of Object.entries(model.sparseWeights)) {
			assert.equal(await classifierContract.weights(parseInt(featureIndex, 10)).then(parseFloatBN), weight)
		}
	})
})
|
0xDeCA10B/demo/client/test/contracts/classification/sparseperceptron.js/0
|
{
"file_path": "0xDeCA10B/demo/client/test/contracts/classification/sparseperceptron.js",
"repo_id": "0xDeCA10B",
"token_count": 4333
}
| 6 |
.git/
.vscode/
**/README.md
**/*.Dockerfile
**/Dockerfile
saved_models/
saved_runs/
training_data/
*.egg-info/
.pytest_cache/
**/*~
|
0xDeCA10B/simulation/.dockerignore/0
|
{
"file_path": "0xDeCA10B/simulation/.dockerignore",
"repo_id": "0xDeCA10B",
"token_count": 69
}
| 7 |
import os
from sklearn.linear_model import SGDClassifier
from decai.simulation.contract.classification.scikit_classifier import SciKitClassifierModule
class PerceptronModule(SciKitClassifierModule):
    """Module providing a perceptron-loss `SGDClassifier` to the simulation."""

    def __init__(self, class_weight=None):
        def _make_model():
            # Leave a couple of CPU cores free for the rest of the system.
            worker_count = max(1, os.cpu_count() - 2)
            return SGDClassifier(
                loss='perceptron',
                n_jobs=worker_count,
                random_state=0xDeCA10B,
                learning_rate='optimal',
                class_weight=class_weight,
                # Don't really care about tol, just setting it to remove a warning.
                tol=1e-3,
                penalty=None)

        super().__init__(_model_initializer=_make_model)
|
0xDeCA10B/simulation/decai/simulation/contract/classification/perceptron.py/0
|
{
"file_path": "0xDeCA10B/simulation/decai/simulation/contract/classification/perceptron.py",
"repo_id": "0xDeCA10B",
"token_count": 319
}
| 8 |
from dataclasses import dataclass
from logging import Logger
from typing import List
from injector import inject, Module
from keras.datasets import boston_housing
from decai.simulation.data.data_loader import DataLoader
@inject
@dataclass
class BhpDataLoader(DataLoader):
    """
    Load data from Boston Housing Prices.

    https://keras.io/datasets/#boston-housing-price-regression-dataset
    """

    # Injected logger used to report progress.
    _logger: Logger

    def classifications(self) -> List[str]:
        # This is a regression dataset, so there are no discrete class labels.
        raise NotImplementedError

    def load_data(self, train_size: int = None, test_size: int = None) -> (tuple, tuple):
        """Return `(x_train, y_train), (x_test, y_test)`, optionally truncated to the given sizes."""
        self._logger.info("Loading Boston housing prices data.")
        (x_train, y_train), (x_test, y_test) = boston_housing.load_data()
        if train_size is not None:
            x_train = x_train[:train_size]
            y_train = y_train[:train_size]
        if test_size is not None:
            x_test = x_test[:test_size]
            y_test = y_test[:test_size]
        self._logger.info("Done loading data.")
        return (x_train, y_train), (x_test, y_test)
@dataclass
class BhpDataModule(Module):
    """Injector module that binds `DataLoader` to the Boston Housing Prices loader."""

    def configure(self, binder):
        # Route all DataLoader requests to BhpDataLoader.
        binder.bind(DataLoader, to=BhpDataLoader)
|
0xDeCA10B/simulation/decai/simulation/data/bhp_data_loader.py/0
|
{
"file_path": "0xDeCA10B/simulation/decai/simulation/data/bhp_data_loader.py",
"repo_id": "0xDeCA10B",
"token_count": 462
}
| 9 |
# MIT License
# Copyright (c) Microsoft Corporation.
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE
import logging
import random
import sys
from collections import OrderedDict
import numpy as np
import torch
def reset_seed(seed):
    """Seed every RNG used in training (torch, numpy, random, and CUDA) for reproducibility.

    Parameters
    ----------
    seed : int
        The seed value applied to all generators.
    """
    torch.manual_seed(seed)
    np.random.seed(seed)
    random.seed(seed)
    try:
        torch.cuda.manual_seed_all(seed)
        torch.backends.cudnn.deterministic = True
    except Exception:
        # CUDA/cuDNN may not be available on this machine; the CPU seeding
        # above still applies. A bare `except:` here would also swallow
        # KeyboardInterrupt/SystemExit, so catch only Exception.
        pass
def prepare_logger(args):
    """Configure and return the root logger, writing INFO-level records to stdout.

    Any previously attached handlers are removed first so repeated calls do
    not duplicate output. ``args`` is accepted for interface compatibility
    but is not used.
    """
    log_format = '[%(asctime)s] %(levelname)s (%(name)s) %(message)s'
    date_format = '%m/%d %H:%M:%S'
    root = logging.getLogger()
    if root.hasHandlers():
        root.handlers.clear()
    stream_handler = logging.StreamHandler(sys.stdout)
    stream_handler.setFormatter(logging.Formatter(log_format, date_format))
    root.addHandler(stream_handler)
    root.setLevel(logging.INFO)
    return root
def accuracy(outputs, targets):
    """Return the fraction of rows in ``outputs`` whose highest-scoring class matches ``targets``."""
    _, predictions = torch.max(outputs.data, 1)
    num_correct = predictions.eq(targets.data).cpu().sum().item()
    return num_correct / outputs.size(0)
class AverageMeterGroup:
    """Average meter group for multiple average meters, keyed by metric name."""

    def __init__(self):
        # Insertion-ordered mapping of metric name -> AverageMeter.
        self.meters = OrderedDict()

    def update(self, data, n=1):
        """Update each named meter with the values in ``data`` (a mapping), weighted by ``n``."""
        for k, v in data.items():
            if k not in self.meters:
                self.meters[k] = AverageMeter(k, ':4f')
            self.meters[k].update(v, n=n)

    def __getattr__(self, item):
        # Fall back to meter lookup for unknown attributes. Raise AttributeError
        # (not KeyError) so hasattr() and copy/pickle protocols behave correctly,
        # and read `meters` via __dict__ to avoid infinite recursion if it is
        # not set yet (e.g. during unpickling).
        try:
            return self.__dict__['meters'][item]
        except KeyError:
            raise AttributeError(item) from None

    def __getitem__(self, item):
        return self.meters[item]

    def __str__(self):
        return ' '.join(str(v) for v in self.meters.values())

    def summary(self):
        """Return a one-line summary of all meters' averages."""
        return ' '.join(v.summary() for v in self.meters.values())

    def average_items(self):
        """Return a dict mapping each meter name to its running average."""
        return {k: v.avg for k, v in self.meters.items()}
class AverageMeter:
    """Computes and stores the average and current value."""

    def __init__(self, name, fmt=':f'):
        """
        Initialization of AverageMeter

        Parameters
        ----------
        name : str
            Name to display.
        fmt : str
            Format string to print the values.
        """
        self.name = name
        self.fmt = fmt
        self.reset()

    def reset(self):
        """Clear all accumulated statistics."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        """Record ``val`` observed ``n`` times and refresh the running average."""
        self.val = val
        self.count += n
        self.sum += val * n
        self.avg = self.sum / self.count

    def __str__(self):
        template = '{name} {val' + self.fmt + '} ({avg' + self.fmt + '})'
        return template.format(**self.__dict__)

    def summary(self):
        """Return a short ``name: average`` string."""
        template = '{name}: {avg' + self.fmt + '}'
        return template.format(**self.__dict__)
|
AI-System/Labs/AdvancedLabs/Lab8/hpo-answer/utils.py/0
|
{
"file_path": "AI-System/Labs/AdvancedLabs/Lab8/hpo-answer/utils.py",
"repo_id": "AI-System",
"token_count": 1474
}
| 10 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.