function ud(e,t){for(var n=0;ni[r]})}}}return Object.freeze(Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}))}(function(){const t=document.createElement("link").relList;if(t&&t.supports&&t.supports("modulepreload"))return;for(const r of document.querySelectorAll('link[rel="modulepreload"]'))i(r);new MutationObserver(r=>{for(const a of r)if(a.type==="childList")for(const o of a.addedNodes)o.tagName==="LINK"&&o.rel==="modulepreload"&&i(o)}).observe(document,{childList:!0,subtree:!0});function n(r){const a={};return r.integrity&&(a.integrity=r.integrity),r.referrerPolicy&&(a.referrerPolicy=r.referrerPolicy),r.crossOrigin==="use-credentials"?a.credentials="include":r.crossOrigin==="anonymous"?a.credentials="omit":a.credentials="same-origin",a}function i(r){if(r.ep)return;r.ep=!0;const a=n(r);fetch(r.href,a)}})();function cd(e){return e&&e.__esModule&&Object.prototype.hasOwnProperty.call(e,"default")?e.default:e}var dd={exports:{}},eo={},fd={exports:{}},X={};/** * @license React * react.production.min.js * * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */var Pr=Symbol.for("react.element"),Ym=Symbol.for("react.portal"),Jm=Symbol.for("react.fragment"),Zm=Symbol.for("react.strict_mode"),Gm=Symbol.for("react.profiler"),eh=Symbol.for("react.provider"),th=Symbol.for("react.context"),nh=Symbol.for("react.forward_ref"),ih=Symbol.for("react.suspense"),rh=Symbol.for("react.memo"),ah=Symbol.for("react.lazy"),Tu=Symbol.iterator;function oh(e){return e===null||typeof e!="object"?null:(e=Tu&&e[Tu]||e["@@iterator"],typeof e=="function"?e:null)}var pd={isMounted:function(){return!1},enqueueForceUpdate:function(){},enqueueReplaceState:function(){},enqueueSetState:function(){}},md=Object.assign,hd={};function Pi(e,t,n){this.props=e,this.context=t,this.refs=hd,this.updater=n||pd}Pi.prototype.isReactComponent={};Pi.prototype.setState=function(e,t){if(typeof e!="object"&&typeof e!="function"&&e!=null)throw Error("setState(...): takes an object of state variables to update or a function which returns an object of state variables.");this.updater.enqueueSetState(this,e,t,"setState")};Pi.prototype.forceUpdate=function(e){this.updater.enqueueForceUpdate(this,e,"forceUpdate")};function gd(){}gd.prototype=Pi.prototype;function ol(e,t,n){this.props=e,this.context=t,this.refs=hd,this.updater=n||pd}var sl=ol.prototype=new gd;sl.constructor=ol;md(sl,Pi.prototype);sl.isPureReactComponent=!0;var Cu=Array.isArray,yd=Object.prototype.hasOwnProperty,ll={current:null},vd={key:!0,ref:!0,__self:!0,__source:!0};function wd(e,t,n){var i,r={},a=null,o=null;if(t!=null)for(i in t.ref!==void 0&&(o=t.ref),t.key!==void 0&&(a=""+t.key),t)yd.call(t,i)&&!vd.hasOwnProperty(i)&&(r[i]=t[i]);var s=arguments.length-2;if(s===1)r.children=n;else if(1>>1,ue=P[ee];if(0>>1;eer(yt,q))Ver(Dt,yt)?(P[ee]=Dt,P[Ve]=q,ee=Ve):(P[ee]=yt,P[De]=q,ee=De);else if(Ver(Dt,q))P[ee]=Dt,P[Ve]=q,ee=Ve;else break e}}return H}function r(P,H){var q=P.sortIndex-H.sortIndex;return q!==0?q:P.id-H.id}if(typeof performance=="object"&&typeof performance.now=="function"){var a=performance;e.unstable_now=function(){return a.now()}}else{var o=Date,s=o.now();e.unstable_now=function(){return o.now()-s}}var l=[],u=[],m=1,f=null,h=3,v=!1,_=!1,k=!1,I=typeof setTimeout=="function"?setTimeout:null,g=typeof clearTimeout=="function"?clearTimeout:null,p=typeof setImmediate<"u"?setImmediate:null;typeof navigator<"u"&&navigator.scheduling!==void 
0&&navigator.scheduling.isInputPending!==void 0&&navigator.scheduling.isInputPending.bind(navigator.scheduling);function w(P){for(var H=n(u);H!==null;){if(H.callback===null)i(u);else if(H.startTime<=P)i(u),H.sortIndex=H.expirationTime,t(l,H);else break;H=n(u)}}function E(P){if(k=!1,w(P),!_)if(n(l)!==null)_=!0,Yt(j);else{var H=n(u);H!==null&&ke(E,H.startTime-P)}}function j(P,H){_=!1,k&&(k=!1,g(A),A=-1),v=!0;var q=h;try{for(w(H),f=n(l);f!==null&&(!(f.expirationTime>H)||P&&!re());){var ee=f.callback;if(typeof ee=="function"){f.callback=null,h=f.priorityLevel;var ue=ee(f.expirationTime<=H);H=e.unstable_now(),typeof ue=="function"?f.callback=ue:f===n(l)&&i(l),w(H)}else i(l);f=n(l)}if(f!==null)var Tt=!0;else{var De=n(u);De!==null&&ke(E,De.startTime-H),Tt=!1}return Tt}finally{f=null,h=q,v=!1}}var x=!1,L=null,A=-1,U=5,F=-1;function re(){return!(e.unstable_now()-FP||125ee?(P.sortIndex=q,t(u,P),n(l)===null&&P===n(u)&&(k?(g(A),A=-1):k=!0,ke(E,q-ee))):(P.sortIndex=ue,t(l,P),_||v||(_=!0,Yt(j))),P},e.unstable_shouldYield=re,e.unstable_wrapCallback=function(P){var H=h;return function(){var q=h;h=H;try{return P.apply(this,arguments)}finally{h=q}}}})(Sd);_d.exports=Sd;var wh=_d.exports;/** * @license React * react-dom.production.min.js * * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */var xh=d,ot=wh;function C(e){for(var t="https://reactjs.org/docs/error-decoder.html?invariant="+e,n=1;n"u"||typeof window.document>"u"||typeof window.document.createElement>"u"),os=Object.prototype.hasOwnProperty,bh=/^[:A-Z_a-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD][:A-Z_a-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\-.0-9\u00B7\u0300-\u036F\u203F-\u2040]*$/,Iu={},ju={};function kh(e){return os.call(ju,e)?!0:os.call(Iu,e)?!1:bh.test(e)?ju[e]=!0:(Iu[e]=!0,!1)}function _h(e,t,n,i){if(n!==null&&n.type===0)return!1;switch(typeof t){case"function":case"symbol":return!0;case"boolean":return i?!1:n!==null?!n.acceptsBooleans:(e=e.toLowerCase().slice(0,5),e!=="data-"&&e!=="aria-");default:return!1}}function Sh(e,t,n,i){if(t===null||typeof t>"u"||_h(e,t,n,i))return!0;if(i)return!1;if(n!==null)switch(n.type){case 3:return!t;case 4:return t===!1;case 5:return isNaN(t);case 6:return isNaN(t)||1>t}return!1}function Xe(e,t,n,i,r,a,o){this.acceptsBooleans=t===2||t===3||t===4,this.attributeName=i,this.attributeNamespace=r,this.mustUseProperty=n,this.propertyName=e,this.type=t,this.sanitizeURL=a,this.removeEmptyString=o}var Fe={};"children dangerouslySetInnerHTML defaultValue defaultChecked innerHTML suppressContentEditableWarning suppressHydrationWarning style".split(" ").forEach(function(e){Fe[e]=new Xe(e,0,!1,e,null,!1,!1)});[["acceptCharset","accept-charset"],["className","class"],["htmlFor","for"],["httpEquiv","http-equiv"]].forEach(function(e){var t=e[0];Fe[t]=new Xe(t,1,!1,e[1],null,!1,!1)});["contentEditable","draggable","spellCheck","value"].forEach(function(e){Fe[e]=new Xe(e,2,!1,e.toLowerCase(),null,!1,!1)});["autoReverse","externalResourcesRequired","focusable","preserveAlpha"].forEach(function(e){Fe[e]=new Xe(e,2,!1,e,null,!1,!1)});"allowFullScreen async autoFocus autoPlay controls default defer disabled disablePictureInPicture disableRemotePlayback formNoValidate hidden loop noModule noValidate open 
playsInline readOnly required reversed scoped seamless itemScope".split(" ").forEach(function(e){Fe[e]=new Xe(e,3,!1,e.toLowerCase(),null,!1,!1)});["checked","multiple","muted","selected"].forEach(function(e){Fe[e]=new Xe(e,3,!0,e,null,!1,!1)});["capture","download"].forEach(function(e){Fe[e]=new Xe(e,4,!1,e,null,!1,!1)});["cols","rows","size","span"].forEach(function(e){Fe[e]=new Xe(e,6,!1,e,null,!1,!1)});["rowSpan","start"].forEach(function(e){Fe[e]=new Xe(e,5,!1,e.toLowerCase(),null,!1,!1)});var cl=/[\-:]([a-z])/g;function dl(e){return e[1].toUpperCase()}"accent-height alignment-baseline arabic-form baseline-shift cap-height clip-path clip-rule color-interpolation color-interpolation-filters color-profile color-rendering dominant-baseline enable-background fill-opacity fill-rule flood-color flood-opacity font-family font-size font-size-adjust font-stretch font-style font-variant font-weight glyph-name glyph-orientation-horizontal glyph-orientation-vertical horiz-adv-x horiz-origin-x image-rendering letter-spacing lighting-color marker-end marker-mid marker-start overline-position overline-thickness paint-order panose-1 pointer-events rendering-intent shape-rendering stop-color stop-opacity strikethrough-position strikethrough-thickness stroke-dasharray stroke-dashoffset stroke-linecap stroke-linejoin stroke-miterlimit stroke-opacity stroke-width text-anchor text-decoration text-rendering underline-position underline-thickness unicode-bidi unicode-range units-per-em v-alphabetic v-hanging v-ideographic v-mathematical vector-effect vert-adv-y vert-origin-x vert-origin-y word-spacing writing-mode xmlns:xlink x-height".split(" ").forEach(function(e){var t=e.replace(cl,dl);Fe[t]=new Xe(t,1,!1,e,null,!1,!1)});"xlink:actuate xlink:arcrole xlink:role xlink:show xlink:title xlink:type".split(" ").forEach(function(e){var t=e.replace(cl,dl);Fe[t]=new Xe(t,1,!1,e,"http://www.w3.org/1999/xlink",!1,!1)});["xml:base","xml:lang","xml:space"].forEach(function(e){var t=e.replace(cl,dl);Fe[t]=new Xe(t,1,!1,e,"http://www.w3.org/XML/1998/namespace",!1,!1)});["tabIndex","crossOrigin"].forEach(function(e){Fe[e]=new Xe(e,1,!1,e.toLowerCase(),null,!1,!1)});Fe.xlinkHref=new Xe("xlinkHref",1,!1,"xlink:href","http://www.w3.org/1999/xlink",!0,!1);["src","href","action","formAction"].forEach(function(e){Fe[e]=new Xe(e,1,!1,e.toLowerCase(),null,!0,!0)});function fl(e,t,n,i){var r=Fe.hasOwnProperty(t)?Fe[t]:null;(r!==null?r.type!==0:i||!(2s||r[o]!==a[s]){var l=` `+r[o].replace(" at new "," at ");return e.displayName&&l.includes("")&&(l=l.replace("",e.displayName)),l}while(1<=o&&0<=s);break}}}finally{Po=!1,Error.prepareStackTrace=n}return(e=e?e.displayName||e.name:"")?Zi(e):""}function Eh(e){switch(e.tag){case 5:return Zi(e.type);case 16:return Zi("Lazy");case 13:return Zi("Suspense");case 19:return Zi("SuspenseList");case 0:case 2:case 15:return e=Ro(e.type,!1),e;case 11:return e=Ro(e.type.render,!1),e;case 1:return e=Ro(e.type,!0),e;default:return""}}function cs(e){if(e==null)return null;if(typeof e=="function")return e.displayName||e.name||null;if(typeof e=="string")return e;switch(e){case ai:return"Fragment";case ri:return"Portal";case ss:return"Profiler";case pl:return"StrictMode";case ls:return"Suspense";case us:return"SuspenseList"}if(typeof e=="object")switch(e.$$typeof){case Cd:return(e.displayName||"Context")+".Consumer";case Td:return(e._context.displayName||"Context")+".Provider";case ml:var t=e.render;return e=e.displayName,e||(e=t.displayName||t.name||"",e=e!==""?"ForwardRef("+e+")":"ForwardRef"),e;case 
hl:return t=e.displayName||null,t!==null?t:cs(e.type)||"Memo";case an:t=e._payload,e=e._init;try{return cs(e(t))}catch{}}return null}function Th(e){var t=e.type;switch(e.tag){case 24:return"Cache";case 9:return(t.displayName||"Context")+".Consumer";case 10:return(t._context.displayName||"Context")+".Provider";case 18:return"DehydratedFragment";case 11:return e=t.render,e=e.displayName||e.name||"",t.displayName||(e!==""?"ForwardRef("+e+")":"ForwardRef");case 7:return"Fragment";case 5:return t;case 4:return"Portal";case 3:return"Root";case 6:return"Text";case 16:return cs(t);case 8:return t===pl?"StrictMode":"Mode";case 22:return"Offscreen";case 12:return"Profiler";case 21:return"Scope";case 13:return"Suspense";case 19:return"SuspenseList";case 25:return"TracingMarker";case 1:case 0:case 17:case 2:case 14:case 15:if(typeof t=="function")return t.displayName||t.name||null;if(typeof t=="string")return t}return null}function bn(e){switch(typeof e){case"boolean":case"number":case"string":case"undefined":return e;case"object":return e;default:return""}}function Id(e){var t=e.type;return(e=e.nodeName)&&e.toLowerCase()==="input"&&(t==="checkbox"||t==="radio")}function Ch(e){var t=Id(e)?"checked":"value",n=Object.getOwnPropertyDescriptor(e.constructor.prototype,t),i=""+e[t];if(!e.hasOwnProperty(t)&&typeof n<"u"&&typeof n.get=="function"&&typeof n.set=="function"){var r=n.get,a=n.set;return Object.defineProperty(e,t,{configurable:!0,get:function(){return r.call(this)},set:function(o){i=""+o,a.call(this,o)}}),Object.defineProperty(e,t,{enumerable:n.enumerable}),{getValue:function(){return i},setValue:function(o){i=""+o},stopTracking:function(){e._valueTracker=null,delete e[t]}}}}function Qr(e){e._valueTracker||(e._valueTracker=Ch(e))}function jd(e){if(!e)return!1;var t=e._valueTracker;if(!t)return!0;var n=t.getValue(),i="";return e&&(i=Id(e)?e.checked?"true":"false":e.value),e=i,e!==n?(t.setValue(e),!0):!1}function Ta(e){if(e=e||(typeof document<"u"?document:void 0),typeof e>"u")return null;try{return e.activeElement||e.body}catch{return e.body}}function ds(e,t){var n=t.checked;return ye({},t,{defaultChecked:void 0,defaultValue:void 0,value:void 0,checked:n??e._wrapperState.initialChecked})}function Pu(e,t){var n=t.defaultValue==null?"":t.defaultValue,i=t.checked!=null?t.checked:t.defaultChecked;n=bn(t.value!=null?t.value:n),e._wrapperState={initialChecked:i,initialValue:n,controlled:t.type==="checkbox"||t.type==="radio"?t.checked!=null:t.value!=null}}function Ld(e,t){t=t.checked,t!=null&&fl(e,"checked",t,!1)}function fs(e,t){Ld(e,t);var n=bn(t.value),i=t.type;if(n!=null)i==="number"?(n===0&&e.value===""||e.value!=n)&&(e.value=""+n):e.value!==""+n&&(e.value=""+n);else if(i==="submit"||i==="reset"){e.removeAttribute("value");return}t.hasOwnProperty("value")?ps(e,t.type,n):t.hasOwnProperty("defaultValue")&&ps(e,t.type,bn(t.defaultValue)),t.checked==null&&t.defaultChecked!=null&&(e.defaultChecked=!!t.defaultChecked)}function Ru(e,t,n){if(t.hasOwnProperty("value")||t.hasOwnProperty("defaultValue")){var i=t.type;if(!(i!=="submit"&&i!=="reset"||t.value!==void 0&&t.value!==null))return;t=""+e._wrapperState.initialValue,n||t===e.value||(e.value=t),e.defaultValue=t}n=e.name,n!==""&&(e.name=""),e.defaultChecked=!!e._wrapperState.initialChecked,n!==""&&(e.name=n)}function ps(e,t,n){(t!=="number"||Ta(e.ownerDocument)!==e)&&(n==null?e.defaultValue=""+e._wrapperState.initialValue:e.defaultValue!==""+n&&(e.defaultValue=""+n))}var Gi=Array.isArray;function wi(e,t,n,i){if(e=e.options,t){t={};for(var 
r=0;r"+t.valueOf().toString()+"",t=Xr.firstChild;e.firstChild;)e.removeChild(e.firstChild);for(;t.firstChild;)e.appendChild(t.firstChild)}});function mr(e,t){if(t){var n=e.firstChild;if(n&&n===e.lastChild&&n.nodeType===3){n.nodeValue=t;return}}e.textContent=t}var ir={animationIterationCount:!0,aspectRatio:!0,borderImageOutset:!0,borderImageSlice:!0,borderImageWidth:!0,boxFlex:!0,boxFlexGroup:!0,boxOrdinalGroup:!0,columnCount:!0,columns:!0,flex:!0,flexGrow:!0,flexPositive:!0,flexShrink:!0,flexNegative:!0,flexOrder:!0,gridArea:!0,gridRow:!0,gridRowEnd:!0,gridRowSpan:!0,gridRowStart:!0,gridColumn:!0,gridColumnEnd:!0,gridColumnSpan:!0,gridColumnStart:!0,fontWeight:!0,lineClamp:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,tabSize:!0,widows:!0,zIndex:!0,zoom:!0,fillOpacity:!0,floodOpacity:!0,stopOpacity:!0,strokeDasharray:!0,strokeDashoffset:!0,strokeMiterlimit:!0,strokeOpacity:!0,strokeWidth:!0},Ah=["Webkit","ms","Moz","O"];Object.keys(ir).forEach(function(e){Ah.forEach(function(t){t=t+e.charAt(0).toUpperCase()+e.substring(1),ir[t]=ir[e]})});function Nd(e,t,n){return t==null||typeof t=="boolean"||t===""?"":n||typeof t!="number"||t===0||ir.hasOwnProperty(e)&&ir[e]?(""+t).trim():t+"px"}function Md(e,t){e=e.style;for(var n in t)if(t.hasOwnProperty(n)){var i=n.indexOf("--")===0,r=Nd(n,t[n],i);n==="float"&&(n="cssFloat"),i?e.setProperty(n,r):e[n]=r}}var Ih=ye({menuitem:!0},{area:!0,base:!0,br:!0,col:!0,embed:!0,hr:!0,img:!0,input:!0,keygen:!0,link:!0,meta:!0,param:!0,source:!0,track:!0,wbr:!0});function gs(e,t){if(t){if(Ih[e]&&(t.children!=null||t.dangerouslySetInnerHTML!=null))throw Error(C(137,e));if(t.dangerouslySetInnerHTML!=null){if(t.children!=null)throw Error(C(60));if(typeof t.dangerouslySetInnerHTML!="object"||!("__html"in t.dangerouslySetInnerHTML))throw Error(C(61))}if(t.style!=null&&typeof t.style!="object")throw Error(C(62))}}function ys(e,t){if(e.indexOf("-")===-1)return typeof t.is=="string";switch(e){case"annotation-xml":case"color-profile":case"font-face":case"font-face-src":case"font-face-uri":case"font-face-format":case"font-face-name":case"missing-glyph":return!1;default:return!0}}var vs=null;function gl(e){return e=e.target||e.srcElement||window,e.correspondingUseElement&&(e=e.correspondingUseElement),e.nodeType===3?e.parentNode:e}var ws=null,xi=null,bi=null;function Mu(e){if(e=Nr(e)){if(typeof ws!="function")throw Error(C(280));var t=e.stateNode;t&&(t=ao(t),ws(e.stateNode,e.type,t))}}function Od(e){xi?bi?bi.push(e):bi=[e]:xi=e}function Fd(){if(xi){var e=xi,t=bi;if(bi=xi=null,Mu(e),t)for(e=0;e>>=0,e===0?32:31-(zh(e)/$h|0)|0}var Yr=64,Jr=4194304;function er(e){switch(e&-e){case 1:return 1;case 2:return 2;case 4:return 4;case 8:return 8;case 16:return 16;case 32:return 32;case 64:case 128:case 256:case 512:case 1024:case 2048:case 4096:case 8192:case 16384:case 32768:case 65536:case 131072:case 262144:case 524288:case 1048576:case 2097152:return e&4194240;case 4194304:case 8388608:case 16777216:case 33554432:case 67108864:return e&130023424;case 134217728:return 134217728;case 268435456:return 268435456;case 536870912:return 536870912;case 1073741824:return 1073741824;default:return e}}function ja(e,t){var n=e.pendingLanes;if(n===0)return 0;var i=0,r=e.suspendedLanes,a=e.pingedLanes,o=n&268435455;if(o!==0){var s=o&~r;s!==0?i=er(s):(a&=o,a!==0&&(i=er(a)))}else o=n&~r,o!==0?i=er(o):a!==0&&(i=er(a));if(i===0)return 0;if(t!==0&&t!==i&&!(t&r)&&(r=i&-i,a=t&-t,r>=a||r===16&&(a&4194240)!==0))return 
t;if(i&4&&(i|=n&16),t=e.entangledLanes,t!==0)for(e=e.entanglements,t&=i;0n;n++)t.push(e);return t}function Rr(e,t,n){e.pendingLanes|=t,t!==536870912&&(e.suspendedLanes=0,e.pingedLanes=0),e=e.eventTimes,t=31-_t(t),e[t]=n}function qh(e,t){var n=e.pendingLanes&~t;e.pendingLanes=t,e.suspendedLanes=0,e.pingedLanes=0,e.expiredLanes&=t,e.mutableReadLanes&=t,e.entangledLanes&=t,t=e.entanglements;var i=e.eventTimes;for(e=e.expirationTimes;0=ar),qu=" ",Wu=!1;function af(e,t){switch(e){case"keyup":return wg.indexOf(t.keyCode)!==-1;case"keydown":return t.keyCode!==229;case"keypress":case"mousedown":case"focusout":return!0;default:return!1}}function of(e){return e=e.detail,typeof e=="object"&&"data"in e?e.data:null}var oi=!1;function bg(e,t){switch(e){case"compositionend":return of(t);case"keypress":return t.which!==32?null:(Wu=!0,qu);case"textInput":return e=t.data,e===qu&&Wu?null:e;default:return null}}function kg(e,t){if(oi)return e==="compositionend"||!Sl&&af(e,t)?(e=nf(),ha=bl=un=null,oi=!1,e):null;switch(e){case"paste":return null;case"keypress":if(!(t.ctrlKey||t.altKey||t.metaKey)||t.ctrlKey&&t.altKey){if(t.char&&1=t)return{node:n,offset:t-e};e=i}e:{for(;n;){if(n.nextSibling){n=n.nextSibling;break e}n=n.parentNode}n=void 0}n=Yu(n)}}function cf(e,t){return e&&t?e===t?!0:e&&e.nodeType===3?!1:t&&t.nodeType===3?cf(e,t.parentNode):"contains"in e?e.contains(t):e.compareDocumentPosition?!!(e.compareDocumentPosition(t)&16):!1:!1}function df(){for(var e=window,t=Ta();t instanceof e.HTMLIFrameElement;){try{var n=typeof t.contentWindow.location.href=="string"}catch{n=!1}if(n)e=t.contentWindow;else break;t=Ta(e.document)}return t}function El(e){var t=e&&e.nodeName&&e.nodeName.toLowerCase();return t&&(t==="input"&&(e.type==="text"||e.type==="search"||e.type==="tel"||e.type==="url"||e.type==="password")||t==="textarea"||e.contentEditable==="true")}function Lg(e){var t=df(),n=e.focusedElem,i=e.selectionRange;if(t!==n&&n&&n.ownerDocument&&cf(n.ownerDocument.documentElement,n)){if(i!==null&&El(n)){if(t=i.start,e=i.end,e===void 0&&(e=t),"selectionStart"in n)n.selectionStart=t,n.selectionEnd=Math.min(e,n.value.length);else if(e=(t=n.ownerDocument||document)&&t.defaultView||window,e.getSelection){e=e.getSelection();var r=n.textContent.length,a=Math.min(i.start,r);i=i.end===void 0?a:Math.min(i.end,r),!e.extend&&a>i&&(r=i,i=a,a=r),r=Ju(n,a);var o=Ju(n,i);r&&o&&(e.rangeCount!==1||e.anchorNode!==r.node||e.anchorOffset!==r.offset||e.focusNode!==o.node||e.focusOffset!==o.offset)&&(t=t.createRange(),t.setStart(r.node,r.offset),e.removeAllRanges(),a>i?(e.addRange(t),e.extend(o.node,o.offset)):(t.setEnd(o.node,o.offset),e.addRange(t)))}}for(t=[],e=n;e=e.parentNode;)e.nodeType===1&&t.push({element:e,left:e.scrollLeft,top:e.scrollTop});for(typeof n.focus=="function"&&n.focus(),n=0;n=document.documentMode,si=null,Es=null,sr=null,Ts=!1;function Zu(e,t,n){var i=n.window===n?n.document:n.nodeType===9?n:n.ownerDocument;Ts||si==null||si!==Ta(i)||(i=si,"selectionStart"in i&&El(i)?i={start:i.selectionStart,end:i.selectionEnd}:(i=(i.ownerDocument&&i.ownerDocument.defaultView||window).getSelection(),i={anchorNode:i.anchorNode,anchorOffset:i.anchorOffset,focusNode:i.focusNode,focusOffset:i.focusOffset}),sr&&xr(sr,i)||(sr=i,i=Ra(Es,"onSelect"),0ci||(e.current=Ps[ci],Ps[ci]=null,ci--)}function le(e,t){ci++,Ps[ci]=e.current,e.current=t}var kn={},Be=Sn(kn),Ge=Sn(!1),$n=kn;function Ti(e,t){var n=e.type.contextTypes;if(!n)return kn;var i=e.stateNode;if(i&&i.__reactInternalMemoizedUnmaskedChildContext===t)return 
i.__reactInternalMemoizedMaskedChildContext;var r={},a;for(a in n)r[a]=t[a];return i&&(e=e.stateNode,e.__reactInternalMemoizedUnmaskedChildContext=t,e.__reactInternalMemoizedMaskedChildContext=r),r}function et(e){return e=e.childContextTypes,e!=null}function Na(){de(Ge),de(Be)}function ac(e,t,n){if(Be.current!==kn)throw Error(C(168));le(Be,t),le(Ge,n)}function xf(e,t,n){var i=e.stateNode;if(t=t.childContextTypes,typeof i.getChildContext!="function")return n;i=i.getChildContext();for(var r in i)if(!(r in t))throw Error(C(108,Th(e)||"Unknown",r));return ye({},n,i)}function Ma(e){return e=(e=e.stateNode)&&e.__reactInternalMemoizedMergedChildContext||kn,$n=Be.current,le(Be,e),le(Ge,Ge.current),!0}function oc(e,t,n){var i=e.stateNode;if(!i)throw Error(C(169));n?(e=xf(e,t,$n),i.__reactInternalMemoizedMergedChildContext=e,de(Ge),de(Be),le(Be,e)):de(Ge),le(Ge,n)}var Ft=null,oo=!1,Ko=!1;function bf(e){Ft===null?Ft=[e]:Ft.push(e)}function Vg(e){oo=!0,bf(e)}function En(){if(!Ko&&Ft!==null){Ko=!0;var e=0,t=ne;try{var n=Ft;for(ne=1;e>=o,r-=o,Ut=1<<32-_t(t)+r|n<A?(U=L,L=null):U=L.sibling;var F=h(g,L,w[A],E);if(F===null){L===null&&(L=U);break}e&&L&&F.alternate===null&&t(g,L),p=a(F,p,A),x===null?j=F:x.sibling=F,x=F,L=U}if(A===w.length)return n(g,L),pe&&Ln(g,A),j;if(L===null){for(;AA?(U=L,L=null):U=L.sibling;var re=h(g,L,F.value,E);if(re===null){L===null&&(L=U);break}e&&L&&re.alternate===null&&t(g,L),p=a(re,p,A),x===null?j=re:x.sibling=re,x=re,L=U}if(F.done)return n(g,L),pe&&Ln(g,A),j;if(L===null){for(;!F.done;A++,F=w.next())F=f(g,F.value,E),F!==null&&(p=a(F,p,A),x===null?j=F:x.sibling=F,x=F);return pe&&Ln(g,A),j}for(L=i(g,L);!F.done;A++,F=w.next())F=v(L,g,A,F.value,E),F!==null&&(e&&F.alternate!==null&&L.delete(F.key===null?A:F.key),p=a(F,p,A),x===null?j=F:x.sibling=F,x=F);return e&&L.forEach(function(me){return t(g,me)}),pe&&Ln(g,A),j}function I(g,p,w,E){if(typeof w=="object"&&w!==null&&w.type===ai&&w.key===null&&(w=w.props.children),typeof w=="object"&&w!==null){switch(w.$$typeof){case Kr:e:{for(var j=w.key,x=p;x!==null;){if(x.key===j){if(j=w.type,j===ai){if(x.tag===7){n(g,x.sibling),p=r(x,w.props.children),p.return=g,g=p;break e}}else if(x.elementType===j||typeof j=="object"&&j!==null&&j.$$typeof===an&&uc(j)===x.type){n(g,x.sibling),p=r(x,w.props),p.ref=Wi(g,x,w),p.return=g,g=p;break e}n(g,x);break}else t(g,x);x=x.sibling}w.type===ai?(p=zn(w.props.children,g.mode,E,w.key),p.return=g,g=p):(E=_a(w.type,w.key,w.props,null,g.mode,E),E.ref=Wi(g,p,w),E.return=g,g=E)}return o(g);case ri:e:{for(x=w.key;p!==null;){if(p.key===x)if(p.tag===4&&p.stateNode.containerInfo===w.containerInfo&&p.stateNode.implementation===w.implementation){n(g,p.sibling),p=r(p,w.children||[]),p.return=g,g=p;break e}else{n(g,p);break}else t(g,p);p=p.sibling}p=ts(w,g.mode,E),p.return=g,g=p}return o(g);case an:return x=w._init,I(g,p,x(w._payload),E)}if(Gi(w))return _(g,p,w,E);if($i(w))return k(g,p,w,E);ra(g,w)}return typeof w=="string"&&w!==""||typeof w=="number"?(w=""+w,p!==null&&p.tag===6?(n(g,p.sibling),p=r(p,w),p.return=g,g=p):(n(g,p),p=es(w,g.mode,E),p.return=g,g=p),o(g)):n(g,p)}return I}var Ai=Ef(!0),Tf=Ef(!1),Ua=Sn(null),za=null,pi=null,Il=null;function jl(){Il=pi=za=null}function Ll(e){var t=Ua.current;de(Ua),e._currentValue=t}function Ns(e,t,n){for(;e!==null;){var i=e.alternate;if((e.childLanes&t)!==t?(e.childLanes|=t,i!==null&&(i.childLanes|=t)):i!==null&&(i.childLanes&t)!==t&&(i.childLanes|=t),e===n)break;e=e.return}}function 
_i(e,t){za=e,Il=pi=null,e=e.dependencies,e!==null&&e.firstContext!==null&&(e.lanes&t&&(Ze=!0),e.firstContext=null)}function ht(e){var t=e._currentValue;if(Il!==e)if(e={context:e,memoizedValue:t,next:null},pi===null){if(za===null)throw Error(C(308));pi=e,za.dependencies={lanes:0,firstContext:e}}else pi=pi.next=e;return t}var Mn=null;function Pl(e){Mn===null?Mn=[e]:Mn.push(e)}function Cf(e,t,n,i){var r=t.interleaved;return r===null?(n.next=n,Pl(t)):(n.next=r.next,r.next=n),t.interleaved=n,qt(e,i)}function qt(e,t){e.lanes|=t;var n=e.alternate;for(n!==null&&(n.lanes|=t),n=e,e=e.return;e!==null;)e.childLanes|=t,n=e.alternate,n!==null&&(n.childLanes|=t),n=e,e=e.return;return n.tag===3?n.stateNode:null}var on=!1;function Rl(e){e.updateQueue={baseState:e.memoizedState,firstBaseUpdate:null,lastBaseUpdate:null,shared:{pending:null,interleaved:null,lanes:0},effects:null}}function Af(e,t){e=e.updateQueue,t.updateQueue===e&&(t.updateQueue={baseState:e.baseState,firstBaseUpdate:e.firstBaseUpdate,lastBaseUpdate:e.lastBaseUpdate,shared:e.shared,effects:e.effects})}function $t(e,t){return{eventTime:e,lane:t,tag:0,payload:null,callback:null,next:null}}function yn(e,t,n){var i=e.updateQueue;if(i===null)return null;if(i=i.shared,Z&2){var r=i.pending;return r===null?t.next=t:(t.next=r.next,r.next=t),i.pending=t,qt(e,n)}return r=i.interleaved,r===null?(t.next=t,Pl(i)):(t.next=r.next,r.next=t),i.interleaved=t,qt(e,n)}function ya(e,t,n){if(t=t.updateQueue,t!==null&&(t=t.shared,(n&4194240)!==0)){var i=t.lanes;i&=e.pendingLanes,n|=i,t.lanes=n,vl(e,n)}}function cc(e,t){var n=e.updateQueue,i=e.alternate;if(i!==null&&(i=i.updateQueue,n===i)){var r=null,a=null;if(n=n.firstBaseUpdate,n!==null){do{var o={eventTime:n.eventTime,lane:n.lane,tag:n.tag,payload:n.payload,callback:n.callback,next:null};a===null?r=a=o:a=a.next=o,n=n.next}while(n!==null);a===null?r=a=t:a=a.next=t}else r=a=t;n={baseState:i.baseState,firstBaseUpdate:r,lastBaseUpdate:a,shared:i.shared,effects:i.effects},e.updateQueue=n;return}e=n.lastBaseUpdate,e===null?n.firstBaseUpdate=t:e.next=t,n.lastBaseUpdate=t}function $a(e,t,n,i){var r=e.updateQueue;on=!1;var a=r.firstBaseUpdate,o=r.lastBaseUpdate,s=r.shared.pending;if(s!==null){r.shared.pending=null;var l=s,u=l.next;l.next=null,o===null?a=u:o.next=u,o=l;var m=e.alternate;m!==null&&(m=m.updateQueue,s=m.lastBaseUpdate,s!==o&&(s===null?m.firstBaseUpdate=u:s.next=u,m.lastBaseUpdate=l))}if(a!==null){var f=r.baseState;o=0,m=u=l=null,s=a;do{var h=s.lane,v=s.eventTime;if((i&h)===h){m!==null&&(m=m.next={eventTime:v,lane:0,tag:s.tag,payload:s.payload,callback:s.callback,next:null});e:{var _=e,k=s;switch(h=t,v=n,k.tag){case 1:if(_=k.payload,typeof _=="function"){f=_.call(v,f,h);break e}f=_;break e;case 3:_.flags=_.flags&-65537|128;case 0:if(_=k.payload,h=typeof _=="function"?_.call(v,f,h):_,h==null)break e;f=ye({},f,h);break e;case 2:on=!0}}s.callback!==null&&s.lane!==0&&(e.flags|=64,h=r.effects,h===null?r.effects=[s]:h.push(s))}else v={eventTime:v,lane:h,tag:s.tag,payload:s.payload,callback:s.callback,next:null},m===null?(u=m=v,l=f):m=m.next=v,o|=h;if(s=s.next,s===null){if(s=r.shared.pending,s===null)break;h=s,s=h.next,h.next=null,r.lastBaseUpdate=h,r.shared.pending=null}}while(!0);if(m===null&&(l=f),r.baseState=l,r.firstBaseUpdate=u,r.lastBaseUpdate=m,t=r.shared.interleaved,t!==null){r=t;do o|=r.lane,r=r.next;while(r!==t)}else a===null&&(r.shared.lanes=0);Hn|=o,e.lanes=o,e.memoizedState=f}}function dc(e,t,n){if(e=t.effects,t.effects=null,e!==null)for(t=0;tn?n:4,e(!0);var 
i=Xo.transition;Xo.transition={};try{e(!1),t()}finally{ne=n,Xo.transition=i}}function qf(){return gt().memoizedState}function Kg(e,t,n){var i=wn(e);if(n={lane:i,action:n,hasEagerState:!1,eagerState:null,next:null},Wf(e))Kf(t,n);else if(n=Cf(e,t,n,i),n!==null){var r=Ke();St(n,e,i,r),Qf(n,t,i)}}function Qg(e,t,n){var i=wn(e),r={lane:i,action:n,hasEagerState:!1,eagerState:null,next:null};if(Wf(e))Kf(t,r);else{var a=e.alternate;if(e.lanes===0&&(a===null||a.lanes===0)&&(a=t.lastRenderedReducer,a!==null))try{var o=t.lastRenderedState,s=a(o,n);if(r.hasEagerState=!0,r.eagerState=s,Et(s,o)){var l=t.interleaved;l===null?(r.next=r,Pl(t)):(r.next=l.next,l.next=r),t.interleaved=r;return}}catch{}finally{}n=Cf(e,t,r,i),n!==null&&(r=Ke(),St(n,e,i,r),Qf(n,t,i))}}function Wf(e){var t=e.alternate;return e===ge||t!==null&&t===ge}function Kf(e,t){lr=Va=!0;var n=e.pending;n===null?t.next=t:(t.next=n.next,n.next=t),e.pending=t}function Qf(e,t,n){if(n&4194240){var i=t.lanes;i&=e.pendingLanes,n|=i,t.lanes=n,vl(e,n)}}var Ha={readContext:ht,useCallback:Ue,useContext:Ue,useEffect:Ue,useImperativeHandle:Ue,useInsertionEffect:Ue,useLayoutEffect:Ue,useMemo:Ue,useReducer:Ue,useRef:Ue,useState:Ue,useDebugValue:Ue,useDeferredValue:Ue,useTransition:Ue,useMutableSource:Ue,useSyncExternalStore:Ue,useId:Ue,unstable_isNewReconciler:!1},Xg={readContext:ht,useCallback:function(e,t){return It().memoizedState=[e,t===void 0?null:t],e},useContext:ht,useEffect:pc,useImperativeHandle:function(e,t,n){return n=n!=null?n.concat([e]):null,wa(4194308,4,zf.bind(null,t,e),n)},useLayoutEffect:function(e,t){return wa(4194308,4,e,t)},useInsertionEffect:function(e,t){return wa(4,2,e,t)},useMemo:function(e,t){var n=It();return t=t===void 0?null:t,e=e(),n.memoizedState=[e,t],e},useReducer:function(e,t,n){var i=It();return t=n!==void 0?n(t):t,i.memoizedState=i.baseState=t,e={pending:null,interleaved:null,lanes:0,dispatch:null,lastRenderedReducer:e,lastRenderedState:t},i.queue=e,e=e.dispatch=Kg.bind(null,ge,e),[i.memoizedState,e]},useRef:function(e){var t=It();return e={current:e},t.memoizedState=e},useState:fc,useDebugValue:$l,useDeferredValue:function(e){return It().memoizedState=e},useTransition:function(){var e=fc(!1),t=e[0];return e=Wg.bind(null,e[1]),It().memoizedState=e,[t,e]},useMutableSource:function(){},useSyncExternalStore:function(e,t,n){var i=ge,r=It();if(pe){if(n===void 0)throw Error(C(407));n=n()}else{if(n=t(),Re===null)throw Error(C(349));Vn&30||Pf(i,t,n)}r.memoizedState=n;var a={value:n,getSnapshot:t};return r.queue=a,pc(Df.bind(null,i,a,e),[e]),i.flags|=2048,Ar(9,Rf.bind(null,i,a,n,t),void 0,null),n},useId:function(){var e=It(),t=Re.identifierPrefix;if(pe){var n=zt,i=Ut;n=(i&~(1<<32-_t(i)-1)).toString(32)+n,t=":"+t+"R"+n,n=Tr++,0<\/script>",e=e.removeChild(e.firstChild)):typeof i.is=="string"?e=o.createElement(n,{is:i.is}):(e=o.createElement(n),n==="select"&&(o=e,i.multiple?o.multiple=!0:i.size&&(o.size=i.size))):e=o.createElementNS(e,n),e[jt]=t,e[_r]=i,rp(e,t,!1,!1),t.stateNode=e;e:{switch(o=ys(n,i),n){case"dialog":ce("cancel",e),ce("close",e),r=i;break;case"iframe":case"object":case"embed":ce("load",e),r=i;break;case"video":case"audio":for(r=0;rLi&&(t.flags|=128,i=!0,Ki(a,!1),t.lanes=4194304)}else{if(!i)if(e=Ba(o),e!==null){if(t.flags|=128,i=!0,n=e.updateQueue,n!==null&&(t.updateQueue=n,t.flags|=4),Ki(a,!0),a.tail===null&&a.tailMode==="hidden"&&!o.alternate&&!pe)return ze(t),null}else 
2*_e()-a.renderingStartTime>Li&&n!==1073741824&&(t.flags|=128,i=!0,Ki(a,!1),t.lanes=4194304);a.isBackwards?(o.sibling=t.child,t.child=o):(n=a.last,n!==null?n.sibling=o:t.child=o,a.last=o)}return a.tail!==null?(t=a.tail,a.rendering=t,a.tail=t.sibling,a.renderingStartTime=_e(),t.sibling=null,n=he.current,le(he,i?n&1|2:n&1),t):(ze(t),null);case 22:case 23:return Kl(),i=t.memoizedState!==null,e!==null&&e.memoizedState!==null!==i&&(t.flags|=8192),i&&t.mode&1?nt&1073741824&&(ze(t),t.subtreeFlags&6&&(t.flags|=8192)):ze(t),null;case 24:return null;case 25:return null}throw Error(C(156,t.tag))}function iy(e,t){switch(Cl(t),t.tag){case 1:return et(t.type)&&Na(),e=t.flags,e&65536?(t.flags=e&-65537|128,t):null;case 3:return Ii(),de(Ge),de(Be),Ml(),e=t.flags,e&65536&&!(e&128)?(t.flags=e&-65537|128,t):null;case 5:return Nl(t),null;case 13:if(de(he),e=t.memoizedState,e!==null&&e.dehydrated!==null){if(t.alternate===null)throw Error(C(340));Ci()}return e=t.flags,e&65536?(t.flags=e&-65537|128,t):null;case 19:return de(he),null;case 4:return Ii(),null;case 10:return Ll(t.type._context),null;case 22:case 23:return Kl(),null;case 24:return null;default:return null}}var oa=!1,$e=!1,ry=typeof WeakSet=="function"?WeakSet:Set,R=null;function mi(e,t){var n=e.ref;if(n!==null)if(typeof n=="function")try{n(null)}catch(i){xe(e,t,i)}else n.current=null}function Hs(e,t,n){try{n()}catch(i){xe(e,t,i)}}var Sc=!1;function ay(e,t){if(Cs=La,e=df(),El(e)){if("selectionStart"in e)var n={start:e.selectionStart,end:e.selectionEnd};else e:{n=(n=e.ownerDocument)&&n.defaultView||window;var i=n.getSelection&&n.getSelection();if(i&&i.rangeCount!==0){n=i.anchorNode;var r=i.anchorOffset,a=i.focusNode;i=i.focusOffset;try{n.nodeType,a.nodeType}catch{n=null;break e}var o=0,s=-1,l=-1,u=0,m=0,f=e,h=null;t:for(;;){for(var v;f!==n||r!==0&&f.nodeType!==3||(s=o+r),f!==a||i!==0&&f.nodeType!==3||(l=o+i),f.nodeType===3&&(o+=f.nodeValue.length),(v=f.firstChild)!==null;)h=f,f=v;for(;;){if(f===e)break t;if(h===n&&++u===r&&(s=o),h===a&&++m===i&&(l=o),(v=f.nextSibling)!==null)break;f=h,h=f.parentNode}f=v}n=s===-1||l===-1?null:{start:s,end:l}}else n=null}n=n||{start:0,end:0}}else n=null;for(As={focusedElem:e,selectionRange:n},La=!1,R=t;R!==null;)if(t=R,e=t.child,(t.subtreeFlags&1028)!==0&&e!==null)e.return=t,R=e;else for(;R!==null;){t=R;try{var _=t.alternate;if(t.flags&1024)switch(t.tag){case 0:case 11:case 15:break;case 1:if(_!==null){var k=_.memoizedProps,I=_.memoizedState,g=t.stateNode,p=g.getSnapshotBeforeUpdate(t.elementType===t.type?k:wt(t.type,k),I);g.__reactInternalSnapshotBeforeUpdate=p}break;case 3:var w=t.stateNode.containerInfo;w.nodeType===1?w.textContent="":w.nodeType===9&&w.documentElement&&w.removeChild(w.documentElement);break;case 5:case 6:case 4:case 17:break;default:throw Error(C(163))}}catch(E){xe(t,t.return,E)}if(e=t.sibling,e!==null){e.return=t.return,R=e;break}R=t.return}return _=Sc,Sc=!1,_}function ur(e,t,n){var i=t.updateQueue;if(i=i!==null?i.lastEffect:null,i!==null){var r=i=i.next;do{if((r.tag&e)===e){var a=r.destroy;r.destroy=void 0,a!==void 0&&Hs(t,n,a)}r=r.next}while(r!==i)}}function uo(e,t){if(t=t.updateQueue,t=t!==null?t.lastEffect:null,t!==null){var n=t=t.next;do{if((n.tag&e)===e){var i=n.create;n.destroy=i()}n=n.next}while(n!==t)}}function qs(e){var t=e.ref;if(t!==null){var n=e.stateNode;switch(e.tag){case 5:e=n;break;default:e=n}typeof t=="function"?t(e):t.current=e}}function sp(e){var 
t=e.alternate;t!==null&&(e.alternate=null,sp(t)),e.child=null,e.deletions=null,e.sibling=null,e.tag===5&&(t=e.stateNode,t!==null&&(delete t[jt],delete t[_r],delete t[Ls],delete t[$g],delete t[Bg])),e.stateNode=null,e.return=null,e.dependencies=null,e.memoizedProps=null,e.memoizedState=null,e.pendingProps=null,e.stateNode=null,e.updateQueue=null}function lp(e){return e.tag===5||e.tag===3||e.tag===4}function Ec(e){e:for(;;){for(;e.sibling===null;){if(e.return===null||lp(e.return))return null;e=e.return}for(e.sibling.return=e.return,e=e.sibling;e.tag!==5&&e.tag!==6&&e.tag!==18;){if(e.flags&2||e.child===null||e.tag===4)continue e;e.child.return=e,e=e.child}if(!(e.flags&2))return e.stateNode}}function Ws(e,t,n){var i=e.tag;if(i===5||i===6)e=e.stateNode,t?n.nodeType===8?n.parentNode.insertBefore(e,t):n.insertBefore(e,t):(n.nodeType===8?(t=n.parentNode,t.insertBefore(e,n)):(t=n,t.appendChild(e)),n=n._reactRootContainer,n!=null||t.onclick!==null||(t.onclick=Da));else if(i!==4&&(e=e.child,e!==null))for(Ws(e,t,n),e=e.sibling;e!==null;)Ws(e,t,n),e=e.sibling}function Ks(e,t,n){var i=e.tag;if(i===5||i===6)e=e.stateNode,t?n.insertBefore(e,t):n.appendChild(e);else if(i!==4&&(e=e.child,e!==null))for(Ks(e,t,n),e=e.sibling;e!==null;)Ks(e,t,n),e=e.sibling}var Me=null,xt=!1;function Gt(e,t,n){for(n=n.child;n!==null;)up(e,t,n),n=n.sibling}function up(e,t,n){if(Lt&&typeof Lt.onCommitFiberUnmount=="function")try{Lt.onCommitFiberUnmount(to,n)}catch{}switch(n.tag){case 5:$e||mi(n,t);case 6:var i=Me,r=xt;Me=null,Gt(e,t,n),Me=i,xt=r,Me!==null&&(xt?(e=Me,n=n.stateNode,e.nodeType===8?e.parentNode.removeChild(n):e.removeChild(n)):Me.removeChild(n.stateNode));break;case 18:Me!==null&&(xt?(e=Me,n=n.stateNode,e.nodeType===8?Wo(e.parentNode,n):e.nodeType===1&&Wo(e,n),vr(e)):Wo(Me,n.stateNode));break;case 4:i=Me,r=xt,Me=n.stateNode.containerInfo,xt=!0,Gt(e,t,n),Me=i,xt=r;break;case 0:case 11:case 14:case 15:if(!$e&&(i=n.updateQueue,i!==null&&(i=i.lastEffect,i!==null))){r=i=i.next;do{var a=r,o=a.destroy;a=a.tag,o!==void 0&&(a&2||a&4)&&Hs(n,t,o),r=r.next}while(r!==i)}Gt(e,t,n);break;case 1:if(!$e&&(mi(n,t),i=n.stateNode,typeof i.componentWillUnmount=="function"))try{i.props=n.memoizedProps,i.state=n.memoizedState,i.componentWillUnmount()}catch(s){xe(n,t,s)}Gt(e,t,n);break;case 21:Gt(e,t,n);break;case 22:n.mode&1?($e=(i=$e)||n.memoizedState!==null,Gt(e,t,n),$e=i):Gt(e,t,n);break;default:Gt(e,t,n)}}function Tc(e){var t=e.updateQueue;if(t!==null){e.updateQueue=null;var n=e.stateNode;n===null&&(n=e.stateNode=new ry),t.forEach(function(i){var r=my.bind(null,e,i);n.has(i)||(n.add(i),i.then(r,r))})}}function vt(e,t){var n=t.deletions;if(n!==null)for(var i=0;ir&&(r=o),i&=~a}if(i=r,i=_e()-i,i=(120>i?120:480>i?480:1080>i?1080:1920>i?1920:3e3>i?3e3:4320>i?4320:1960*sy(i/1960))-i,10e?16:e,cn===null)var i=!1;else{if(e=cn,cn=null,Ka=0,Z&6)throw Error(C(331));var r=Z;for(Z|=4,R=e.current;R!==null;){var a=R,o=a.child;if(R.flags&16){var s=a.deletions;if(s!==null){for(var l=0;l_e()-ql?Un(e,0):Hl|=n),tt(e,t)}function yp(e,t){t===0&&(e.mode&1?(t=Jr,Jr<<=1,!(Jr&130023424)&&(Jr=4194304)):t=1);var n=Ke();e=qt(e,t),e!==null&&(Rr(e,t,n),tt(e,n))}function py(e){var t=e.memoizedState,n=0;t!==null&&(n=t.retryLane),yp(e,n)}function my(e,t){var n=0;switch(e.tag){case 13:var i=e.stateNode,r=e.memoizedState;r!==null&&(n=r.retryLane);break;case 19:i=e.stateNode;break;default:throw Error(C(314))}i!==null&&i.delete(t),yp(e,n)}var vp;vp=function(e,t,n){if(e!==null)if(e.memoizedProps!==t.pendingProps||Ge.current)Ze=!0;else{if(!(e.lanes&n)&&!(t.flags&128))return 
Ze=!1,ty(e,t,n);Ze=!!(e.flags&131072)}else Ze=!1,pe&&t.flags&1048576&&kf(t,Fa,t.index);switch(t.lanes=0,t.tag){case 2:var i=t.type;xa(e,t),e=t.pendingProps;var r=Ti(t,Be.current);_i(t,n),r=Fl(null,t,i,e,r,n);var a=Ul();return t.flags|=1,typeof r=="object"&&r!==null&&typeof r.render=="function"&&r.$$typeof===void 0?(t.tag=1,t.memoizedState=null,t.updateQueue=null,et(i)?(a=!0,Ma(t)):a=!1,t.memoizedState=r.state!==null&&r.state!==void 0?r.state:null,Rl(t),r.updater=lo,t.stateNode=r,r._reactInternals=t,Os(t,i,e,n),t=zs(null,t,i,!0,a,n)):(t.tag=0,pe&&a&&Tl(t),We(null,t,r,n),t=t.child),t;case 16:i=t.elementType;e:{switch(xa(e,t),e=t.pendingProps,r=i._init,i=r(i._payload),t.type=i,r=t.tag=gy(i),e=wt(i,e),r){case 0:t=Us(null,t,i,e,n);break e;case 1:t=bc(null,t,i,e,n);break e;case 11:t=wc(null,t,i,e,n);break e;case 14:t=xc(null,t,i,wt(i.type,e),n);break e}throw Error(C(306,i,""))}return t;case 0:return i=t.type,r=t.pendingProps,r=t.elementType===i?r:wt(i,r),Us(e,t,i,r,n);case 1:return i=t.type,r=t.pendingProps,r=t.elementType===i?r:wt(i,r),bc(e,t,i,r,n);case 3:e:{if(tp(t),e===null)throw Error(C(387));i=t.pendingProps,a=t.memoizedState,r=a.element,Af(e,t),$a(t,i,null,n);var o=t.memoizedState;if(i=o.element,a.isDehydrated)if(a={element:i,isDehydrated:!1,cache:o.cache,pendingSuspenseBoundaries:o.pendingSuspenseBoundaries,transitions:o.transitions},t.updateQueue.baseState=a,t.memoizedState=a,t.flags&256){r=ji(Error(C(423)),t),t=kc(e,t,i,n,r);break e}else if(i!==r){r=ji(Error(C(424)),t),t=kc(e,t,i,n,r);break e}else for(rt=gn(t.stateNode.containerInfo.firstChild),at=t,pe=!0,kt=null,n=Tf(t,null,i,n),t.child=n;n;)n.flags=n.flags&-3|4096,n=n.sibling;else{if(Ci(),i===r){t=Wt(e,t,n);break e}We(e,t,i,n)}t=t.child}return t;case 5:return If(t),e===null&&Ds(t),i=t.type,r=t.pendingProps,a=e!==null?e.memoizedProps:null,o=r.children,Is(i,r)?o=null:a!==null&&Is(i,a)&&(t.flags|=32),ep(e,t),We(e,t,o,n),t.child;case 6:return e===null&&Ds(t),null;case 13:return np(e,t,n);case 4:return Dl(t,t.stateNode.containerInfo),i=t.pendingProps,e===null?t.child=Ai(t,null,i,n):We(e,t,i,n),t.child;case 11:return i=t.type,r=t.pendingProps,r=t.elementType===i?r:wt(i,r),wc(e,t,i,r,n);case 7:return We(e,t,t.pendingProps,n),t.child;case 8:return We(e,t,t.pendingProps.children,n),t.child;case 12:return We(e,t,t.pendingProps.children,n),t.child;case 10:e:{if(i=t.type._context,r=t.pendingProps,a=t.memoizedProps,o=r.value,le(Ua,i._currentValue),i._currentValue=o,a!==null)if(Et(a.value,o)){if(a.children===r.children&&!Ge.current){t=Wt(e,t,n);break e}}else for(a=t.child,a!==null&&(a.return=t);a!==null;){var s=a.dependencies;if(s!==null){o=a.child;for(var l=s.firstContext;l!==null;){if(l.context===i){if(a.tag===1){l=$t(-1,n&-n),l.tag=2;var u=a.updateQueue;if(u!==null){u=u.shared;var m=u.pending;m===null?l.next=l:(l.next=m.next,m.next=l),u.pending=l}}a.lanes|=n,l=a.alternate,l!==null&&(l.lanes|=n),Ns(a.return,n,t),s.lanes|=n;break}l=l.next}}else if(a.tag===10)o=a.type===t.type?null:a.child;else if(a.tag===18){if(o=a.return,o===null)throw Error(C(341));o.lanes|=n,s=o.alternate,s!==null&&(s.lanes|=n),Ns(o,n,t),o=a.sibling}else o=a.child;if(o!==null)o.return=a;else for(o=a;o!==null;){if(o===t){o=null;break}if(a=o.sibling,a!==null){a.return=o.return,o=a;break}o=o.return}a=o}We(e,t,r.children,n),t=t.child}return t;case 9:return r=t.type,i=t.pendingProps.children,_i(t,n),r=ht(r),i=i(r),t.flags|=1,We(e,t,i,n),t.child;case 14:return i=t.type,r=wt(i,t.pendingProps),r=wt(i.type,r),xc(e,t,i,r,n);case 15:return Zf(e,t,t.type,t.pendingProps,n);case 17:return 
i=t.type,r=t.pendingProps,r=t.elementType===i?r:wt(i,r),xa(e,t),t.tag=1,et(i)?(e=!0,Ma(t)):e=!1,_i(t,n),Xf(t,i,r),Os(t,i,r,n),zs(null,t,i,!0,e,n);case 19:return ip(e,t,n);case 22:return Gf(e,t,n)}throw Error(C(156,t.tag))};function wp(e,t){return qd(e,t)}function hy(e,t,n,i){this.tag=e,this.key=n,this.sibling=this.child=this.return=this.stateNode=this.type=this.elementType=null,this.index=0,this.ref=null,this.pendingProps=t,this.dependencies=this.memoizedState=this.updateQueue=this.memoizedProps=null,this.mode=i,this.subtreeFlags=this.flags=0,this.deletions=null,this.childLanes=this.lanes=0,this.alternate=null}function pt(e,t,n,i){return new hy(e,t,n,i)}function Xl(e){return e=e.prototype,!(!e||!e.isReactComponent)}function gy(e){if(typeof e=="function")return Xl(e)?1:0;if(e!=null){if(e=e.$$typeof,e===ml)return 11;if(e===hl)return 14}return 2}function xn(e,t){var n=e.alternate;return n===null?(n=pt(e.tag,t,e.key,e.mode),n.elementType=e.elementType,n.type=e.type,n.stateNode=e.stateNode,n.alternate=e,e.alternate=n):(n.pendingProps=t,n.type=e.type,n.flags=0,n.subtreeFlags=0,n.deletions=null),n.flags=e.flags&14680064,n.childLanes=e.childLanes,n.lanes=e.lanes,n.child=e.child,n.memoizedProps=e.memoizedProps,n.memoizedState=e.memoizedState,n.updateQueue=e.updateQueue,t=e.dependencies,n.dependencies=t===null?null:{lanes:t.lanes,firstContext:t.firstContext},n.sibling=e.sibling,n.index=e.index,n.ref=e.ref,n}function _a(e,t,n,i,r,a){var o=2;if(i=e,typeof e=="function")Xl(e)&&(o=1);else if(typeof e=="string")o=5;else e:switch(e){case ai:return zn(n.children,r,a,t);case pl:o=8,r|=8;break;case ss:return e=pt(12,n,t,r|2),e.elementType=ss,e.lanes=a,e;case ls:return e=pt(13,n,t,r),e.elementType=ls,e.lanes=a,e;case us:return e=pt(19,n,t,r),e.elementType=us,e.lanes=a,e;case Ad:return fo(n,r,a,t);default:if(typeof e=="object"&&e!==null)switch(e.$$typeof){case Td:o=10;break e;case Cd:o=9;break e;case ml:o=11;break e;case hl:o=14;break e;case an:o=16,i=null;break e}throw Error(C(130,e==null?e:typeof e,""))}return t=pt(o,n,t,r),t.elementType=e,t.type=i,t.lanes=a,t}function zn(e,t,n,i){return e=pt(7,e,i,t),e.lanes=n,e}function fo(e,t,n,i){return e=pt(22,e,i,t),e.elementType=Ad,e.lanes=n,e.stateNode={isHidden:!1},e}function es(e,t,n){return e=pt(6,e,null,t),e.lanes=n,e}function ts(e,t,n){return t=pt(4,e.children!==null?e.children:[],e.key,t),t.lanes=n,t.stateNode={containerInfo:e.containerInfo,pendingChildren:null,implementation:e.implementation},t}function yy(e,t,n,i,r){this.tag=t,this.containerInfo=e,this.finishedWork=this.pingCache=this.current=this.pendingChildren=null,this.timeoutHandle=-1,this.callbackNode=this.pendingContext=this.context=null,this.callbackPriority=0,this.eventTimes=No(0),this.expirationTimes=No(-1),this.entangledLanes=this.finishedLanes=this.mutableReadLanes=this.expiredLanes=this.pingedLanes=this.suspendedLanes=this.pendingLanes=0,this.entanglements=No(0),this.identifierPrefix=i,this.onRecoverableError=r,this.mutableSourceEagerHydrationData=null}function Yl(e,t,n,i,r,a,o,s,l){return e=new yy(e,t,n,s,l),t===1?(t=1,a===!0&&(t|=8)):t=0,a=pt(3,null,null,t),e.current=a,a.stateNode=e,a.memoizedState={element:i,isDehydrated:n,cache:null,transitions:null,pendingSuspenseBoundaries:null},Rl(a),e}function vy(e,t,n){var i=3"u"||typeof __REACT_DEVTOOLS_GLOBAL_HOOK__.checkDCE!="function"))try{__REACT_DEVTOOLS_GLOBAL_HOOK__.checkDCE(_p)}catch(e){console.error(e)}}_p(),kd.exports=st;var eu=kd.exports;const _y=cd(eu),Sy=ud({__proto__:null,default:_y},[eu]);var Sp,Dc=eu;Sp=Dc.createRoot,Dc.hydrateRoot;/** * 
@remix-run/router v1.21.0 * * Copyright (c) Remix Software Inc. * * This source code is licensed under the MIT license found in the * LICENSE.md file in the root directory of this source tree. * * @license MIT */function fe(){return fe=Object.assign?Object.assign.bind():function(e){for(var t=1;t"u")throw new Error(t)}function Wn(e,t){if(!e){typeof console<"u"&&console.warn(t);try{throw new Error(t)}catch{}}}function Ty(){return Math.random().toString(36).substr(2,8)}function Mc(e,t){return{usr:e.state,key:e.key,idx:t}}function jr(e,t,n,i){return n===void 0&&(n=null),fe({pathname:typeof e=="string"?e:e.pathname,search:"",hash:""},typeof t=="string"?Qt(t):t,{state:n,key:t&&t.key||i||Ty()})}function Kn(e){let{pathname:t="/",search:n="",hash:i=""}=e;return n&&n!=="?"&&(t+=n.charAt(0)==="?"?n:"?"+n),i&&i!=="#"&&(t+=i.charAt(0)==="#"?i:"#"+i),t}function Qt(e){let t={};if(e){let n=e.indexOf("#");n>=0&&(t.hash=e.substr(n),e=e.substr(0,n));let i=e.indexOf("?");i>=0&&(t.search=e.substr(i),e=e.substr(0,i)),e&&(t.pathname=e)}return t}function Cy(e,t,n,i){i===void 0&&(i={});let{window:r=document.defaultView,v5Compat:a=!1}=i,o=r.history,s=Ce.Pop,l=null,u=m();u==null&&(u=0,o.replaceState(fe({},o.state,{idx:u}),""));function m(){return(o.state||{idx:null}).idx}function f(){s=Ce.Pop;let I=m(),g=I==null?null:I-u;u=I,l&&l({action:s,location:k.location,delta:g})}function h(I,g){s=Ce.Push;let p=jr(k.location,I,g);n&&n(p,I),u=m()+1;let w=Mc(p,u),E=k.createHref(p);try{o.pushState(w,"",E)}catch(j){if(j instanceof DOMException&&j.name==="DataCloneError")throw j;r.location.assign(E)}a&&l&&l({action:s,location:k.location,delta:1})}function v(I,g){s=Ce.Replace;let p=jr(k.location,I,g);n&&n(p,I),u=m();let w=Mc(p,u),E=k.createHref(p);o.replaceState(w,"",E),a&&l&&l({action:s,location:k.location,delta:0})}function _(I){let g=r.location.origin!=="null"?r.location.origin:r.location.href,p=typeof I=="string"?I:Kn(I);return p=p.replace(/ $/,"%20"),Q(g,"No window.location.(origin|href) available to create URL for href: "+p),new URL(p,g)}let k={get action(){return s},get location(){return e(r,o)},listen(I){if(l)throw new Error("A history only accepts one active listener");return r.addEventListener(Nc,f),l=I,()=>{r.removeEventListener(Nc,f),l=null}},createHref(I){return t(r,I)},createURL:_,encodeLocation(I){let g=_(I);return{pathname:g.pathname,search:g.search,hash:g.hash}},push:h,replace:v,go(I){return o.go(I)}};return k}var ae;(function(e){e.data="data",e.deferred="deferred",e.redirect="redirect",e.error="error"})(ae||(ae={}));const Ay=new Set(["lazy","caseSensitive","path","id","index","children"]);function Iy(e){return e.index===!0}function Ya(e,t,n,i){return n===void 0&&(n=[]),i===void 0&&(i={}),e.map((r,a)=>{let o=[...n,String(a)],s=typeof r.id=="string"?r.id:o.join("-");if(Q(r.index!==!0||!r.children,"Cannot specify children on an index route"),Q(!i[s],'Found a route id collision on id "'+s+`". 
Route id's must be globally unique within Data Router usages`),Iy(r)){let l=fe({},r,t(r),{id:s});return i[s]=l,l}else{let l=fe({},r,t(r),{id:s,children:void 0});return i[s]=l,r.children&&(l.children=Ya(r.children,t,o,i)),l}})}function Rn(e,t,n){return n===void 0&&(n="/"),Sa(e,t,n,!1)}function Sa(e,t,n,i){let r=typeof t=="string"?Qt(t):t,a=Ni(r.pathname||"/",n);if(a==null)return null;let o=Ep(e);Ly(o);let s=null;for(let l=0;s==null&&l{let l={relativePath:s===void 0?a.path||"":s,caseSensitive:a.caseSensitive===!0,childrenIndex:o,route:a};l.relativePath.startsWith("/")&&(Q(l.relativePath.startsWith(i),'Absolute route path "'+l.relativePath+'" nested under path '+('"'+i+'" is not valid. An absolute child route path ')+"must start with the combined path of all its parent routes."),l.relativePath=l.relativePath.slice(i.length));let u=Bt([i,l.relativePath]),m=n.concat(l);a.children&&a.children.length>0&&(Q(a.index!==!0,"Index routes must not have child routes. Please remove "+('all child routes from route path "'+u+'".')),Ep(a.children,t,m,u)),!(a.path==null&&!a.index)&&t.push({path:u,score:Fy(u,a.index),routesMeta:m})};return e.forEach((a,o)=>{var s;if(a.path===""||!((s=a.path)!=null&&s.includes("?")))r(a,o);else for(let l of Tp(a.path))r(a,o,l)}),t}function Tp(e){let t=e.split("/");if(t.length===0)return[];let[n,...i]=t,r=n.endsWith("?"),a=n.replace(/\?$/,"");if(i.length===0)return r?[a,""]:[a];let o=Tp(i.join("/")),s=[];return s.push(...o.map(l=>l===""?a:[a,l].join("/"))),r&&s.push(...o),s.map(l=>e.startsWith("/")&&l===""?"/":l)}function Ly(e){e.sort((t,n)=>t.score!==n.score?n.score-t.score:Uy(t.routesMeta.map(i=>i.childrenIndex),n.routesMeta.map(i=>i.childrenIndex)))}const Py=/^:[\w-]+$/,Ry=3,Dy=2,Ny=1,My=10,Oy=-2,Oc=e=>e==="*";function Fy(e,t){let n=e.split("/"),i=n.length;return n.some(Oc)&&(i+=Oy),t&&(i+=Dy),n.filter(r=>!Oc(r)).reduce((r,a)=>r+(Py.test(a)?Ry:a===""?Ny:My),i)}function Uy(e,t){return e.length===t.length&&e.slice(0,-1).every((i,r)=>i===t[r])?e[e.length-1]-t[t.length-1]:0}function zy(e,t,n){n===void 0&&(n=!1);let{routesMeta:i}=e,r={},a="/",o=[];for(let s=0;s{let{paramName:h,isOptional:v}=m;if(h==="*"){let k=s[f]||"";o=a.slice(0,a.length-k.length).replace(/(.)\/+$/,"$1")}const _=s[f];return v&&!_?u[h]=void 0:u[h]=(_||"").replace(/%2F/g,"/"),u},{}),pathname:a,pathnameBase:o,pattern:e}}function $y(e,t,n){t===void 0&&(t=!1),n===void 0&&(n=!0),Wn(e==="*"||!e.endsWith("*")||e.endsWith("/*"),'Route path "'+e+'" will be treated as if it were '+('"'+e.replace(/\*$/,"/*")+'" because the `*` character must ')+"always follow a `/` in the pattern. To get rid of this warning, "+('please change the route path to "'+e.replace(/\*$/,"/*")+'".'));let i=[],r="^"+e.replace(/\/*\*?$/,"").replace(/^\/*/,"/").replace(/[\\.*+^${}|()[\]]/g,"\\$&").replace(/\/:([\w-]+)(\?)?/g,(o,s,l)=>(i.push({paramName:s,isOptional:l!=null}),l?"/?([^\\/]+)?":"/([^\\/]+)"));return e.endsWith("*")?(i.push({paramName:"*"}),r+=e==="*"||e==="/*"?"(.*)$":"(?:\\/(.+)|\\/*)$"):n?r+="\\/*$":e!==""&&e!=="/"&&(r+="(?:(?=\\/|$))"),[new RegExp(r,t?void 0:"i"),i]}function By(e){try{return e.split("/").map(t=>decodeURIComponent(t).replace(/\//g,"%2F")).join("/")}catch(t){return Wn(!1,'The URL path "'+e+'" could not be decoded because it is is a malformed URL segment. 
This is probably due to a bad percent '+("encoding ("+t+").")),e}}function Ni(e,t){if(t==="/")return e;if(!e.toLowerCase().startsWith(t.toLowerCase()))return null;let n=t.endsWith("/")?t.length-1:t.length,i=e.charAt(n);return i&&i!=="/"?null:e.slice(n)||"/"}function Vy(e,t){t===void 0&&(t="/");let{pathname:n,search:i="",hash:r=""}=typeof e=="string"?Qt(e):e;return{pathname:n?n.startsWith("/")?n:Hy(n,t):t,search:Wy(i),hash:Ky(r)}}function Hy(e,t){let n=t.replace(/\/+$/,"").split("/");return e.split("/").forEach(r=>{r===".."?n.length>1&&n.pop():r!=="."&&n.push(r)}),n.length>1?n.join("/"):"/"}function ns(e,t,n,i){return"Cannot include a '"+e+"' character in a manually specified "+("`to."+t+"` field ["+JSON.stringify(i)+"]. Please separate it out to the ")+("`to."+n+"` field. Alternatively you may provide the full path as ")+'a string in and the router will parse it for you.'}function Cp(e){return e.filter((t,n)=>n===0||t.route.path&&t.route.path.length>0)}function yo(e,t){let n=Cp(e);return t?n.map((i,r)=>r===n.length-1?i.pathname:i.pathnameBase):n.map(i=>i.pathnameBase)}function vo(e,t,n,i){i===void 0&&(i=!1);let r;typeof e=="string"?r=Qt(e):(r=fe({},e),Q(!r.pathname||!r.pathname.includes("?"),ns("?","pathname","search",r)),Q(!r.pathname||!r.pathname.includes("#"),ns("#","pathname","hash",r)),Q(!r.search||!r.search.includes("#"),ns("#","search","hash",r)));let a=e===""||r.pathname==="",o=a?"/":r.pathname,s;if(o==null)s=n;else{let f=t.length-1;if(!i&&o.startsWith("..")){let h=o.split("/");for(;h[0]==="..";)h.shift(),f-=1;r.pathname=h.join("/")}s=f>=0?t[f]:"/"}let l=Vy(r,s),u=o&&o!=="/"&&o.endsWith("/"),m=(a||o===".")&&n.endsWith("/");return!l.pathname.endsWith("/")&&(u||m)&&(l.pathname+="/"),l}const Bt=e=>e.join("/").replace(/\/\/+/g,"/"),qy=e=>e.replace(/\/+$/,"").replace(/^\/*/,"/"),Wy=e=>!e||e==="?"?"":e.startsWith("?")?e:"?"+e,Ky=e=>!e||e==="#"?"":e.startsWith("#")?e:"#"+e;class Ja{constructor(t,n,i,r){r===void 0&&(r=!1),this.status=t,this.statusText=n||"",this.internal=r,i instanceof Error?(this.data=i.toString(),this.error=i):this.data=i}}function wo(e){return e!=null&&typeof e.status=="number"&&typeof e.statusText=="string"&&typeof e.internal=="boolean"&&"data"in e}const Ap=["post","put","patch","delete"],Qy=new Set(Ap),Xy=["get",...Ap],Yy=new Set(Xy),Jy=new Set([301,302,303,307,308]),Zy=new Set([307,308]),is={state:"idle",location:void 0,formMethod:void 0,formAction:void 0,formEncType:void 0,formData:void 0,json:void 0,text:void 0},Gy={state:"idle",data:void 0,formMethod:void 0,formAction:void 0,formEncType:void 0,formData:void 0,json:void 0,text:void 0},Xi={state:"unblocked",proceed:void 0,reset:void 0,location:void 0},tu=/^(?:[a-z][a-z0-9+.-]*:|\/\/)/i,ev=e=>({hasErrorBoundary:!!e.hasErrorBoundary}),Ip="remix-router-transitions";function tv(e){const t=e.window?e.window:typeof window<"u"?window:void 0,n=typeof t<"u"&&typeof t.document<"u"&&typeof t.document.createElement<"u",i=!n;Q(e.routes.length>0,"You must provide a non-empty routes array to createRouter");let r;if(e.mapRouteProperties)r=e.mapRouteProperties;else if(e.detectErrorBoundary){let y=e.detectErrorBoundary;r=b=>({hasErrorBoundary:y(b)})}else r=ev;let a={},o=Ya(e.routes,r,void 0,a),s,l=e.basename||"/",u=e.dataStrategy||av,m=e.patchRoutesOnNavigation,f=fe({v7_fetcherPersist:!1,v7_normalizeFormMethod:!1,v7_partialHydration:!1,v7_prependBasename:!1,v7_relativeSplatPath:!1,v7_skipActionErrorRevalidation:!1},e.future),h=null,v=new 
Set,_=null,k=null,I=null,g=e.hydrationData!=null,p=Rn(o,e.history.location,l),w=null;if(p==null&&!m){let y=Ye(404,{pathname:e.history.location.pathname}),{matches:b,route:S}=Xc(o);p=b,w={[S.id]:y}}p&&!e.hydrationData&&Br(p,o,e.history.location.pathname).active&&(p=null);let E;if(p)if(p.some(y=>y.route.lazy))E=!1;else if(!p.some(y=>y.route.loader))E=!0;else if(f.v7_partialHydration){let y=e.hydrationData?e.hydrationData.loaderData:null,b=e.hydrationData?e.hydrationData.errors:null;if(b){let S=p.findIndex(T=>b[T.route.id]!==void 0);E=p.slice(0,S+1).every(T=>!Gs(T.route,y,b))}else E=p.every(S=>!Gs(S.route,y,b))}else E=e.hydrationData!=null;else if(E=!1,p=[],f.v7_partialHydration){let y=Br(null,o,e.history.location.pathname);y.active&&y.matches&&(p=y.matches)}let j,x={historyAction:e.history.action,location:e.history.location,matches:p,initialized:E,navigation:is,restoreScrollPosition:e.hydrationData!=null?!1:null,preventScrollReset:!1,revalidation:"idle",loaderData:e.hydrationData&&e.hydrationData.loaderData||{},actionData:e.hydrationData&&e.hydrationData.actionData||null,errors:e.hydrationData&&e.hydrationData.errors||w,fetchers:new Map,blockers:new Map},L=Ce.Pop,A=!1,U,F=!1,re=new Map,me=null,Se=!1,Ee=!1,Xt=[],Yt=new Set,ke=new Map,P=0,H=-1,q=new Map,ee=new Set,ue=new Map,Tt=new Map,De=new Set,yt=new Map,Ve=new Map,Dt;function Rm(){if(h=e.history.listen(y=>{let{action:b,location:S,delta:T}=y;if(Dt){Dt(),Dt=void 0;return}Wn(Ve.size===0||T!=null,"You are trying to use a blocker on a POP navigation to a location that was not created by @remix-run/router. This will fail silently in production. This can happen if you are navigating outside the router via `window.history.pushState`/`window.location.hash` instead of using router navigation APIs. This can also happen if you are using createHashRouter and the user manually changes the URL.");let D=ku({currentLocation:x.location,nextLocation:S,historyAction:b});if(D&&T!=null){let $=new Promise(W=>{Dt=W});e.history.go(T*-1),$r(D,{state:"blocked",location:S,proceed(){$r(D,{state:"proceeding",proceed:void 0,reset:void 0,location:S}),$.then(()=>e.history.go(T))},reset(){let W=new Map(x.blockers);W.set(D,Xi),He({blockers:W})}});return}return An(b,S)}),n){wv(t,re);let y=()=>xv(t,re);t.addEventListener("pagehide",y),me=()=>t.removeEventListener("pagehide",y)}return x.initialized||An(Ce.Pop,x.location,{initialHydration:!0}),j}function Dm(){h&&h(),me&&me(),v.clear(),U&&U.abort(),x.fetchers.forEach((y,b)=>zr(b)),x.blockers.forEach((y,b)=>bu(b))}function Nm(y){return v.add(y),()=>v.delete(y)}function He(y,b){b===void 0&&(b={}),x=fe({},x,y);let S=[],T=[];f.v7_fetcherPersist&&x.fetchers.forEach((D,$)=>{D.state==="idle"&&(De.has($)?T.push($):S.push($))}),[...v].forEach(D=>D(x,{deletedFetchers:T,viewTransitionOpts:b.viewTransitionOpts,flushSync:b.flushSync===!0})),f.v7_fetcherPersist&&(S.forEach(D=>x.fetchers.delete(D)),T.forEach(D=>zr(D)))}function Yn(y,b,S){var T,D;let{flushSync:$}=S===void 0?{}:S,W=x.actionData!=null&&x.navigation.formMethod!=null&&bt(x.navigation.formMethod)&&x.navigation.state==="loading"&&((T=y.state)==null?void 0:T._isRedirect)!==!0,M;b.actionData?Object.keys(b.actionData).length>0?M=b.actionData:M=null:W?M=x.actionData:M=null;let O=b.loaderData?Kc(x.loaderData,b.loaderData,b.matches||[],b.errors):x.loaderData,N=x.blockers;N.size>0&&(N=new Map(N),N.forEach((J,Ne)=>N.set(Ne,Xi)));let B=A===!0||x.navigation.formMethod!=null&&bt(x.navigation.formMethod)&&((D=y.state)==null?void 0:D._isRedirect)!==!0;s&&(o=s,s=void 
0),Se||L===Ce.Pop||(L===Ce.Push?e.history.push(y,y.state):L===Ce.Replace&&e.history.replace(y,y.state));let Y;if(L===Ce.Pop){let J=re.get(x.location.pathname);J&&J.has(y.pathname)?Y={currentLocation:x.location,nextLocation:y}:re.has(y.pathname)&&(Y={currentLocation:y,nextLocation:x.location})}else if(F){let J=re.get(x.location.pathname);J?J.add(y.pathname):(J=new Set([y.pathname]),re.set(x.location.pathname,J)),Y={currentLocation:x.location,nextLocation:y}}He(fe({},b,{actionData:M,loaderData:O,historyAction:L,location:y,initialized:!0,navigation:is,revalidation:"idle",restoreScrollPosition:Su(y,b.matches||x.matches),preventScrollReset:B,blockers:N}),{viewTransitionOpts:Y,flushSync:$===!0}),L=Ce.Pop,A=!1,F=!1,Se=!1,Ee=!1,Xt=[]}async function mu(y,b){if(typeof y=="number"){e.history.go(y);return}let S=Zs(x.location,x.matches,l,f.v7_prependBasename,y,f.v7_relativeSplatPath,b==null?void 0:b.fromRouteId,b==null?void 0:b.relative),{path:T,submission:D,error:$}=Uc(f.v7_normalizeFormMethod,!1,S,b),W=x.location,M=jr(x.location,T,b&&b.state);M=fe({},M,e.history.encodeLocation(M));let O=b&&b.replace!=null?b.replace:void 0,N=Ce.Push;O===!0?N=Ce.Replace:O===!1||D!=null&&bt(D.formMethod)&&D.formAction===x.location.pathname+x.location.search&&(N=Ce.Replace);let B=b&&"preventScrollReset"in b?b.preventScrollReset===!0:void 0,Y=(b&&b.flushSync)===!0,J=ku({currentLocation:W,nextLocation:M,historyAction:N});if(J){$r(J,{state:"blocked",location:M,proceed(){$r(J,{state:"proceeding",proceed:void 0,reset:void 0,location:M}),mu(y,b)},reset(){let Ne=new Map(x.blockers);Ne.set(J,Xi),He({blockers:Ne})}});return}return await An(N,M,{submission:D,pendingError:$,preventScrollReset:B,replace:b&&b.replace,enableViewTransition:b&&b.viewTransition,flushSync:Y})}function Mm(){if(To(),He({revalidation:"loading"}),x.navigation.state!=="submitting"){if(x.navigation.state==="idle"){An(x.historyAction,x.location,{startUninterruptedRevalidation:!0});return}An(L||x.historyAction,x.navigation.location,{overrideNavigation:x.navigation,enableViewTransition:F===!0})}}async function An(y,b,S){U&&U.abort(),U=null,L=y,Se=(S&&S.startUninterruptedRevalidation)===!0,Wm(x.location,x.matches),A=(S&&S.preventScrollReset)===!0,F=(S&&S.enableViewTransition)===!0;let T=s||o,D=S&&S.overrideNavigation,$=Rn(T,b,l),W=(S&&S.flushSync)===!0,M=Br($,T,b.pathname);if(M.active&&M.matches&&($=M.matches),!$){let{error:se,notFoundMatches:te,route:ve}=Co(b.pathname);Yn(b,{matches:te,loaderData:{},errors:{[ve.id]:se}},{flushSync:W});return}if(x.initialized&&!Ee&&dv(x.location,b)&&!(S&&S.submission&&bt(S.submission.formMethod))){Yn(b,{matches:$},{flushSync:W});return}U=new AbortController;let O=ti(e.history,b,U.signal,S&&S.submission),N;if(S&&S.pendingError)N=[Dn($).route.id,{type:ae.error,error:S.pendingError}];else if(S&&S.submission&&bt(S.submission.formMethod)){let se=await Om(O,b,S.submission,$,M.active,{replace:S.replace,flushSync:W});if(se.shortCircuited)return;if(se.pendingActionResult){let[te,ve]=se.pendingActionResult;if(it(ve)&&wo(ve.error)&&ve.error.status===404){U=null,Yn(b,{matches:se.matches,loaderData:{},errors:{[te]:ve.error}});return}}$=se.matches||$,N=se.pendingActionResult,D=rs(b,S.submission),W=!1,M.active=!1,O=ti(e.history,O.url,O.signal)}let{shortCircuited:B,matches:Y,loaderData:J,errors:Ne}=await Fm(O,b,$,M.active,D,S&&S.submission,S&&S.fetcherSubmission,S&&S.replace,S&&S.initialHydration===!0,W,N);B||(U=null,Yn(b,fe({matches:Y||$},Qc(N),{loaderData:J,errors:Ne})))}async function Om(y,b,S,T,D,$){$===void 0&&($={}),To();let 
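/*
 * completeNavigation and router.navigate() above also encode the
 * PUSH-vs-REPLACE policy: an explicit `replace` option wins, and otherwise
 * a mutation submission (POST/PUT/PATCH/DELETE) aimed at the location the
 * user is already on is treated as a re-submission and becomes a REPLACE so
 * it does not pollute the back stack. Sketch of the heuristic (names
 * assumed):
 *
 *   let historyAction = "PUSH";
 *   if (opts.replace === true) {
 *     historyAction = "REPLACE";
 *   } else if (
 *     opts.replace !== false &&
 *     submission != null &&
 *     isMutationMethod(submission.formMethod) &&
 *     submission.formAction === state.location.pathname + state.location.search
 *   ) {
 *     historyAction = "REPLACE"; // resubmitting to the current location
 *   }
 */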
W=yv(b,S);if(He({navigation:W},{flushSync:$.flushSync===!0}),D){let N=await Vr(T,b.pathname,y.signal);if(N.type==="aborted")return{shortCircuited:!0};if(N.type==="error"){let B=Dn(N.partialMatches).route.id;return{matches:N.partialMatches,pendingActionResult:[B,{type:ae.error,error:N.error}]}}else if(N.matches)T=N.matches;else{let{notFoundMatches:B,error:Y,route:J}=Co(b.pathname);return{matches:B,pendingActionResult:[J.id,{type:ae.error,error:Y}]}}}let M,O=nr(T,b);if(!O.route.action&&!O.route.lazy)M={type:ae.error,error:Ye(405,{method:y.method,pathname:b.pathname,routeId:O.route.id})};else if(M=(await Oi("action",x,y,[O],T,null))[O.route.id],y.signal.aborted)return{shortCircuited:!0};if(Fn(M)){let N;return $&&$.replace!=null?N=$.replace:N=Hc(M.response.headers.get("Location"),new URL(y.url),l)===x.location.pathname+x.location.search,await In(y,M,!0,{submission:S,replace:N}),{shortCircuited:!0}}if(dn(M))throw Ye(400,{type:"defer-action"});if(it(M)){let N=Dn(T,O.route.id);return($&&$.replace)!==!0&&(L=Ce.Push),{matches:T,pendingActionResult:[N.route.id,M]}}return{matches:T,pendingActionResult:[O.route.id,M]}}async function Fm(y,b,S,T,D,$,W,M,O,N,B){let Y=D||rs(b,$),J=$||W||Jc(Y),Ne=!Se&&(!f.v7_partialHydration||!O);if(T){if(Ne){let we=hu(B);He(fe({navigation:Y},we!==void 0?{actionData:we}:{}),{flushSync:N})}let G=await Vr(S,b.pathname,y.signal);if(G.type==="aborted")return{shortCircuited:!0};if(G.type==="error"){let we=Dn(G.partialMatches).route.id;return{matches:G.partialMatches,loaderData:{},errors:{[we]:G.error}}}else if(G.matches)S=G.matches;else{let{error:we,notFoundMatches:Zn,route:zi}=Co(b.pathname);return{matches:Zn,loaderData:{},errors:{[zi.id]:we}}}}let se=s||o,[te,ve]=$c(e.history,x,S,J,b,f.v7_partialHydration&&O===!0,f.v7_skipActionErrorRevalidation,Ee,Xt,Yt,De,ue,ee,se,l,B);if(Ao(G=>!(S&&S.some(we=>we.route.id===G))||te&&te.some(we=>we.route.id===G)),H=++P,te.length===0&&ve.length===0){let G=wu();return Yn(b,fe({matches:S,loaderData:{},errors:B&&it(B[1])?{[B[0]]:B[1].error}:null},Qc(B),G?{fetchers:new Map(x.fetchers)}:{}),{flushSync:N}),{shortCircuited:!0}}if(Ne){let G={};if(!T){G.navigation=Y;let we=hu(B);we!==void 0&&(G.actionData=we)}ve.length>0&&(G.fetchers=Um(ve)),He(G,{flushSync:N})}ve.forEach(G=>{Zt(G.key),G.controller&&ke.set(G.key,G.controller)});let Jn=()=>ve.forEach(G=>Zt(G.key));U&&U.signal.addEventListener("abort",Jn);let{loaderResults:Fi,fetcherResults:Mt}=await gu(x,S,te,ve,y);if(y.signal.aborted)return{shortCircuited:!0};U&&U.signal.removeEventListener("abort",Jn),ve.forEach(G=>ke.delete(G.key));let Ct=ua(Fi);if(Ct)return await In(y,Ct.result,!0,{replace:M}),{shortCircuited:!0};if(Ct=ua(Mt),Ct)return ee.add(Ct.key),await In(y,Ct.result,!0,{replace:M}),{shortCircuited:!0};let{loaderData:Io,errors:Ui}=Wc(x,S,Fi,B,ve,Mt,yt);yt.forEach((G,we)=>{G.subscribe(Zn=>{(Zn||G.done)&&yt.delete(we)})}),f.v7_partialHydration&&O&&x.errors&&(Ui=fe({},x.errors,Ui));let jn=wu(),Hr=xu(H),qr=jn||Hr||ve.length>0;return fe({matches:S,loaderData:Io,errors:Ui},qr?{fetchers:new Map(x.fetchers)}:{})}function hu(y){if(y&&!it(y[1]))return{[y[0]]:y[1].data};if(x.actionData)return Object.keys(x.actionData).length===0?null:x.actionData}function Um(y){return y.forEach(b=>{let S=x.fetchers.get(b.key),T=Yi(void 0,S?S.data:void 0);x.fetchers.set(b.key,T)}),new Map(x.fetchers)}function zm(y,b,S,T){if(i)throw new Error("router.fetch() was called during the server render, but it shouldn't be. You are likely calling a useFetcher() method in the body of your component. 
Try moving it to a useEffect or a callback.");Zt(y);let D=(T&&T.flushSync)===!0,$=s||o,W=Zs(x.location,x.matches,l,f.v7_prependBasename,S,f.v7_relativeSplatPath,b,T==null?void 0:T.relative),M=Rn($,W,l),O=Br(M,$,W);if(O.active&&O.matches&&(M=O.matches),!M){Nt(y,b,Ye(404,{pathname:W}),{flushSync:D});return}let{path:N,submission:B,error:Y}=Uc(f.v7_normalizeFormMethod,!0,W,T);if(Y){Nt(y,b,Y,{flushSync:D});return}let J=nr(M,N),Ne=(T&&T.preventScrollReset)===!0;if(B&&bt(B.formMethod)){$m(y,b,N,J,M,O.active,D,Ne,B);return}ue.set(y,{routeId:b,path:N}),Bm(y,b,N,J,M,O.active,D,Ne,B)}async function $m(y,b,S,T,D,$,W,M,O){To(),ue.delete(y);function N(Te){if(!Te.route.action&&!Te.route.lazy){let Gn=Ye(405,{method:O.formMethod,pathname:S,routeId:b});return Nt(y,b,Gn,{flushSync:W}),!0}return!1}if(!$&&N(T))return;let B=x.fetchers.get(y);Jt(y,vv(O,B),{flushSync:W});let Y=new AbortController,J=ti(e.history,S,Y.signal,O);if($){let Te=await Vr(D,S,J.signal);if(Te.type==="aborted")return;if(Te.type==="error"){Nt(y,b,Te.error,{flushSync:W});return}else if(Te.matches){if(D=Te.matches,T=nr(D,S),N(T))return}else{Nt(y,b,Ye(404,{pathname:S}),{flushSync:W});return}}ke.set(y,Y);let Ne=P,te=(await Oi("action",x,J,[T],D,y))[T.route.id];if(J.signal.aborted){ke.get(y)===Y&&ke.delete(y);return}if(f.v7_fetcherPersist&&De.has(y)){if(Fn(te)||it(te)){Jt(y,en(void 0));return}}else{if(Fn(te))if(ke.delete(y),H>Ne){Jt(y,en(void 0));return}else return ee.add(y),Jt(y,Yi(O)),In(J,te,!1,{fetcherSubmission:O,preventScrollReset:M});if(it(te)){Nt(y,b,te.error);return}}if(dn(te))throw Ye(400,{type:"defer-action"});let ve=x.navigation.location||x.location,Jn=ti(e.history,ve,Y.signal),Fi=s||o,Mt=x.navigation.state!=="idle"?Rn(Fi,x.navigation.location,l):x.matches;Q(Mt,"Didn't find any matches after fetcher action");let Ct=++P;q.set(y,Ct);let Io=Yi(O,te.data);x.fetchers.set(y,Io);let[Ui,jn]=$c(e.history,x,Mt,O,ve,!1,f.v7_skipActionErrorRevalidation,Ee,Xt,Yt,De,ue,ee,Fi,l,[T.route.id,te]);jn.filter(Te=>Te.key!==y).forEach(Te=>{let Gn=Te.key,Eu=x.fetchers.get(Gn),Xm=Yi(void 0,Eu?Eu.data:void 0);x.fetchers.set(Gn,Xm),Zt(Gn),Te.controller&&ke.set(Gn,Te.controller)}),He({fetchers:new Map(x.fetchers)});let Hr=()=>jn.forEach(Te=>Zt(Te.key));Y.signal.addEventListener("abort",Hr);let{loaderResults:qr,fetcherResults:G}=await gu(x,Mt,Ui,jn,Jn);if(Y.signal.aborted)return;Y.signal.removeEventListener("abort",Hr),q.delete(y),ke.delete(y),jn.forEach(Te=>ke.delete(Te.key));let we=ua(qr);if(we)return In(Jn,we.result,!1,{preventScrollReset:M});if(we=ua(G),we)return ee.add(we.key),In(Jn,we.result,!1,{preventScrollReset:M});let{loaderData:Zn,errors:zi}=Wc(x,Mt,qr,void 0,jn,G,yt);if(x.fetchers.has(y)){let Te=en(te.data);x.fetchers.set(y,Te)}xu(Ct),x.navigation.state==="loading"&&Ct>H?(Q(L,"Expected pending action"),U&&U.abort(),Yn(x.navigation.location,{matches:Mt,loaderData:Zn,errors:zi,fetchers:new Map(x.fetchers)})):(He({errors:zi,loaderData:Kc(x.loaderData,Zn,Mt,zi),fetchers:new Map(x.fetchers)}),Ee=!1)}async function Bm(y,b,S,T,D,$,W,M,O){let N=x.fetchers.get(y);Jt(y,Yi(O,N?N.data:void 0),{flushSync:W});let B=new AbortController,Y=ti(e.history,S,B.signal);if($){let te=await Vr(D,S,Y.signal);if(te.type==="aborted")return;if(te.type==="error"){Nt(y,b,te.error,{flushSync:W});return}else if(te.matches)D=te.matches,T=nr(D,S);else{Nt(y,b,Ye(404,{pathname:S}),{flushSync:W});return}}ke.set(y,B);let J=P,se=(await Oi("loader",x,Y,[T],D,y))[T.route.id];if(dn(se)&&(se=await nu(se,Y.signal,!0)||se),ke.get(y)===B&&ke.delete(y),!Y.signal.aborted){if(De.has(y)){Jt(y,en(void 
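/*
 * The fetcher machinery around this point (router.fetch plus the
 * action/loader handlers it dispatches to) walks each fetcher through a
 * small state machine, idle -> submitting (action running) -> loading
 * (revalidation) -> idle, republishing state.fetchers on every transition.
 * The shapes, per the state factories later in this bundle:
 *
 *   // idle:       { state: "idle",       formMethod: undefined, data }
 *   // submitting: { state: "submitting", formMethod, formAction, formData, ... }
 *   // loading:    { state: "loading",    formMethod?, ..., data }
 */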
0));return}if(Fn(se))if(H>J){Jt(y,en(void 0));return}else{ee.add(y),await In(Y,se,!1,{preventScrollReset:M});return}if(it(se)){Nt(y,b,se.error);return}Q(!dn(se),"Unhandled fetcher deferred data"),Jt(y,en(se.data))}}async function In(y,b,S,T){let{submission:D,fetcherSubmission:$,preventScrollReset:W,replace:M}=T===void 0?{}:T;b.response.headers.has("X-Remix-Revalidate")&&(Ee=!0);let O=b.response.headers.get("Location");Q(O,"Expected a Location header on the redirect Response"),O=Hc(O,new URL(y.url),l);let N=jr(x.location,O,{_isRedirect:!0});if(n){let te=!1;if(b.response.headers.has("X-Remix-Reload-Document"))te=!0;else if(tu.test(O)){const ve=e.history.createURL(O);te=ve.origin!==t.location.origin||Ni(ve.pathname,l)==null}if(te){M?t.location.replace(O):t.location.assign(O);return}}U=null;let B=M===!0||b.response.headers.has("X-Remix-Replace")?Ce.Replace:Ce.Push,{formMethod:Y,formAction:J,formEncType:Ne}=x.navigation;!D&&!$&&Y&&J&&Ne&&(D=Jc(x.navigation));let se=D||$;if(Zy.has(b.response.status)&&se&&bt(se.formMethod))await An(B,N,{submission:fe({},se,{formAction:O}),preventScrollReset:W||A,enableViewTransition:S?F:void 0});else{let te=rs(N,D);await An(B,N,{overrideNavigation:te,fetcherSubmission:$,preventScrollReset:W||A,enableViewTransition:S?F:void 0})}}async function Oi(y,b,S,T,D,$){let W,M={};try{W=await ov(u,y,b,S,T,D,$,a,r)}catch(O){return T.forEach(N=>{M[N.route.id]={type:ae.error,error:O}}),M}for(let[O,N]of Object.entries(W))if(fv(N)){let B=N.result;M[O]={type:ae.redirect,response:uv(B,S,O,D,l,f.v7_relativeSplatPath)}}else M[O]=await lv(N);return M}async function gu(y,b,S,T,D){let $=y.matches,W=Oi("loader",y,D,S,b,null),M=Promise.all(T.map(async B=>{if(B.matches&&B.match&&B.controller){let J=(await Oi("loader",y,ti(e.history,B.path,B.controller.signal),[B.match],B.matches,B.key))[B.match.route.id];return{[B.key]:J}}else return Promise.resolve({[B.key]:{type:ae.error,error:Ye(404,{pathname:B.path})}})})),O=await W,N=(await M).reduce((B,Y)=>Object.assign(B,Y),{});return await Promise.all([hv(b,O,D.signal,$,y.loaderData),gv(b,N,T)]),{loaderResults:O,fetcherResults:N}}function To(){Ee=!0,Xt.push(...Ao()),ue.forEach((y,b)=>{ke.has(b)&&Yt.add(b),Zt(b)})}function Jt(y,b,S){S===void 0&&(S={}),x.fetchers.set(y,b),He({fetchers:new Map(x.fetchers)},{flushSync:(S&&S.flushSync)===!0})}function Nt(y,b,S,T){T===void 0&&(T={});let D=Dn(x.matches,b);zr(y),He({errors:{[D.route.id]:S},fetchers:new Map(x.fetchers)},{flushSync:(T&&T.flushSync)===!0})}function yu(y){return f.v7_fetcherPersist&&(Tt.set(y,(Tt.get(y)||0)+1),De.has(y)&&De.delete(y)),x.fetchers.get(y)||Gy}function zr(y){let b=x.fetchers.get(y);ke.has(y)&&!(b&&b.state==="loading"&&q.has(y))&&Zt(y),ue.delete(y),q.delete(y),ee.delete(y),De.delete(y),Yt.delete(y),x.fetchers.delete(y)}function Vm(y){if(f.v7_fetcherPersist){let b=(Tt.get(y)||0)-1;b<=0?(Tt.delete(y),De.add(y)):Tt.set(y,b)}else zr(y);He({fetchers:new Map(x.fetchers)})}function Zt(y){let b=ke.get(y);b&&(b.abort(),ke.delete(y))}function vu(y){for(let b of y){let S=yu(b),T=en(S.data);x.fetchers.set(b,T)}}function wu(){let y=[],b=!1;for(let S of ee){let T=x.fetchers.get(S);Q(T,"Expected fetcher: "+S),T.state==="loading"&&(ee.delete(S),y.push(S),b=!0)}return vu(y),b}function xu(y){let b=[];for(let[S,T]of q)if(T0}function Hm(y,b){let S=x.blockers.get(y)||Xi;return Ve.get(y)!==b&&Ve.set(y,b),S}function bu(y){x.blockers.delete(y),Ve.delete(y)}function $r(y,b){let 
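/*
 * The redirect handler above consumes a redirect Response: it requires a
 * Location header, normalizes it against the basename, and falls back to a
 * full document reload (window.location.assign/replace) when an
 * X-Remix-Reload-Document header is present or the target is an absolute
 * URL that leaves the current origin or basename. Statuses 307/308 preserve
 * the method and body, so the router replays the submission at the new
 * location rather than just loading it. Sketch of the document-reload test
 * (helper names assumed):
 *
 *   const ABSOLUTE_URL = /^(?:[a-z][a-z0-9+.-]*:|\/\/)/i;
 *   function needsDocumentReload(location, basename, response) {
 *     if (response.headers.has("X-Remix-Reload-Document")) return true;
 *     if (!ABSOLUTE_URL.test(location)) return false;
 *     const url = new URL(location, window.location.origin);
 *     return url.origin !== window.location.origin ||
 *            stripBasename(url.pathname, basename) == null;
 *   }
 */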
S=x.blockers.get(y)||Xi;Q(S.state==="unblocked"&&b.state==="blocked"||S.state==="blocked"&&b.state==="blocked"||S.state==="blocked"&&b.state==="proceeding"||S.state==="blocked"&&b.state==="unblocked"||S.state==="proceeding"&&b.state==="unblocked","Invalid blocker state transition: "+S.state+" -> "+b.state);let T=new Map(x.blockers);T.set(y,b),He({blockers:T})}function ku(y){let{currentLocation:b,nextLocation:S,historyAction:T}=y;if(Ve.size===0)return;Ve.size>1&&Wn(!1,"A router only supports one blocker at a time");let D=Array.from(Ve.entries()),[$,W]=D[D.length-1],M=x.blockers.get($);if(!(M&&M.state==="proceeding")&&W({currentLocation:b,nextLocation:S,historyAction:T}))return $}function Co(y){let b=Ye(404,{pathname:y}),S=s||o,{matches:T,route:D}=Xc(S);return Ao(),{notFoundMatches:T,route:D,error:b}}function Ao(y){let b=[];return yt.forEach((S,T)=>{(!y||y(T))&&(S.cancel(),b.push(T),yt.delete(T))}),b}function qm(y,b,S){if(_=y,I=b,k=S||null,!g&&x.navigation===is){g=!0;let T=Su(x.location,x.matches);T!=null&&He({restoreScrollPosition:T})}return()=>{_=null,I=null,k=null}}function _u(y,b){return k&&k(y,b.map(T=>jy(T,x.loaderData)))||y.key}function Wm(y,b){if(_&&I){let S=_u(y,b);_[S]=I()}}function Su(y,b){if(_){let S=_u(y,b),T=_[S];if(typeof T=="number")return T}return null}function Br(y,b,S){if(m)if(y){if(Object.keys(y[0].params).length>0)return{active:!0,matches:Sa(b,S,l,!0)}}else return{active:!0,matches:Sa(b,S,l,!0)||[]};return{active:!1,matches:null}}async function Vr(y,b,S){if(!m)return{type:"success",matches:y};let T=y;for(;;){let D=s==null,$=s||o,W=a;try{await m({path:b,matches:T,patch:(N,B)=>{S.aborted||Vc(N,B,$,W,r)}})}catch(N){return{type:"error",error:N,partialMatches:T}}finally{D&&!S.aborted&&(o=[...o])}if(S.aborted)return{type:"aborted"};let M=Rn($,b,l);if(M)return{type:"success",matches:M};let O=Sa($,b,l,!0);if(!O||T.length===O.length&&T.every((N,B)=>N.route.id===O[B].route.id))return{type:"success",matches:null};T=O}}function Km(y){a={},s=Ya(y,r,void 0,a)}function Qm(y,b){let S=s==null;Vc(y,b,s||o,a,r),S&&(o=[...o],He({}))}return j={get basename(){return l},get future(){return f},get state(){return x},get routes(){return o},get window(){return t},initialize:Rm,subscribe:Nm,enableScrollRestoration:qm,navigate:mu,fetch:zm,revalidate:Mm,createHref:y=>e.history.createHref(y),encodeLocation:y=>e.history.encodeLocation(y),getFetcher:yu,deleteFetcher:Vm,dispose:Dm,getBlocker:Hm,deleteBlocker:bu,patchRoutes:Qm,_internalFetchControllers:ke,_internalActiveDeferreds:yt,_internalSetRoutes:Km},j}function nv(e){return e!=null&&("formData"in e&&e.formData!=null||"body"in e&&e.body!==void 0)}function Zs(e,t,n,i,r,a,o,s){let l,u;if(o){l=[];for(let f of t)if(l.push(f),f.route.id===o){u=f;break}}else l=t,u=t[t.length-1];let m=vo(r||".",yo(l,a),Ni(e.pathname,n)||e.pathname,s==="path");if(r==null&&(m.search=e.search,m.hash=e.hash),(r==null||r===""||r===".")&&u){let f=iu(m.search);if(u.route.index&&!f)m.search=m.search?m.search.replace(/^\?/,"?index&"):"?index";else if(!u.route.index&&f){let h=new URLSearchParams(m.search),v=h.getAll("index");h.delete("index"),v.filter(k=>k).forEach(k=>h.append("index",k));let _=h.toString();m.search=_?"?"+_:""}}return i&&n!=="/"&&(m.pathname=m.pathname==="/"?n:Bt([n,m.pathname])),Kn(m)}function Uc(e,t,n,i){if(!i||!nv(i))return{path:n};if(i.formMethod&&!mv(i.formMethod))return{path:n,error:Ye(405,{method:i.formMethod})};let r=()=>({path:n,error:Ye(400,{type:"invalid-body"})}),a=i.formMethod||"get",o=e?a.toUpperCase():a.toLowerCase(),s=Pp(n);if(i.body!==void 
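/*
 * This stretch finishes the router internals (blocker state machine, scroll
 * restoration, the patchRoutesOnNavigation loop) and then returns the
 * public router object: initialize, subscribe, navigate, fetch, revalidate,
 * getFetcher, getBlocker, dispose, and so on. The blocker invariant
 * asserted above allows exactly these transitions:
 *
 *   unblocked  -> blocked
 *   blocked    -> blocked | proceeding | unblocked
 *   proceeding -> unblocked
 *
 * Only one blocker is supported at a time; registering more triggers a
 * console warning and the most recently registered one wins.
 */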
0){if(i.formEncType==="text/plain"){if(!bt(o))return r();let h=typeof i.body=="string"?i.body:i.body instanceof FormData||i.body instanceof URLSearchParams?Array.from(i.body.entries()).reduce((v,_)=>{let[k,I]=_;return""+v+k+"="+I+` `},""):String(i.body);return{path:n,submission:{formMethod:o,formAction:s,formEncType:i.formEncType,formData:void 0,json:void 0,text:h}}}else if(i.formEncType==="application/json"){if(!bt(o))return r();try{let h=typeof i.body=="string"?JSON.parse(i.body):i.body;return{path:n,submission:{formMethod:o,formAction:s,formEncType:i.formEncType,formData:void 0,json:h,text:void 0}}}catch{return r()}}}Q(typeof FormData=="function","FormData is not available in this environment");let l,u;if(i.formData)l=el(i.formData),u=i.formData;else if(i.body instanceof FormData)l=el(i.body),u=i.body;else if(i.body instanceof URLSearchParams)l=i.body,u=qc(l);else if(i.body==null)l=new URLSearchParams,u=new FormData;else try{l=new URLSearchParams(i.body),u=qc(l)}catch{return r()}let m={formMethod:o,formAction:s,formEncType:i&&i.formEncType||"application/x-www-form-urlencoded",formData:u,json:void 0,text:void 0};if(bt(m.formMethod))return{path:n,submission:m};let f=Qt(n);return t&&f.search&&iu(f.search)&&l.append("index",""),f.search="?"+l,{path:Kn(f),submission:m}}function zc(e,t,n){n===void 0&&(n=!1);let i=e.findIndex(r=>r.route.id===t);return i>=0?e.slice(0,n?i+1:i):e}function $c(e,t,n,i,r,a,o,s,l,u,m,f,h,v,_,k){let I=k?it(k[1])?k[1].error:k[1].data:void 0,g=e.createURL(t.location),p=e.createURL(r),w=n;a&&t.errors?w=zc(n,Object.keys(t.errors)[0],!0):k&&it(k[1])&&(w=zc(n,k[0]));let E=k?k[1].statusCode:void 0,j=o&&E&&E>=400,x=w.filter((A,U)=>{let{route:F}=A;if(F.lazy)return!0;if(F.loader==null)return!1;if(a)return Gs(F,t.loaderData,t.errors);if(iv(t.loaderData,t.matches[U],A)||l.some(Se=>Se===A.route.id))return!0;let re=t.matches[U],me=A;return Bc(A,fe({currentUrl:g,currentParams:re.params,nextUrl:p,nextParams:me.params},i,{actionResult:I,actionStatus:E,defaultShouldRevalidate:j?!1:s||g.pathname+g.search===p.pathname+p.search||g.search!==p.search||jp(re,me)}))}),L=[];return f.forEach((A,U)=>{if(a||!n.some(Ee=>Ee.route.id===A.routeId)||m.has(U))return;let F=Rn(v,A.path,_);if(!F){L.push({key:U,routeId:A.routeId,path:A.path,matches:null,match:null,controller:null});return}let re=t.fetchers.get(U),me=nr(F,A.path),Se=!1;h.has(U)?Se=!1:u.has(U)?(u.delete(U),Se=!0):re&&re.state!=="idle"&&re.data===void 0?Se=s:Se=Bc(me,fe({currentUrl:g,currentParams:t.matches[t.matches.length-1].params,nextUrl:p,nextParams:n[n.length-1].params},i,{actionResult:I,actionStatus:E,defaultShouldRevalidate:j?!1:s})),Se&&L.push({key:U,routeId:A.routeId,path:A.path,matches:F,match:me,controller:new AbortController})}),[x,L]}function Gs(e,t,n){if(e.lazy)return!0;if(!e.loader)return!1;let i=t!=null&&t[e.id]!==void 0,r=n!=null&&n[e.id]!==void 0;return!i&&r?!1:typeof e.loader=="function"&&e.loader.hydrate===!0?!0:!i&&!r}function iv(e,t,n){let i=!t||n.route.id!==t.route.id,r=e[n.route.id]===void 0;return i||r}function jp(e,t){let n=e.route.path;return e.pathname!==t.pathname||n!=null&&n.endsWith("*")&&e.params["*"]!==t.params["*"]}function Bc(e,t){if(e.route.shouldRevalidate){let n=e.route.shouldRevalidate(t);if(typeof n=="boolean")return n}return t.defaultShouldRevalidate}function Vc(e,t,n,i,r){var a;let o;if(e){let u=i[e];Q(u,"No route found to patch children into: routeId = "+e),u.children||(u.children=[]),o=u.children}else o=n;let s=t.filter(u=>!o.some(m=>Lp(u,m))),l=Ya(s,r,[e||"_","patch",String(((a=o)==null?void 
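/*
 * The normalizer above converts a navigate()/fetch() body into one of three
 * submission shapes keyed on formEncType: text/plain (raw text),
 * application/json (parsed JSON), or the default
 * application/x-www-form-urlencoded (FormData + URLSearchParams). For GET
 * submissions the params are serialized into ?search instead, preserving a
 * bare "index" param for index routes. The revalidation planner that
 * follows consults each matched route's shouldRevalidate(), passing a
 * defaultShouldRevalidate derived from search-param changes, action status,
 * and the v7_skipActionErrorRevalidation flag. Illustrative route-module
 * usage of that hook:
 *
 *   function shouldRevalidate({ currentUrl, nextUrl, defaultShouldRevalidate }) {
 *     // skip reloading this route's loader when only the hash changed
 *     if (currentUrl.pathname === nextUrl.pathname &&
 *         currentUrl.search === nextUrl.search) {
 *       return false;
 *     }
 *     return defaultShouldRevalidate;
 *   }
 */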
0:a.length)||"0")],i);o.push(...l)}function Lp(e,t){return"id"in e&&"id"in t&&e.id===t.id?!0:e.index===t.index&&e.path===t.path&&e.caseSensitive===t.caseSensitive?(!e.children||e.children.length===0)&&(!t.children||t.children.length===0)?!0:e.children.every((n,i)=>{var r;return(r=t.children)==null?void 0:r.some(a=>Lp(n,a))}):!1}async function rv(e,t,n){if(!e.lazy)return;let i=await e.lazy();if(!e.lazy)return;let r=n[e.id];Q(r,"No route found in manifest");let a={};for(let o in i){let l=r[o]!==void 0&&o!=="hasErrorBoundary";Wn(!l,'Route "'+r.id+'" has a static property "'+o+'" defined but its lazy function is also returning a value for this property. '+('The lazy route property "'+o+'" will be ignored.')),!l&&!Ay.has(o)&&(a[o]=i[o])}Object.assign(r,a),Object.assign(r,fe({},t(r),{lazy:void 0}))}async function av(e){let{matches:t}=e,n=t.filter(r=>r.shouldLoad);return(await Promise.all(n.map(r=>r.resolve()))).reduce((r,a,o)=>Object.assign(r,{[n[o].route.id]:a}),{})}async function ov(e,t,n,i,r,a,o,s,l,u){let m=a.map(v=>v.route.lazy?rv(v.route,l,s):void 0),f=a.map((v,_)=>{let k=m[_],I=r.some(p=>p.route.id===v.route.id);return fe({},v,{shouldLoad:I,resolve:async p=>(p&&i.method==="GET"&&(v.route.lazy||v.route.loader)&&(I=!0),I?sv(t,i,v,k,p,u):Promise.resolve({type:ae.data,result:void 0}))})}),h=await e({matches:f,request:i,params:a[0].params,fetcherKey:o,context:u});try{await Promise.all(m)}catch{}return h}async function sv(e,t,n,i,r,a){let o,s,l=u=>{let m,f=new Promise((_,k)=>m=k);s=()=>m(),t.signal.addEventListener("abort",s);let h=_=>typeof u!="function"?Promise.reject(new Error("You cannot call the handler for a route which defines a boolean "+('"'+e+'" [routeId: '+n.route.id+"]"))):u({request:t,params:n.params,context:a},..._!==void 0?[_]:[]),v=(async()=>{try{return{type:"data",result:await(r?r(k=>h(k)):h())}}catch(_){return{type:"error",result:_}}})();return Promise.race([v,f])};try{let u=n.route[e];if(i)if(u){let m,[f]=await Promise.all([l(u).catch(h=>{m=h}),i]);if(m!==void 0)throw m;o=f}else if(await i,u=n.route[e],u)o=await l(u);else if(e==="action"){let m=new URL(t.url),f=m.pathname+m.search;throw Ye(405,{method:t.method,pathname:f,routeId:n.route.id})}else return{type:ae.data,result:void 0};else if(u)o=await l(u);else{let m=new URL(t.url),f=m.pathname+m.search;throw Ye(404,{pathname:f})}Q(o.result!==void 0,"You defined "+(e==="action"?"an action":"a loader")+" for route "+('"'+n.route.id+"\" but didn't return anything from your `"+e+"` ")+"function. 
Please return a value or `null`.")}catch(u){return{type:ae.error,result:u}}finally{s&&t.signal.removeEventListener("abort",s)}return o}async function lv(e){let{result:t,type:n}=e;if(Rp(t)){let u;try{let m=t.headers.get("Content-Type");m&&/\bapplication\/json\b/.test(m)?t.body==null?u=null:u=await t.json():u=await t.text()}catch(m){return{type:ae.error,error:m}}return n===ae.error?{type:ae.error,error:new Ja(t.status,t.statusText,u),statusCode:t.status,headers:t.headers}:{type:ae.data,data:u,statusCode:t.status,headers:t.headers}}if(n===ae.error){if(Yc(t)){var i;if(t.data instanceof Error){var r;return{type:ae.error,error:t.data,statusCode:(r=t.init)==null?void 0:r.status}}t=new Ja(((i=t.init)==null?void 0:i.status)||500,void 0,t.data)}return{type:ae.error,error:t,statusCode:wo(t)?t.status:void 0}}if(pv(t)){var a,o;return{type:ae.deferred,deferredData:t,statusCode:(a=t.init)==null?void 0:a.status,headers:((o=t.init)==null?void 0:o.headers)&&new Headers(t.init.headers)}}if(Yc(t)){var s,l;return{type:ae.data,data:t.data,statusCode:(s=t.init)==null?void 0:s.status,headers:(l=t.init)!=null&&l.headers?new Headers(t.init.headers):void 0}}return{type:ae.data,data:t}}function uv(e,t,n,i,r,a){let o=e.headers.get("Location");if(Q(o,"Redirects returned/thrown from loaders/actions must have a Location header"),!tu.test(o)){let s=i.slice(0,i.findIndex(l=>l.route.id===n)+1);o=Zs(new URL(t.url),s,r,!0,o,a),e.headers.set("Location",o)}return e}function Hc(e,t,n){if(tu.test(e)){let i=e,r=i.startsWith("//")?new URL(t.protocol+i):new URL(i),a=Ni(r.pathname,n)!=null;if(r.origin===t.origin&&a)return r.pathname+r.search+r.hash}return e}function ti(e,t,n,i){let r=e.createURL(Pp(t)).toString(),a={signal:n};if(i&&bt(i.formMethod)){let{formMethod:o,formEncType:s}=i;a.method=o.toUpperCase(),s==="application/json"?(a.headers=new Headers({"Content-Type":s}),a.body=JSON.stringify(i.json)):s==="text/plain"?a.body=i.text:s==="application/x-www-form-urlencoded"&&i.formData?a.body=el(i.formData):a.body=i.formData}return new Request(r,a)}function el(e){let t=new URLSearchParams;for(let[n,i]of e.entries())t.append(n,typeof i=="string"?i:i.name);return t}function qc(e){let t=new FormData;for(let[n,i]of e.entries())t.append(n,i);return t}function cv(e,t,n,i,r){let a={},o=null,s,l=!1,u={},m=n&&it(n[1])?n[1].error:void 0;return e.forEach(f=>{if(!(f.route.id in t))return;let h=f.route.id,v=t[h];if(Q(!Fn(v),"Cannot handle redirect results in processLoaderData"),it(v)){let _=v.error;m!==void 0&&(_=m,m=void 0),o=o||{};{let k=Dn(e,h);o[k.route.id]==null&&(o[k.route.id]=_)}a[h]=void 0,l||(l=!0,s=wo(v.error)?v.error.status:500),v.headers&&(u[h]=v.headers)}else dn(v)?(i.set(h,v.deferredData),a[h]=v.deferredData.data,v.statusCode!=null&&v.statusCode!==200&&!l&&(s=v.statusCode),v.headers&&(u[h]=v.headers)):(a[h]=v.data,v.statusCode&&v.statusCode!==200&&!l&&(s=v.statusCode),v.headers&&(u[h]=v.headers))}),m!==void 0&&n&&(o={[n[0]]:m},a[n[0]]=void 0),{loaderData:a,errors:o,statusCode:s||200,loaderHeaders:u}}function Wc(e,t,n,i,r,a,o){let{loaderData:s,errors:l}=cv(t,n,i,o);return r.forEach(u=>{let{key:m,match:f,controller:h}=u,v=a[m];if(Q(v,"Did not find corresponding fetcher result"),!(h&&h.signal.aborted))if(it(v)){let _=Dn(e.matches,f==null?void 0:f.route.id);l&&l[_.route.id]||(l=fe({},l,{[_.route.id]:v.error})),e.fetchers.delete(m)}else if(Fn(v))Q(!1,"Unhandled fetcher revalidation redirect");else if(dn(v))Q(!1,"Unhandled fetcher deferred data");else{let _=en(v.data);e.fetchers.set(m,_)}}),{loaderData:s,errors:l}}function Kc(e,t,n,i){let 
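/*
 * Among the helpers above, the client-side Request factory strips the hash
 * from the URL, attaches the abort signal, and encodes the body per
 * formEncType before loaders/actions see it. Readable sketch of the same
 * branching (function name assumed):
 *
 *   function toRequest(url, signal, submission) {
 *     const init = { signal };
 *     if (submission && isMutationMethod(submission.formMethod)) {
 *       init.method = submission.formMethod.toUpperCase();
 *       if (submission.formEncType === "application/json") {
 *         init.headers = new Headers({ "Content-Type": "application/json" });
 *         init.body = JSON.stringify(submission.json);
 *       } else if (submission.formEncType === "text/plain") {
 *         init.body = submission.text;
 *       } else {
 *         init.body = submission.formData; // urlencoded string or FormData
 *       }
 *     }
 *     return new Request(url, init);
 *   }
 */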
r=fe({},t);for(let a of n){let o=a.route.id;if(t.hasOwnProperty(o)?t[o]!==void 0&&(r[o]=t[o]):e[o]!==void 0&&a.route.loader&&(r[o]=e[o]),i&&i.hasOwnProperty(o))break}return r}function Qc(e){return e?it(e[1])?{actionData:{}}:{actionData:{[e[0]]:e[1].data}}:{}}function Dn(e,t){return(t?e.slice(0,e.findIndex(i=>i.route.id===t)+1):[...e]).reverse().find(i=>i.route.hasErrorBoundary===!0)||e[0]}function Xc(e){let t=e.length===1?e[0]:e.find(n=>n.index||!n.path||n.path==="/")||{id:"__shim-error-route__"};return{matches:[{params:{},pathname:"",pathnameBase:"",route:t}],route:t}}function Ye(e,t){let{pathname:n,routeId:i,method:r,type:a,message:o}=t===void 0?{}:t,s="Unknown Server Error",l="Unknown @remix-run/router error";return e===400?(s="Bad Request",r&&n&&i?l="You made a "+r+' request to "'+n+'" but '+('did not provide a `loader` for route "'+i+'", ')+"so there is no way to handle the request.":a==="defer-action"?l="defer() is not supported in actions":a==="invalid-body"&&(l="Unable to encode submission body")):e===403?(s="Forbidden",l='Route "'+i+'" does not match URL "'+n+'"'):e===404?(s="Not Found",l='No route matches URL "'+n+'"'):e===405&&(s="Method Not Allowed",r&&n&&i?l="You made a "+r.toUpperCase()+' request to "'+n+'" but '+('did not provide an `action` for route "'+i+'", ')+"so there is no way to handle the request.":r&&(l='Invalid request method "'+r.toUpperCase()+'"')),new Ja(e||500,s,new Error(l),!0)}function ua(e){let t=Object.entries(e);for(let n=t.length-1;n>=0;n--){let[i,r]=t[n];if(Fn(r))return{key:i,result:r}}}function Pp(e){let t=typeof e=="string"?Qt(e):e;return Kn(fe({},t,{hash:""}))}function dv(e,t){return e.pathname!==t.pathname||e.search!==t.search?!1:e.hash===""?t.hash!=="":e.hash===t.hash?!0:t.hash!==""}function fv(e){return Rp(e.result)&&Jy.has(e.result.status)}function dn(e){return e.type===ae.deferred}function it(e){return e.type===ae.error}function Fn(e){return(e&&e.type)===ae.redirect}function Yc(e){return typeof e=="object"&&e!=null&&"type"in e&&"data"in e&&"init"in e&&e.type==="DataWithResponseInit"}function pv(e){let t=e;return t&&typeof t=="object"&&typeof t.data=="object"&&typeof t.subscribe=="function"&&typeof t.cancel=="function"&&typeof t.resolveData=="function"}function Rp(e){return e!=null&&typeof e.status=="number"&&typeof e.statusText=="string"&&typeof e.headers=="object"&&typeof e.body<"u"}function mv(e){return Yy.has(e.toLowerCase())}function bt(e){return Qy.has(e.toLowerCase())}async function hv(e,t,n,i,r){let a=Object.entries(t);for(let o=0;o(h==null?void 0:h.route.id)===s);if(!u)continue;let m=i.find(h=>h.route.id===u.route.id),f=m!=null&&!jp(m,u)&&(r&&r[u.route.id])!==void 0;dn(l)&&f&&await nu(l,n,!1).then(h=>{h&&(t[s]=h)})}}async function gv(e,t,n){for(let i=0;i(u==null?void 0:u.route.id)===a)&&dn(s)&&(Q(o,"Expected an AbortController for revalidating fetcher deferred result"),await nu(s,o.signal,!0).then(u=>{u&&(t[r]=u)}))}}async function nu(e,t,n){if(n===void 0&&(n=!1),!await e.deferredData.resolveData(t)){if(n)try{return{type:ae.data,data:e.deferredData.unwrappedData}}catch(r){return{type:ae.error,error:r}}return{type:ae.data,data:e.deferredData.data}}}function iu(e){return new URLSearchParams(e).getAll("index").some(t=>t==="")}function nr(e,t){let n=typeof t=="string"?Qt(t).search:t.search;if(e[e.length-1].route.index&&iu(n||""))return e[e.length-1];let i=Cp(e);return i[i.length-1]}function 
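/*
 * The error plumbing above includes the 400/403/404/405 factory (405 for a
 * submission to a route without an `action`, 404 when no route matches, 400
 * for defer() misuse or an unencodable body); the errors it mints carry
 * internal=true so the default boundary can tell router errors from
 * application throws, and errors bubble to the nearest ancestor route with
 * an error boundary. The duck-type test used throughout, matching the
 * public isRouteErrorResponse():
 *
 *   function isRouteErrorResponse(error) {
 *     return error != null &&
 *       typeof error.status === "number" &&
 *       typeof error.statusText === "string" &&
 *       typeof error.internal === "boolean" &&
 *       "data" in error;
 *   }
 */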
Jc(e){let{formMethod:t,formAction:n,formEncType:i,text:r,formData:a,json:o}=e;if(!(!t||!n||!i)){if(r!=null)return{formMethod:t,formAction:n,formEncType:i,formData:void 0,json:void 0,text:r};if(a!=null)return{formMethod:t,formAction:n,formEncType:i,formData:a,json:void 0,text:void 0};if(o!==void 0)return{formMethod:t,formAction:n,formEncType:i,formData:void 0,json:o,text:void 0}}}function rs(e,t){return t?{state:"loading",location:e,formMethod:t.formMethod,formAction:t.formAction,formEncType:t.formEncType,formData:t.formData,json:t.json,text:t.text}:{state:"loading",location:e,formMethod:void 0,formAction:void 0,formEncType:void 0,formData:void 0,json:void 0,text:void 0}}function yv(e,t){return{state:"submitting",location:e,formMethod:t.formMethod,formAction:t.formAction,formEncType:t.formEncType,formData:t.formData,json:t.json,text:t.text}}function Yi(e,t){return e?{state:"loading",formMethod:e.formMethod,formAction:e.formAction,formEncType:e.formEncType,formData:e.formData,json:e.json,text:e.text,data:t}:{state:"loading",formMethod:void 0,formAction:void 0,formEncType:void 0,formData:void 0,json:void 0,text:void 0,data:t}}function vv(e,t){return{state:"submitting",formMethod:e.formMethod,formAction:e.formAction,formEncType:e.formEncType,formData:e.formData,json:e.json,text:e.text,data:t?t.data:void 0}}function en(e){return{state:"idle",formMethod:void 0,formAction:void 0,formEncType:void 0,formData:void 0,json:void 0,text:void 0,data:e}}function wv(e,t){try{let n=e.sessionStorage.getItem(Ip);if(n){let i=JSON.parse(n);for(let[r,a]of Object.entries(i||{}))a&&Array.isArray(a)&&t.set(r,new Set(a||[]))}}catch{}}function xv(e,t){if(t.size>0){let n={};for(let[i,r]of t)n[i]=[...r];try{e.sessionStorage.setItem(Ip,JSON.stringify(n))}catch(i){Wn(!1,"Failed to save applied view transitions in sessionStorage ("+i+").")}}}/** * React Router v6.28.0 * * Copyright (c) Remix Software Inc. * * This source code is licensed under the MIT license found in the * LICENSE.md file in the root directory of this source tree. 
* * @license MIT */function Za(){return Za=Object.assign?Object.assign.bind():function(e){for(var t=1;t{s.current=!0}),d.useCallback(function(u,m){if(m===void 0&&(m={}),!s.current)return;if(typeof u=="number"){i.go(u);return}let f=vo(u,JSON.parse(o),a,m.relative==="path");e==null&&t!=="/"&&(f.pathname=f.pathname==="/"?t:Bt([t,f.pathname])),(m.replace?i.replace:i.push)(f,m.state,m)},[t,i,o,a,e])}const _v=d.createContext(null);function Sv(e){let t=d.useContext(Rt).outlet;return t&&d.createElement(_v.Provider,{value:e},t)}function Ev(){let{matches:e}=d.useContext(Rt),t=e[e.length-1];return t?t.params:{}}function Op(e,t){let{relative:n}=t===void 0?{}:t,{future:i}=d.useContext(Tn),{matches:r}=d.useContext(Rt),{pathname:a}=Or(),o=JSON.stringify(yo(r,i.v7_relativeSplatPath));return d.useMemo(()=>vo(e,JSON.parse(o),a,n==="path"),[e,o,a,n])}function Tv(e,t,n,i){Mi()||Q(!1);let{navigator:r}=d.useContext(Tn),{matches:a}=d.useContext(Rt),o=a[a.length-1],s=o?o.params:{};o&&o.pathname;let l=o?o.pathnameBase:"/";o&&o.route;let u=Or(),m;m=u;let f=m.pathname||"/",h=f;if(l!=="/"){let k=l.replace(/^\//,"").split("/");h="/"+f.replace(/^\//,"").split("/").slice(k.length).join("/")}let v=Rn(e,{pathname:h});return Lv(v&&v.map(k=>Object.assign({},k,{params:Object.assign({},s,k.params),pathname:Bt([l,r.encodeLocation?r.encodeLocation(k.pathname).pathname:k.pathname]),pathnameBase:k.pathnameBase==="/"?l:Bt([l,r.encodeLocation?r.encodeLocation(k.pathnameBase).pathname:k.pathnameBase])})),a,n,i)}function Cv(){let e=Nv(),t=wo(e)?e.status+" "+e.statusText:e instanceof Error?e.message:JSON.stringify(e),n=e instanceof Error?e.stack:null,r={padding:"0.5rem",backgroundColor:"rgba(200,200,200, 0.5)"};return d.createElement(d.Fragment,null,d.createElement("h2",null,"Unexpected Application Error!"),d.createElement("h3",{style:{fontStyle:"italic"}},t),n?d.createElement("pre",{style:r},n):null,null)}const Av=d.createElement(Cv,null);class Iv extends d.Component{constructor(t){super(t),this.state={location:t.location,revalidation:t.revalidation,error:t.error}}static getDerivedStateFromError(t){return{error:t}}static getDerivedStateFromProps(t,n){return n.location!==t.location||n.revalidation!=="idle"&&t.revalidation==="idle"?{error:t.error,location:t.location,revalidation:t.revalidation}:{error:t.error!==void 0?t.error:n.error,location:n.location,revalidation:t.revalidation||n.revalidation}}componentDidCatch(t,n){console.error("React Router caught the following error during render",t,n)}render(){return this.state.error!==void 0?d.createElement(Rt.Provider,{value:this.props.routeContext},d.createElement(Np.Provider,{value:this.state.error,children:this.props.component})):this.props.children}}function jv(e){let{routeContext:t,match:n,children:i}=e,r=d.useContext(xo);return r&&r.static&&r.staticContext&&(n.route.errorElement||n.route.ErrorBoundary)&&(r.staticContext._deepestRenderedBoundaryId=n.route.id),d.createElement(Rt.Provider,{value:t},i)}function Lv(e,t,n,i){var r;if(t===void 0&&(t=[]),n===void 0&&(n=null),i===void 0&&(i=null),e==null){var a;if(!n)return null;if(n.errors)e=n.matches;else if((a=i)!=null&&a.v7_partialHydration&&t.length===0&&!n.initialized&&n.matches.length>0)e=n.matches;else return null}let o=e,s=(r=n)==null?void 0:r.errors;if(s!=null){let m=o.findIndex(f=>f.route.id&&(s==null?void 0:s[f.route.id])!==void 0);m>=0||Q(!1),o=o.slice(0,Math.min(o.length,m+1))}let l=!1,u=-1;if(n&&i&&i.v7_partialHydration)for(let m=0;m=0?o=o.slice(0,u+1):o=[o[0]];break}}}return o.reduceRight((m,f,h)=>{let 
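/*
 * _renderMatches above folds the matched-route array from the deepest match
 * outward, so every route element receives the child tree it should render
 * through its Outlet context; routes declaring an errorElement or
 * ErrorBoundary (or the root, in a data router) are additionally wrapped in
 * the class-based error boundary defined just before it. Conceptually:
 *
 *   matches.reduceRight((outlet, match) =>
 *     renderRouteWithContext(match, { outlet, matches }), null);
 */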
v,_=!1,k=null,I=null;n&&(v=s&&f.route.id?s[f.route.id]:void 0,k=f.route.errorElement||Av,l&&(u<0&&h===0?(_=!0,I=null):u===h&&(_=!0,I=f.route.hydrateFallbackElement||null)));let g=t.concat(o.slice(0,h+1)),p=()=>{let w;return v?w=k:_?w=I:f.route.Component?w=d.createElement(f.route.Component,null):f.route.element?w=f.route.element:w=m,d.createElement(jv,{match:f,routeContext:{outlet:m,matches:g,isDataRoute:n!=null},children:w})};return n&&(f.route.ErrorBoundary||f.route.errorElement||h===0)?d.createElement(Iv,{location:n.location,revalidation:n.revalidation,component:k,error:v,children:p(),routeContext:{outlet:null,matches:g,isDataRoute:!0}}):p()},null)}var Fp=function(e){return e.UseBlocker="useBlocker",e.UseRevalidator="useRevalidator",e.UseNavigateStable="useNavigate",e}(Fp||{}),Ga=function(e){return e.UseBlocker="useBlocker",e.UseLoaderData="useLoaderData",e.UseActionData="useActionData",e.UseRouteError="useRouteError",e.UseNavigation="useNavigation",e.UseRouteLoaderData="useRouteLoaderData",e.UseMatches="useMatches",e.UseRevalidator="useRevalidator",e.UseNavigateStable="useNavigate",e.UseRouteId="useRouteId",e}(Ga||{});function Pv(e){let t=d.useContext(xo);return t||Q(!1),t}function Rv(e){let t=d.useContext(Dp);return t||Q(!1),t}function Dv(e){let t=d.useContext(Rt);return t||Q(!1),t}function Up(e){let t=Dv(),n=t.matches[t.matches.length-1];return n.route.id||Q(!1),n.route.id}function Nv(){var e;let t=d.useContext(Np),n=Rv(Ga.UseRouteError),i=Up(Ga.UseRouteError);return t!==void 0?t:(e=n.errors)==null?void 0:e[i]}function Mv(){let{router:e}=Pv(Fp.UseNavigateStable),t=Up(Ga.UseNavigateStable),n=d.useRef(!1);return Mp(()=>{n.current=!0}),d.useCallback(function(r,a){a===void 0&&(a={}),n.current&&(typeof r=="number"?e.navigate(r):e.navigate(r,Za({fromRouteId:t},a)))},[e,t])}const Zc={};function Ov(e,t){Zc[t]||(Zc[t]=!0,console.warn(t))}const ni=(e,t,n)=>Ov(e,"⚠️ React Router Future Flag Warning: "+t+". "+("You can use the `"+e+"` future flag to opt-in early. 
")+("For more information, see "+n+"."));function Fv(e,t){e!=null&&e.v7_startTransition||ni("v7_startTransition","React Router will begin wrapping state updates in `React.startTransition` in v7","https://reactrouter.com/v6/upgrading/future#v7_starttransition"),!(e!=null&&e.v7_relativeSplatPath)&&(!t||!t.v7_relativeSplatPath)&&ni("v7_relativeSplatPath","Relative route resolution within Splat routes is changing in v7","https://reactrouter.com/v6/upgrading/future#v7_relativesplatpath"),t&&(t.v7_fetcherPersist||ni("v7_fetcherPersist","The persistence behavior of fetchers is changing in v7","https://reactrouter.com/v6/upgrading/future#v7_fetcherpersist"),t.v7_normalizeFormMethod||ni("v7_normalizeFormMethod","Casing of `formMethod` fields is being normalized to uppercase in v7","https://reactrouter.com/v6/upgrading/future#v7_normalizeformmethod"),t.v7_partialHydration||ni("v7_partialHydration","`RouterProvider` hydration behavior is changing in v7","https://reactrouter.com/v6/upgrading/future#v7_partialhydration"),t.v7_skipActionErrorRevalidation||ni("v7_skipActionErrorRevalidation","The revalidation behavior after 4xx/5xx `action` responses is changing in v7","https://reactrouter.com/v6/upgrading/future#v7_skipactionerrorrevalidation"))}function Uv(e){let{to:t,replace:n,state:i,relative:r}=e;Mi()||Q(!1);let{future:a,static:o}=d.useContext(Tn),{matches:s}=d.useContext(Rt),{pathname:l}=Or(),u=bo(),m=vo(t,yo(s,a.v7_relativeSplatPath),l,r==="path"),f=JSON.stringify(m);return d.useEffect(()=>u(JSON.parse(f),{replace:n,state:i,relative:r}),[u,f,r,n,i]),null}function zv(e){return Sv(e.context)}function $v(e){let{basename:t="/",children:n=null,location:i,navigationType:r=Ce.Pop,navigator:a,static:o=!1,future:s}=e;Mi()&&Q(!1);let l=t.replace(/^\/*/,"/"),u=d.useMemo(()=>({basename:l,navigator:a,static:o,future:Za({v7_relativeSplatPath:!1},s)}),[l,s,a,o]);typeof i=="string"&&(i=Qt(i));let{pathname:m="/",search:f="",hash:h="",state:v=null,key:_="default"}=i,k=d.useMemo(()=>{let I=Ni(m,l);return I==null?null:{location:{pathname:I,search:f,hash:h,state:v,key:_},navigationType:r}},[l,m,f,h,v,_,r]);return k==null?null:d.createElement(Tn.Provider,{value:u},d.createElement(ru.Provider,{children:n,value:k}))}new Promise(()=>{});function Bv(e){let t={hasErrorBoundary:e.ErrorBoundary!=null||e.errorElement!=null};return e.Component&&Object.assign(t,{element:d.createElement(e.Component),Component:void 0}),e.HydrateFallback&&Object.assign(t,{hydrateFallbackElement:d.createElement(e.HydrateFallback),HydrateFallback:void 0}),e.ErrorBoundary&&Object.assign(t,{errorElement:d.createElement(e.ErrorBoundary),ErrorBoundary:void 0}),t}/** * React Router DOM v6.28.0 * * Copyright (c) Remix Software Inc. * * This source code is licensed under the MIT license found in the * LICENSE.md file in the root directory of this source tree. 
* * @license MIT */function Lr(){return Lr=Object.assign?Object.assign.bind():function(e){for(var t=1;t=0)&&(n[r]=e[r]);return n}function Hv(e){return!!(e.metaKey||e.altKey||e.ctrlKey||e.shiftKey)}function qv(e,t){return e.button===0&&(!t||t==="_self")&&!Hv(e)}const Wv=["onClick","relative","reloadDocument","replace","state","target","to","preventScrollReset","viewTransition"],Kv="6";try{window.__reactRouterVersion=Kv}catch{}function Qv(e,t){return tv({basename:void 0,future:Lr({},void 0,{v7_prependBasename:!0}),history:Ey({window:void 0}),hydrationData:Xv(),routes:e,mapRouteProperties:Bv,dataStrategy:void 0,patchRoutesOnNavigation:void 0,window:void 0}).initialize()}function Xv(){var e;let t=(e=window)==null?void 0:e.__staticRouterHydrationData;return t&&t.errors&&(t=Lr({},t,{errors:Yv(t.errors)})),t}function Yv(e){if(!e)return null;let t=Object.entries(e),n={};for(let[i,r]of t)if(r&&r.__type==="RouteErrorResponse")n[i]=new Ja(r.status,r.statusText,r.data,r.internal===!0);else if(r&&r.__type==="Error"){if(r.__subType){let a=window[r.__subType];if(typeof a=="function")try{let o=new a(r.message);o.stack="",n[i]=o}catch{}}if(n[i]==null){let a=new Error(r.message);a.stack="",n[i]=a}}else n[i]=r;return n}const Jv=d.createContext({isTransitioning:!1}),Zv=d.createContext(new Map),Gv="startTransition",Gc=fh[Gv],e0="flushSync",ed=Sy[e0];function t0(e){Gc?Gc(e):e()}function Ji(e){ed?ed(e):e()}class n0{constructor(){this.status="pending",this.promise=new Promise((t,n)=>{this.resolve=i=>{this.status==="pending"&&(this.status="resolved",t(i))},this.reject=i=>{this.status==="pending"&&(this.status="rejected",n(i))}})}}function i0(e){let{fallbackElement:t,router:n,future:i}=e,[r,a]=d.useState(n.state),[o,s]=d.useState(),[l,u]=d.useState({isTransitioning:!1}),[m,f]=d.useState(),[h,v]=d.useState(),[_,k]=d.useState(),I=d.useRef(new Map),{v7_startTransition:g}=i||{},p=d.useCallback(A=>{g?t0(A):A()},[g]),w=d.useCallback((A,U)=>{let{deletedFetchers:F,flushSync:re,viewTransitionOpts:me}=U;F.forEach(Ee=>I.current.delete(Ee)),A.fetchers.forEach((Ee,Xt)=>{Ee.data!==void 0&&I.current.set(Xt,Ee.data)});let Se=n.window==null||n.window.document==null||typeof n.window.document.startViewTransition!="function";if(!me||Se){re?Ji(()=>a(A)):p(()=>a(A));return}if(re){Ji(()=>{h&&(m&&m.resolve(),h.skipTransition()),u({isTransitioning:!0,flushSync:!0,currentLocation:me.currentLocation,nextLocation:me.nextLocation})});let Ee=n.window.document.startViewTransition(()=>{Ji(()=>a(A))});Ee.finished.finally(()=>{Ji(()=>{f(void 0),v(void 0),s(void 0),u({isTransitioning:!1})})}),Ji(()=>v(Ee));return}h?(m&&m.resolve(),h.skipTransition(),k({state:A,currentLocation:me.currentLocation,nextLocation:me.nextLocation})):(s(A),u({isTransitioning:!0,flushSync:!1,currentLocation:me.currentLocation,nextLocation:me.nextLocation}))},[n.window,h,m,I,p]);d.useLayoutEffect(()=>n.subscribe(w),[n,w]),d.useEffect(()=>{l.isTransitioning&&!l.flushSync&&f(new n0)},[l]),d.useEffect(()=>{if(m&&o&&n.window){let A=o,U=m.promise,F=n.window.document.startViewTransition(async()=>{p(()=>a(A)),await U});F.finished.finally(()=>{f(void 0),v(void 0),s(void 0),u({isTransitioning:!1})}),v(F)}},[p,o,m,n.window]),d.useEffect(()=>{m&&o&&r.location.key===o.location.key&&m.resolve()},[m,h,r.location,o]),d.useEffect(()=>{!l.isTransitioning&&_&&(s(_.state),u({isTransitioning:!0,flushSync:!1,currentLocation:_.currentLocation,nextLocation:_.nextLocation}),k(void 0))},[l.isTransitioning,_]),d.useEffect(()=>{},[]);let 
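/*
 * The RouterProvider above subscribes to router state and picks how to
 * commit each update: a document.startViewTransition wrapper when the
 * navigation opted in and the API exists, otherwise ReactDOM.flushSync for
 * flushSync updates, otherwise React.startTransition when the
 * v7_startTransition flag is set. Minimal shape of that fallback chain
 * (names assumed; the real code also coordinates interrupted transitions):
 *
 *   function commit(update, { flushSync, viewTransition }) {
 *     if (viewTransition && document.startViewTransition) {
 *       document.startViewTransition(() => flushSyncSafe(update));
 *     } else if (flushSync) {
 *       flushSyncSafe(update);       // ReactDOM.flushSync when available
 *     } else {
 *       startTransitionSafe(update); // React.startTransition when available
 *     }
 *   }
 */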
E=d.useMemo(()=>({createHref:n.createHref,encodeLocation:n.encodeLocation,go:A=>n.navigate(A),push:(A,U,F)=>n.navigate(A,{state:U,preventScrollReset:F==null?void 0:F.preventScrollReset}),replace:(A,U,F)=>n.navigate(A,{replace:!0,state:U,preventScrollReset:F==null?void 0:F.preventScrollReset})}),[n]),j=n.basename||"/",x=d.useMemo(()=>({router:n,navigator:E,static:!1,basename:j}),[n,E,j]),L=d.useMemo(()=>({v7_relativeSplatPath:n.future.v7_relativeSplatPath}),[n.future.v7_relativeSplatPath]);return d.useEffect(()=>Fv(i,n.future),[i,n.future]),d.createElement(d.Fragment,null,d.createElement(xo.Provider,{value:x},d.createElement(Dp.Provider,{value:r},d.createElement(Zv.Provider,{value:I.current},d.createElement(Jv.Provider,{value:l},d.createElement($v,{basename:j,location:r.location,navigationType:r.historyAction,navigator:E,future:L},r.initialized||n.future.v7_partialHydration?d.createElement(r0,{routes:n.routes,future:n.future,state:r}):t))))),null)}const r0=d.memo(a0);function a0(e){let{routes:t,future:n,state:i}=e;return Tv(t,void 0,i,n)}const o0=typeof window<"u"&&typeof window.document<"u"&&typeof window.document.createElement<"u",s0=/^(?:[a-z][a-z0-9+.-]*:|\/\/)/i,au=d.forwardRef(function(t,n){let{onClick:i,relative:r,reloadDocument:a,replace:o,state:s,target:l,to:u,preventScrollReset:m,viewTransition:f}=t,h=Vv(t,Wv),{basename:v}=d.useContext(Tn),_,k=!1;if(typeof u=="string"&&s0.test(u)&&(_=u,o0))try{let w=new URL(window.location.href),E=u.startsWith("//")?new URL(w.protocol+u):new URL(u),j=Ni(E.pathname,v);E.origin===w.origin&&j!=null?u=j+E.search+E.hash:k=!0}catch{}let I=bv(u,{relative:r}),g=l0(u,{replace:o,state:s,target:l,preventScrollReset:m,relative:r,viewTransition:f});function p(w){i&&i(w),w.defaultPrevented||g(w)}return d.createElement("a",Lr({},h,{href:_||I,onClick:k||a?i:p,ref:n,target:l}))});var td;(function(e){e.UseScrollRestoration="useScrollRestoration",e.UseSubmit="useSubmit",e.UseSubmitFetcher="useSubmitFetcher",e.UseFetcher="useFetcher",e.useViewTransitionState="useViewTransitionState"})(td||(td={}));var nd;(function(e){e.UseFetcher="useFetcher",e.UseFetchers="useFetchers",e.UseScrollRestoration="useScrollRestoration"})(nd||(nd={}));function l0(e,t){let{target:n,replace:i,state:r,preventScrollReset:a,relative:o,viewTransition:s}=t===void 0?{}:t,l=bo(),u=Or(),m=Op(e,{relative:o});return d.useCallback(f=>{if(qv(f,n)){f.preventDefault();let h=i!==void 0?i:Kn(u)===Kn(m);l(e,{replace:h,state:r,preventScrollReset:a,relative:o,viewTransition:s})}},[u,l,m,i,r,n,e,a,o,s])}var u0=Object.defineProperty,c0=(e,t,n)=>t in e?u0(e,t,{enumerable:!0,configurable:!0,writable:!0,value:n}):e[t]=n,ca=(e,t,n)=>(c0(e,typeof t!="symbol"?t+"":t,n),n),ou="https://huggingface.co";async function fr(e,t){var n,i;const r=new d0(e.url,e.status,(n=e.headers.get("X-Request-Id"))!=null?n:void 0);r.message=`Api error with status ${r.statusCode}`;const a=[`URL: ${r.url}`,r.requestId?`Request ID: ${r.requestId}`:void 0].filter(Boolean).join(". ");if((i=e.headers.get("Content-Type"))!=null&&i.startsWith("application/json")){const o=await e.json();r.message=o.error||o.message||r.message,r.data=o}else r.data={message:await e.text()};throw r.message+=`. 
${a}`,r}var d0=class extends Error{constructor(e,t,n,i){super(i),ca(this,"statusCode"),ca(this,"url"),ca(this,"requestId"),ca(this,"data"),this.statusCode=t,this.requestId=n,this.url=e}};function id(e){if(!e.startsWith("hf_"))throw new TypeError("Your access token must start with 'hf_'")}function f0(e){var t;if(e.accessToken)return id(e.accessToken),e.accessToken;if((t=e.credentials)!=null&&t.accessToken)return id(e.credentials.accessToken),e.credentials.accessToken}new Promise(e=>{});function p0(e){if(globalThis.Buffer)return globalThis.Buffer.from(e).toString("base64");{const t=[];return e.forEach(n=>{t.push(String.fromCharCode(n))}),globalThis.btoa(t.join(""))}}function m0(e,t){return Object.assign({},...t.map(n=>{if(e[n]!==void 0)return{[n]:e[n]}}))}function h0(e){const t=/<(https?:[/][/][^>]+)>;\s+rel="([^"]+)"/g;return Object.fromEntries([...e.matchAll(t)].map(([,n,i])=>[i,n]))}var g0=["pipeline_tag","private","gated","downloads","likes","lastModified"];async function*y0(e){var t,n,i,r,a,o,s,l,u,m;const f=e&&f0(e);let h=(t=e==null?void 0:e.limit)!=null?t:1/0;const v=new URLSearchParams([...Object.entries({limit:String(Math.min(h,500)),...(n=e==null?void 0:e.search)!=null&&n.owner?{author:e.search.owner}:void 0,...(i=e==null?void 0:e.search)!=null&&i.task?{pipeline_tag:e.search.task}:void 0,...(r=e==null?void 0:e.search)!=null&&r.query?{search:e.search.query}:void 0}),...(s=(o=(a=e==null?void 0:e.search)==null?void 0:a.tags)==null?void 0:o.map(k=>["filter",k]))!=null?s:[],...g0.map(k=>["expand",k]),...(u=(l=e==null?void 0:e.additionalFields)==null?void 0:l.map(k=>["expand",k]))!=null?u:[]]).toString();let _=`${(e==null?void 0:e.hubUrl)||ou}/api/models?${v}`;for(;_;){const k=await((m=e==null?void 0:e.fetch)!=null?m:fetch)(_,{headers:{accept:"application/json",...e!=null&&e.credentials?{Authorization:`Bearer ${f}`}:void 0}});if(!k.ok)throw await fr(k);const I=await k.json();for(const p of I)if(yield{...(e==null?void 0:e.additionalFields)&&m0(p,e.additionalFields),id:p._id,name:p.id,private:p.private,task:p.pipeline_tag,downloads:p.downloads,gated:p.gated,likes:p.likes,updatedAt:new Date(p.lastModified)},h--,h<=0)return;const g=k.headers.get("Link");_=g?h0(g).next:void 0}}async function rd(e){var t,n;if(typeof window>"u")throw new Error("oauthHandleRedirect is only available in the browser");const i=new URLSearchParams(window.location.search),[r,a]=[i.get("error"),i.get("error_description")];if(r)throw new Error(`${r}: ${a}`);const o=i.get("code"),s=localStorage.getItem("huggingface.co:oauth:nonce");if(!o)throw new Error("Missing oauth code from query parameters in redirected URL");if(!s)throw new Error("Missing oauth nonce from localStorage");const l=localStorage.getItem("huggingface.co:oauth:code_verifier");if(!l)throw new Error("Missing oauth code_verifier from localStorage");const u=i.get("state");if(!u)throw new Error("Missing oauth state from query parameters in redirected URL");let m;try{m=JSON.parse(u)}catch{throw new Error("Invalid oauth state in redirected URL, unable to parse JSON: "+u)}if(m.nonce!==s)throw new Error("Invalid oauth state in redirected URL");const f=ou,h=`${new URL(f).origin}/.well-known/openid-configuration`,v=await fetch(h,{headers:{Accept:"application/json"}});if(!v.ok)throw await fr(v);const _=await v.json(),k=await fetch(_.token_endpoint,{method:"POST",headers:{"Content-Type":"application/x-www-form-urlencoded"},body:new 
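/*
 * The Hugging Face hub client above pages through GET /api/models by
 * following RFC 5988 Link headers: a regex extracts the rel="next" URL and
 * an async generator keeps yielding models, decrementing the caller's
 * limit, until the link runs out. Standalone sketch of the same pagination
 * pattern (endpoint and header names as used above; auth and error handling
 * simplified):
 *
 *   const LINK = /<(https?:[/][/][^>]+)>;\s+rel="([^"]+)"/g;
 *   function parseLinkHeader(value) {
 *     return Object.fromEntries(
 *       [...value.matchAll(LINK)].map(([, url, rel]) => [rel, url])
 *     );
 *   }
 *
 *   async function* listModels(search) {
 *     let url = `https://huggingface.co/api/models?${search}`;
 *     while (url) {
 *       const res = await fetch(url, { headers: { accept: "application/json" } });
 *       if (!res.ok) throw new Error(`Api error with status ${res.status}`);
 *       yield* await res.json();
 *       const link = res.headers.get("Link");
 *       url = link ? parseLinkHeader(link).next : undefined;
 *     }
 *   }
 */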
URLSearchParams({grant_type:"authorization_code",code:o,redirect_uri:m.redirectUri,code_verifier:l}).toString()});if(localStorage.removeItem("huggingface.co:oauth:code_verifier"),localStorage.removeItem("huggingface.co:oauth:nonce"),!k.ok)throw await fr(k);const I=await k.json(),g=new Date(Date.now()+I.expires_in*1e3),p=await fetch(_.userinfo_endpoint,{headers:{Authorization:`Bearer ${I.access_token}`}});if(!p.ok)throw await fr(p);const w=await p.json();return{accessToken:I.access_token,accessTokenExpiresAt:g,userInfo:{id:w.sub,name:w.name,fullname:w.preferred_username,email:w.email,emailVerified:w.email_verified,avatarUrl:w.picture,websiteUrl:w.website,isPro:w.isPro,orgs:(n=(t=w.orgs)==null?void 0:t.map(E=>({id:E.sub,name:E.name,fullname:E.name,isEnterprise:E.isEnterprise,canPay:E.canPay,avatarUrl:E.picture,roleInOrg:E.roleInOrg})))!=null?n:[]},state:m.state,scope:I.scope}}async function v0(e){if(typeof window>"u")throw new Error("oauthHandleRedirect is only available in the browser");const t=new URLSearchParams(window.location.search);return t.has("error")?rd():t.has("code")?localStorage.getItem("huggingface.co:oauth:nonce")?rd():(console.warn("Missing oauth nonce from localStorage. This can happen when the user refreshes the page after logging in, without changing the URL."),!1):!1}async function w0(e){var t,n;if(typeof window>"u")throw new Error("oauthLogin is only available in the browser");const i=(e==null?void 0:e.hubUrl)||ou,r=`${new URL(i).origin}/.well-known/openid-configuration`,a=await fetch(r,{headers:{Accept:"application/json"}});if(!a.ok)throw await fr(a);const o=await a.json(),s=globalThis.crypto.randomUUID(),l=globalThis.crypto.randomUUID()+globalThis.crypto.randomUUID();localStorage.setItem("huggingface.co:oauth:nonce",s),localStorage.setItem("huggingface.co:oauth:code_verifier",l);const u=(e==null?void 0:e.redirectUrl)||window.location.href,m=JSON.stringify({nonce:s,redirectUri:u,state:e==null?void 0:e.state}),f=(n=(t=window==null?void 0:window.huggingface)==null?void 0:t.variables)!=null?n:null,h=(e==null?void 0:e.clientId)||(f==null?void 0:f.OAUTH_CLIENT_ID);if(!h)throw f?new Error("Missing clientId, please add hf_oauth: true to the README.md's metadata in your static Space"):new Error("Missing clientId");const v=p0(new Uint8Array(await globalThis.crypto.subtle.digest("SHA-256",new TextEncoder().encode(l)))).replace(/[+]/g,"-").replace(/[/]/g,"_").replace(/=/g,"");return`${o.authorization_endpoint}?${new URLSearchParams({client_id:h,scope:(e==null?void 0:e.scopes)||(f==null?void 0:f.OAUTH_SCOPES)||"openid profile",response_type:"code",redirect_uri:u,state:m,code_challenge:v,code_challenge_method:"S256"}).toString()}`}const Fr=d.createContext({accessToken:void 0,setAccessToken:()=>{}}),x0=e=>{const[t,n]=d.useState(()=>localStorage.getItem("accessToken")??void 0);return c.jsx(Fr.Provider,{value:{accessToken:t,setAccessToken:n},children:e.children})},b0=e=>{const{accessToken:t}=d.useContext(Fr);return t?e.children:c.jsx(Uv,{to:"/sign-in"})},k0=()=>c.jsx("div",{className:"min-w-screen flex h-full min-h-screen w-full flex-col items-center overflow-auto bg-yellow-500",children:c.jsxs("div",{className:"flex w-2/3 flex-col items-center justify-center space-y-12 py-24 lg:w-1/3",children:[c.jsx("header",{className:"text-center text-6xl",children:c.jsx(au,{to:"/",children:"🤗"})}),c.jsx(zv,{})]})}),ko=d.createContext({loading:!1,setLoading:()=>{}}),_0=e=>{const[t,n]=d.useState(!1);return 
c.jsx(ko.Provider,{value:{loading:t,setLoading:n},children:e.children})},z=e=>c.jsx("button",{className:`border-4 border-yellow-200 ${e.variant==="secondary"?"":"bg-yellow-200"} w-full p-6 text-center ${e.disabled?"cursor-not-allowed opacity-50":""}`,disabled:e.disabled??!1,onClick:e.onClick,children:e.label??"Submit"}),Ie=e=>{const t=d.useRef(null);return d.useLayoutEffect(()=>{t.current&&(t.current.style.height="inherit",t.current.style.height=`${t.current.scrollHeight}px`)},[e.input]),c.jsxs("div",{className:"w-full",children:[c.jsx("p",{className:"text-xl",children:e.label??"Input"}),c.jsx("textarea",{className:`w-full resize-none bg-yellow-200 p-6 text-center ${e.disabled?"cursor-not-allowed opacity-50":""}`,disabled:e.disabled??!1,onChange:n=>{!e.disabled&&e.setInput&&(n.target.value?e.setInput(n.target.value):e.setInput(""))},ref:t,rows:1,style:{height:t.current?`${t.current.scrollHeight}px`:"inherit"},value:e.input??""})]})},S0=()=>{const{setAccessToken:e}=d.useContext(Fr),{loading:t}=d.useContext(ko),[n,i]=d.useState(),[r,a]=d.useState(),o=bo();return d.useEffect(()=>{(async()=>{const l=await w0({clientId:"34f370fc-30f4-49aa-9147-a3b9361f0a7f",scopes:"openid profile inference-api"});a(l)})()},[]),d.useEffect(()=>{(async()=>{const l=await v0();l&&(e(l.accessToken),localStorage.setItem("accessToken",l.accessToken),window.history.pushState("","",window.location.origin+window.location.pathname+window.location.hash),o("/"))})()},[e,o]),c.jsxs(d.Fragment,{children:[c.jsx(au,{className:`w-full border-4 border-yellow-200 p-6 text-center ${r?"":"cursor-not-allowed opacity-50"}`,to:r??"",children:"Sign In With 🤗"}),c.jsx("p",{className:"text-xl",children:"or"}),c.jsx(Ie,{disabled:t,input:n,label:"Access Token For 🤗",setInput:i}),c.jsx(z,{disabled:t||!n,label:"Clear",onClick:()=>{i(void 0)},variant:"secondary"}),c.jsx(z,{disabled:t||!n,onClick:()=>{n&&(e(n),localStorage.setItem("accessToken",n),i(void 0),o("/"))}})]})},E0=()=>{const{accessToken:e,setAccessToken:t}=d.useContext(Fr),{loading:n}=d.useContext(ko),i=d.useRef(null);return d.useLayoutEffect(()=>{i.current&&(i.current.style.height="inherit",i.current.style.height=`${i.current.scrollHeight}px`)},[e]),c.jsx(d.Fragment,{children:c.jsx(au,{className:`w-full border-4 border-yellow-200 p-6 text-center ${n?"cursor-not-allowed opacity-50":""}`,onClick:()=>{t(void 0),localStorage.removeItem("accessToken")},to:"/sign-in",children:"Sign Out"})})};var T0=Object.defineProperty,Ur=(e,t)=>{for(var n in 
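/*
 * The sign-in components above drive an OAuth 2.0 + PKCE flow against
 * huggingface.co: oauthLogin stores a random nonce and code_verifier in
 * localStorage, derives the S256 code_challenge, and builds the
 * authorization URL from the discovered openid-configuration, while
 * oauthHandleRedirect later exchanges the returned code for tokens. The
 * challenge derivation, extracted into a readable helper (toBase64 stands
 * in for the bundle's own Uint8Array-to-base64 encoder):
 *
 *   async function codeChallengeS256(codeVerifier) {
 *     const digest = await globalThis.crypto.subtle.digest(
 *       "SHA-256", new TextEncoder().encode(codeVerifier));
 *     // base64url: "+" -> "-", "/" -> "_", padding stripped
 *     return toBase64(new Uint8Array(digest))
 *       .replace(/[+]/g, "-")
 *       .replace(/[/]/g, "_")
 *       .replace(/=/g, "");
 *   }
 */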
t)T0(e,n,{get:t[n],enumerable:!0})},C0={"adapter-transformers":["question-answering","text-classification","token-classification"],allennlp:["question-answering"],asteroid:["audio-to-audio"],bertopic:["text-classification"],diffusers:["image-to-image","text-to-image"],doctr:["object-detection"],espnet:["text-to-speech","automatic-speech-recognition"],fairseq:["text-to-speech","audio-to-audio"],fastai:["image-classification"],fasttext:["feature-extraction","text-classification"],flair:["token-classification"],k2:["automatic-speech-recognition"],keras:["image-classification"],nemo:["automatic-speech-recognition"],open_clip:["zero-shot-classification","zero-shot-image-classification"],paddlenlp:["fill-mask","summarization","zero-shot-classification"],peft:["text-generation"],"pyannote-audio":["automatic-speech-recognition"],"sentence-transformers":["feature-extraction","sentence-similarity"],setfit:["text-classification"],sklearn:["tabular-classification","tabular-regression","text-classification"],spacy:["token-classification","text-classification","sentence-similarity"],"span-marker":["token-classification"],speechbrain:["audio-classification","audio-to-audio","automatic-speech-recognition","text-to-speech","text2text-generation"],stanza:["token-classification"],timm:["image-classification"],transformers:["audio-classification","automatic-speech-recognition","depth-estimation","document-question-answering","feature-extraction","fill-mask","image-classification","image-segmentation","image-to-image","image-to-text","object-detection","question-answering","summarization","table-question-answering","text2text-generation","text-classification","text-generation","text-to-audio","text-to-speech","token-classification","translation","video-classification","visual-question-answering","zero-shot-classification","zero-shot-image-classification","zero-shot-object-detection"],mindspore:["image-classification"]},su={"text-classification":{name:"Text Classification",subtasks:[{type:"acceptability-classification",name:"Acceptability Classification"},{type:"entity-linking-classification",name:"Entity Linking Classification"},{type:"fact-checking",name:"Fact Checking"},{type:"intent-classification",name:"Intent Classification"},{type:"language-identification",name:"Language Identification"},{type:"multi-class-classification",name:"Multi Class Classification"},{type:"multi-label-classification",name:"Multi Label Classification"},{type:"multi-input-text-classification",name:"Multi-input Text Classification"},{type:"natural-language-inference",name:"Natural Language Inference"},{type:"semantic-similarity-classification",name:"Semantic Similarity Classification"},{type:"sentiment-classification",name:"Sentiment Classification"},{type:"topic-classification",name:"Topic Classification"},{type:"semantic-similarity-scoring",name:"Semantic Similarity Scoring"},{type:"sentiment-scoring",name:"Sentiment Scoring"},{type:"sentiment-analysis",name:"Sentiment Analysis"},{type:"hate-speech-detection",name:"Hate Speech Detection"},{type:"text-scoring",name:"Text Scoring"}],modality:"nlp",color:"orange"},"token-classification":{name:"Token Classification",subtasks:[{type:"named-entity-recognition",name:"Named Entity Recognition"},{type:"part-of-speech",name:"Part of Speech"},{type:"parsing",name:"Parsing"},{type:"lemmatization",name:"Lemmatization"},{type:"word-sense-disambiguation",name:"Word Sense 
Disambiguation"},{type:"coreference-resolution",name:"Coreference-resolution"}],modality:"nlp",color:"blue"},"table-question-answering":{name:"Table Question Answering",modality:"nlp",color:"green"},"question-answering":{name:"Question Answering",subtasks:[{type:"extractive-qa",name:"Extractive QA"},{type:"open-domain-qa",name:"Open Domain QA"},{type:"closed-domain-qa",name:"Closed Domain QA"}],modality:"nlp",color:"blue"},"zero-shot-classification":{name:"Zero-Shot Classification",modality:"nlp",color:"yellow"},translation:{name:"Translation",modality:"nlp",color:"green"},summarization:{name:"Summarization",subtasks:[{type:"news-articles-summarization",name:"News Articles Summarization"},{type:"news-articles-headline-generation",name:"News Articles Headline Generation"}],modality:"nlp",color:"indigo"},"feature-extraction":{name:"Feature Extraction",modality:"nlp",color:"red"},"text-generation":{name:"Text Generation",subtasks:[{type:"dialogue-modeling",name:"Dialogue Modeling"},{type:"dialogue-generation",name:"Dialogue Generation"},{type:"conversational",name:"Conversational"},{type:"language-modeling",name:"Language Modeling"}],modality:"nlp",color:"indigo"},"text2text-generation":{name:"Text2Text Generation",subtasks:[{type:"text-simplification",name:"Text simplification"},{type:"explanation-generation",name:"Explanation Generation"},{type:"abstractive-qa",name:"Abstractive QA"},{type:"open-domain-abstractive-qa",name:"Open Domain Abstractive QA"},{type:"closed-domain-qa",name:"Closed Domain QA"},{type:"open-book-qa",name:"Open Book QA"},{type:"closed-book-qa",name:"Closed Book QA"}],modality:"nlp",color:"indigo"},"fill-mask":{name:"Fill-Mask",subtasks:[{type:"slot-filling",name:"Slot Filling"},{type:"masked-language-modeling",name:"Masked Language Modeling"}],modality:"nlp",color:"red"},"sentence-similarity":{name:"Sentence Similarity",modality:"nlp",color:"yellow"},"text-to-speech":{name:"Text-to-Speech",modality:"audio",color:"yellow"},"text-to-audio":{name:"Text-to-Audio",modality:"audio",color:"yellow"},"automatic-speech-recognition":{name:"Automatic Speech Recognition",modality:"audio",color:"yellow"},"audio-to-audio":{name:"Audio-to-Audio",modality:"audio",color:"blue"},"audio-classification":{name:"Audio Classification",subtasks:[{type:"keyword-spotting",name:"Keyword Spotting"},{type:"speaker-identification",name:"Speaker Identification"},{type:"audio-intent-classification",name:"Audio Intent Classification"},{type:"audio-emotion-recognition",name:"Audio Emotion Recognition"},{type:"audio-language-identification",name:"Audio Language Identification"}],modality:"audio",color:"green"},"voice-activity-detection":{name:"Voice Activity Detection",modality:"audio",color:"red"},"depth-estimation":{name:"Depth Estimation",modality:"cv",color:"yellow"},"image-classification":{name:"Image Classification",subtasks:[{type:"multi-label-image-classification",name:"Multi Label Image Classification"},{type:"multi-class-image-classification",name:"Multi Class Image Classification"}],modality:"cv",color:"blue"},"object-detection":{name:"Object Detection",subtasks:[{type:"face-detection",name:"Face Detection"},{type:"vehicle-detection",name:"Vehicle Detection"}],modality:"cv",color:"yellow"},"image-segmentation":{name:"Image Segmentation",subtasks:[{type:"instance-segmentation",name:"Instance Segmentation"},{type:"semantic-segmentation",name:"Semantic Segmentation"},{type:"panoptic-segmentation",name:"Panoptic 
Segmentation"}],modality:"cv",color:"green"},"text-to-image":{name:"Text-to-Image",modality:"cv",color:"yellow"},"image-to-text":{name:"Image-to-Text",subtasks:[{type:"image-captioning",name:"Image Captioning"}],modality:"cv",color:"red"},"image-to-image":{name:"Image-to-Image",subtasks:[{type:"image-inpainting",name:"Image Inpainting"},{type:"image-colorization",name:"Image Colorization"},{type:"super-resolution",name:"Super Resolution"}],modality:"cv",color:"indigo"},"image-to-video":{name:"Image-to-Video",modality:"cv",color:"indigo"},"unconditional-image-generation":{name:"Unconditional Image Generation",modality:"cv",color:"green"},"video-classification":{name:"Video Classification",modality:"cv",color:"blue"},"reinforcement-learning":{name:"Reinforcement Learning",modality:"rl",color:"red"},robotics:{name:"Robotics",modality:"rl",subtasks:[{type:"grasping",name:"Grasping"},{type:"task-planning",name:"Task Planning"}],color:"blue"},"tabular-classification":{name:"Tabular Classification",modality:"tabular",subtasks:[{type:"tabular-multi-class-classification",name:"Tabular Multi Class Classification"},{type:"tabular-multi-label-classification",name:"Tabular Multi Label Classification"}],color:"blue"},"tabular-regression":{name:"Tabular Regression",modality:"tabular",subtasks:[{type:"tabular-single-column-regression",name:"Tabular Single Column Regression"}],color:"blue"},"tabular-to-text":{name:"Tabular to Text",modality:"tabular",subtasks:[{type:"rdf-to-text",name:"RDF to text"}],color:"blue",hideInModels:!0},"table-to-text":{name:"Table to Text",modality:"nlp",color:"blue",hideInModels:!0},"multiple-choice":{name:"Multiple Choice",subtasks:[{type:"multiple-choice-qa",name:"Multiple Choice QA"},{type:"multiple-choice-coreference-resolution",name:"Multiple Choice Coreference Resolution"}],modality:"nlp",color:"blue",hideInModels:!0},"text-retrieval":{name:"Text Retrieval",subtasks:[{type:"document-retrieval",name:"Document Retrieval"},{type:"utterance-retrieval",name:"Utterance Retrieval"},{type:"entity-linking-retrieval",name:"Entity Linking Retrieval"},{type:"fact-checking-retrieval",name:"Fact Checking Retrieval"}],modality:"nlp",color:"indigo",hideInModels:!0},"time-series-forecasting":{name:"Time Series Forecasting",modality:"tabular",subtasks:[{type:"univariate-time-series-forecasting",name:"Univariate Time Series Forecasting"},{type:"multivariate-time-series-forecasting",name:"Multivariate Time Series Forecasting"}],color:"blue"},"text-to-video":{name:"Text-to-Video",modality:"cv",color:"green"},"image-text-to-text":{name:"Image-Text-to-Text",modality:"multimodal",color:"red",hideInDatasets:!0},"visual-question-answering":{name:"Visual Question Answering",subtasks:[{type:"visual-question-answering",name:"Visual Question Answering"}],modality:"multimodal",color:"red"},"document-question-answering":{name:"Document Question Answering",subtasks:[{type:"document-question-answering",name:"Document Question Answering"}],modality:"multimodal",color:"blue",hideInDatasets:!0},"zero-shot-image-classification":{name:"Zero-Shot Image Classification",modality:"cv",color:"yellow"},"graph-ml":{name:"Graph Machine Learning",modality:"other",color:"green"},"mask-generation":{name:"Mask Generation",modality:"cv",color:"indigo"},"zero-shot-object-detection":{name:"Zero-Shot Object Detection",modality:"cv",color:"yellow"},"text-to-3d":{name:"Text-to-3D",modality:"cv",color:"yellow"},"image-to-3d":{name:"Image-to-3D",modality:"cv",color:"green"},"image-feature-extraction":{name:"Image Feature 
Extraction",modality:"cv",color:"indigo"},"video-text-to-text":{name:"Video-Text-to-Text",modality:"multimodal",color:"blue",hideInDatasets:!1},"keypoint-detection":{name:"Keypoint Detection",subtasks:[{type:"pose-estimation",name:"Pose Estimation"}],modality:"cv",color:"red",hideInDatasets:!0},"any-to-any":{name:"Any-to-Any",modality:"multimodal",color:"yellow",hideInDatasets:!0},other:{name:"Other",modality:"other",color:"blue",hideInModels:!0,hideInDatasets:!0}},A0=Object.keys(su);Object.values(su).flatMap(e=>"subtasks"in e?e.subtasks:[]).map(e=>e.type);new Set(A0);var I0={datasets:[{description:"A benchmark of 10 different audio tasks.",id:"s3prl/superb"},{description:"A dataset of YouTube clips and their sound categories.",id:"agkphysics/AudioSet"}],demo:{inputs:[{filename:"audio.wav",type:"audio"}],outputs:[{data:[{label:"Up",score:.2},{label:"Down",score:.8}],type:"chart"}]},metrics:[{description:"",id:"accuracy"},{description:"",id:"recall"},{description:"",id:"precision"},{description:"",id:"f1"}],models:[{description:"An easy-to-use model for command recognition.",id:"speechbrain/google_speech_command_xvector"},{description:"An emotion recognition model.",id:"ehcalabres/wav2vec2-lg-xlsr-en-speech-emotion-recognition"},{description:"A language identification model.",id:"facebook/mms-lid-126"}],spaces:[{description:"An application that can classify music into different genre.",id:"kurianbenoy/audioclassification"}],summary:"Audio classification is the task of assigning a label or class to a given audio. It can be used for recognizing which command a user is giving or the emotion of a statement, as well as identifying a speaker.",widgetModels:["MIT/ast-finetuned-audioset-10-10-0.4593"],youtubeId:"KWwzcmG98Ds"},j0=I0,L0={datasets:[{description:"512-element X-vector embeddings of speakers from CMU ARCTIC dataset.",id:"Matthijs/cmu-arctic-xvectors"}],demo:{inputs:[{filename:"input.wav",type:"audio"}],outputs:[{filename:"label-0.wav",type:"audio"},{filename:"label-1.wav",type:"audio"}]},metrics:[{description:"The Signal-to-Noise ratio is the relationship between the target signal level and the background noise level. It is calculated as the logarithm of the target signal divided by the background noise, in decibels.",id:"snri"},{description:"The Signal-to-Distortion ratio is the relationship between the target signal and the sum of noise, interference, and artifact errors",id:"sdri"}],models:[{description:"A solid model of audio source separation.",id:"speechbrain/sepformer-wham"},{description:"A speech enhancement model.",id:"ResembleAI/resemble-enhance"},{description:"A model that can change the voice in a speech recording.",id:"microsoft/speecht5_vc"}],spaces:[{description:"An application for speech separation.",id:"younver/speechbrain-speech-separation"},{description:"An application for audio style transfer.",id:"nakas/audio-diffusion_style_transfer"}],summary:"Audio-to-Audio is a family of tasks in which the input is an audio and the output is one or multiple generated audios. 
Some example tasks are speech enhancement and source separation.",widgetModels:["speechbrain/sepformer-wham"],youtubeId:"iohj7nCCYoM"},P0=L0,R0={datasets:[{description:"31,175 hours of multilingual audio-text dataset in 108 languages.",id:"mozilla-foundation/common_voice_17_0"},{description:"A dataset with 44.6k hours of English speaker data and 6k hours of other language speakers.",id:"parler-tts/mls_eng"},{description:"A multi-lingual audio dataset with 370K hours of audio.",id:"espnet/yodas"}],demo:{inputs:[{filename:"input.flac",type:"audio"}],outputs:[{label:"Transcript",content:"Going along slushy country roads and speaking to damp audiences in...",type:"text"}]},metrics:[{description:"",id:"wer"},{description:"",id:"cer"}],models:[{description:"A powerful ASR model by OpenAI.",id:"openai/whisper-large-v3"},{description:"A good generic speech model by MetaAI for fine-tuning.",id:"facebook/w2v-bert-2.0"},{description:"An end-to-end model that performs ASR and Speech Translation by MetaAI.",id:"facebook/seamless-m4t-v2-large"},{description:"Powerful speaker diarization model.",id:"pyannote/speaker-diarization-3.1"}],spaces:[{description:"A powerful general-purpose speech recognition application.",id:"hf-audio/whisper-large-v3"},{description:"Fastest speech recognition application.",id:"sanchit-gandhi/whisper-jax"},{description:"A high quality speech and text translation model by Meta.",id:"facebook/seamless_m4t"}],summary:"Automatic Speech Recognition (ASR), also known as Speech to Text (STT), is the task of transcribing a given audio to text. It has many applications, such as voice user interfaces.",widgetModels:["openai/whisper-large-v3"],youtubeId:"TksaY_FDgnk"},D0=R0,N0={datasets:[{description:"Largest document understanding dataset.",id:"HuggingFaceM4/Docmatix"},{description:"Dataset from the 2020 DocVQA challenge. The documents are taken from the UCSF Industry Documents Library.",id:"eliolio/docvqa"}],demo:{inputs:[{label:"Question",content:"What is the idea behind the consumer relations efficiency team?",type:"text"},{filename:"document-question-answering-input.png",type:"img"}],outputs:[{label:"Answer",content:"Balance cost efficiency with quality customer service",type:"text"}]},metrics:[{description:"The evaluation metric for the DocVQA challenge is the Average Normalized Levenshtein Similarity (ANLS). This metric is flexible to character regognition errors and compares the predicted answer with the ground truth answer.",id:"anls"},{description:"Exact Match is a metric based on the strict character match of the predicted answer and the right answer. For answers predicted correctly, the Exact Match will be 1. 
Even if only one character is different, Exact Match will be 0",id:"exact-match"}],models:[{description:"A robust document question answering model.",id:"impira/layoutlm-document-qa"},{description:"A document question answering model specialized in invoices.",id:"impira/layoutlm-invoices"},{description:"A special model for OCR-free document question answering.",id:"microsoft/udop-large"},{description:"A powerful model for document question answering.",id:"google/pix2struct-docvqa-large"}],spaces:[{description:"A robust document question answering application.",id:"impira/docquery"},{description:"An application that can answer questions from invoices.",id:"impira/invoices"},{description:"An application to compare different document question answering models.",id:"merve/compare_docvqa_models"}],summary:"Document Question Answering (also known as Document Visual Question Answering) is the task of answering questions on document images. Document question answering models take a (document, question) pair as input and return an answer in natural language. Models usually rely on multi-modal features, combining text, position of words (bounding-boxes) and image.",widgetModels:["impira/layoutlm-invoices"],youtubeId:""},M0=N0,O0={datasets:[{description:"Wikipedia dataset containing cleaned articles of all languages. Can be used to train `feature-extraction` models.",id:"wikipedia"}],demo:{inputs:[{label:"Input",content:"India, officially the Republic of India, is a country in South Asia.",type:"text"}],outputs:[{table:[["Dimension 1","Dimension 2","Dimension 3"],["2.583383083343506","2.757075071334839","0.9023529887199402"],["8.29393482208252","1.1071064472198486","2.03399395942688"],["-0.7754912972450256","-1.647324562072754","-0.6113331913948059"],["0.07087723910808563","1.5942802429199219","1.4610432386398315"]],type:"tabular"}]},metrics:[],models:[{description:"A powerful feature extraction model for natural language processing tasks.",id:"thenlper/gte-large"},{description:"A strong feature extraction model for retrieval.",id:"Alibaba-NLP/gte-Qwen1.5-7B-instruct"}],spaces:[{description:"A leaderboard to rank text feature extraction models based on a benchmark.",id:"mteb/leaderboard"},{description:"A leaderboard to rank best feature extraction models based on human feedback.",id:"mteb/arena"}],summary:"Feature extraction is the task of extracting features learnt in a model.",widgetModels:["facebook/bart-base"]},F0=O0,U0={datasets:[{description:"A common dataset that is used to train models for many languages.",id:"wikipedia"},{description:"A large English dataset with text crawled from the web.",id:"c4"}],demo:{inputs:[{label:"Input",content:"The barked at me",type:"text"}],outputs:[{type:"chart",data:[{label:"wolf",score:.487},{label:"dog",score:.061},{label:"cat",score:.058},{label:"fox",score:.047},{label:"squirrel",score:.025}]}]},metrics:[{description:"Cross Entropy is a metric that calculates the difference between two probability distributions. Each probability distribution is the distribution of predicted words",id:"cross_entropy"},{description:"Perplexity is the exponential of the cross-entropy loss. It evaluates the probabilities assigned to the next word by the model. 
Lower perplexity indicates better performance",id:"perplexity"}],models:[{description:"The famous BERT model.",id:"google-bert/bert-base-uncased"},{description:"A multilingual model trained on 100 languages.",id:"FacebookAI/xlm-roberta-base"}],spaces:[],summary:"Masked language modeling is the task of masking some of the words in a sentence and predicting which words should replace those masks. These models are useful when we want to get a statistical understanding of the language in which the model is trained in.",widgetModels:["distilroberta-base"],youtubeId:"mqElG5QJWUg"},z0=U0,$0={datasets:[{description:"Benchmark dataset used for image classification with images that belong to 100 classes.",id:"cifar100"},{description:"Dataset consisting of images of garments.",id:"fashion_mnist"}],demo:{inputs:[{filename:"image-classification-input.jpeg",type:"img"}],outputs:[{type:"chart",data:[{label:"Egyptian cat",score:.514},{label:"Tabby cat",score:.193},{label:"Tiger cat",score:.068}]}]},metrics:[{description:"",id:"accuracy"},{description:"",id:"recall"},{description:"",id:"precision"},{description:"",id:"f1"}],models:[{description:"A strong image classification model.",id:"google/vit-base-patch16-224"},{description:"A robust image classification model.",id:"facebook/deit-base-distilled-patch16-224"},{description:"A strong image classification model.",id:"facebook/convnext-large-224"}],spaces:[{description:"An application that classifies what a given image is about.",id:"nielsr/perceiver-image-classification"}],summary:"Image classification is the task of assigning a label or class to an entire image. Images are expected to have only one class for each image. Image classification models take an image as input and return a prediction about which class the image belongs to.",widgetModels:["google/vit-base-patch16-224"],youtubeId:"tjAIM7BOYhw"},B0=$0,V0={datasets:[{description:"ImageNet-1K is a image classification dataset in which images are used to train image-feature-extraction models.",id:"imagenet-1k"}],demo:{inputs:[{filename:"mask-generation-input.png",type:"img"}],outputs:[{table:[["Dimension 1","Dimension 2","Dimension 3"],["0.21236686408519745","1.0919708013534546","0.8512550592422485"],["0.809657871723175","-0.18544459342956543","-0.7851548194885254"],["1.3103108406066895","-0.2479034662246704","-0.9107287526130676"],["1.8536205291748047","-0.36419737339019775","0.09717650711536407"]],type:"tabular"}]},metrics:[],models:[{description:"A powerful image feature extraction model.",id:"timm/vit_large_patch14_dinov2.lvd142m"},{description:"A strong image feature extraction model.",id:"nvidia/MambaVision-T-1K"},{description:"A robust image feature extraction model.",id:"facebook/dino-vitb16"},{description:"Strong image feature extraction model made for information retrieval from documents.",id:"vidore/colpali"},{description:"Strong image feature extraction model that can be used on images and documents.",id:"OpenGVLab/InternViT-6B-448px-V1-2"}],spaces:[],summary:"Image feature extraction is the task of extracting features learnt in a computer vision model.",widgetModels:[]},H0=V0,q0={datasets:[{description:"Synthetic dataset, for image relighting",id:"VIDIT"},{description:"Multiple images of celebrities, used for facial expression translation",id:"huggan/CelebA-faces"}],demo:{inputs:[{filename:"image-to-image-input.jpeg",type:"img"}],outputs:[{filename:"image-to-image-output.png",type:"img"}]},isPlaceholder:!1,metrics:[{description:"Peak Signal to Noise Ratio (PSNR) is an approximation of 
the human perception, considering the ratio of the absolute intensity with respect to the variations. Measured in dB, a high value indicates a high fidelity.",id:"PSNR"},{description:"Structural Similarity Index (SSIM) is a perceptual metric which compares the luminance, contrast and structure of two images. The values of SSIM range between -1 and 1, and higher values indicate closer resemblance to the original image.",id:"SSIM"},{description:"Inception Score (IS) is an analysis of the labels predicted by an image classification model when presented with a sample of the generated images.",id:"IS"}],models:[{description:"An image-to-image model to improve image resolution.",id:"fal/AuraSR-v2"},{description:"A model that increases the resolution of an image.",id:"keras-io/super-resolution"},{description:"A model that creates a set of variations of the input image in the style of DALL-E using Stable Diffusion.",id:"lambdalabs/sd-image-variations-diffusers"},{description:"A model that generates images based on segments in the input image and the text prompt.",id:"mfidabel/controlnet-segment-anything"},{description:"A model that takes an image and an instruction to edit the image.",id:"timbrooks/instruct-pix2pix"}],spaces:[{description:"Image enhancer application for low light.",id:"keras-io/low-light-image-enhancement"},{description:"Style transfer application.",id:"keras-io/neural-style-transfer"},{description:"An application that generates images based on segment control.",id:"mfidabel/controlnet-segment-anything"},{description:"Image generation application that takes image control and text prompt.",id:"hysts/ControlNet"},{description:"Colorize any image using this app.",id:"ioclab/brightness-controlnet"},{description:"Edit images with instructions.",id:"timbrooks/instruct-pix2pix"}],summary:"Image-to-image is the task of transforming an input image through a variety of possible manipulations and enhancements, such as super-resolution, image inpainting, colorization, and more.",widgetModels:["stabilityai/stable-diffusion-2-inpainting"],youtubeId:""},W0=q0,K0={datasets:[{description:"Dataset from 12M image-text of Reddit",id:"red_caps"},{description:"Dataset from 3.3M images of Google",id:"datasets/conceptual_captions"}],demo:{inputs:[{filename:"savanna.jpg",type:"img"}],outputs:[{label:"Detailed description",content:"a herd of giraffes and zebras grazing in a field",type:"text"}]},metrics:[],models:[{description:"A robust image captioning model.",id:"Salesforce/blip2-opt-2.7b"},{description:"A powerful and accurate image-to-text model that can also localize concepts in images.",id:"microsoft/kosmos-2-patch14-224"},{description:"A strong optical character recognition model.",id:"facebook/nougat-base"},{description:"A powerful model that lets you have a conversation with the image.",id:"llava-hf/llava-1.5-7b-hf"}],spaces:[{description:"An application that compares various image captioning models.",id:"nielsr/comparing-captioning-models"},{description:"A robust image captioning application.",id:"flax-community/image-captioning"},{description:"An application that transcribes handwritings into text.",id:"nielsr/TrOCR-handwritten"},{description:"An application that can caption images and answer questions about a given image.",id:"Salesforce/BLIP"},{description:"An application that can caption images and answer questions with a conversational agent.",id:"Salesforce/BLIP2"},{description:"An image captioning application that demonstrates the effect of noise on 
captions.",id:"johko/capdec-image-captioning"}],summary:"Image to text models output a text from a given image. Image captioning or optical character recognition can be considered as the most common applications of image to text.",widgetModels:["Salesforce/blip-image-captioning-large"],youtubeId:""},Q0=K0,X0={datasets:[{description:"Instructions composed of image and text.",id:"liuhaotian/LLaVA-Instruct-150K"},{description:"Conversation turns where questions involve image and text.",id:"liuhaotian/LLaVA-Pretrain"},{description:"A collection of datasets made for model fine-tuning.",id:"HuggingFaceM4/the_cauldron"},{description:"Screenshots of websites with their HTML/CSS codes.",id:"HuggingFaceM4/WebSight"}],demo:{inputs:[{filename:"image-text-to-text-input.png",type:"img"},{label:"Text Prompt",content:"Describe the position of the bee in detail.",type:"text"}],outputs:[{label:"Answer",content:"The bee is sitting on a pink flower, surrounded by other flowers. The bee is positioned in the center of the flower, with its head and front legs sticking out.",type:"text"}]},metrics:[],models:[{description:"Powerful vision language model with great visual understanding and reasoning capabilities.",id:"meta-llama/Llama-3.2-11B-Vision-Instruct"},{description:"Cutting-edge vision language models.",id:"allenai/Molmo-7B-D-0924"},{description:"Small yet powerful model.",id:"vikhyatk/moondream2"},{description:"Strong image-text-to-text model.",id:"Qwen/Qwen2-VL-7B-Instruct"},{description:"Strong image-text-to-text model.",id:"mistralai/Pixtral-12B-2409"},{description:"Strong image-text-to-text model focused on documents.",id:"stepfun-ai/GOT-OCR2_0"}],spaces:[{description:"Leaderboard to evaluate vision language models.",id:"opencompass/open_vlm_leaderboard"},{description:"Vision language models arena, where models are ranked by votes of users.",id:"WildVision/vision-arena"},{description:"Powerful vision-language model assistant.",id:"akhaliq/Molmo-7B-D-0924"},{description:"An image-text-to-text application focused on documents.",id:"stepfun-ai/GOT_official_online_demo"},{description:"An application to compare outputs of different vision language models.",id:"merve/compare_VLMs"},{description:"An application for chatting with an image-text-to-text model.",id:"GanymedeNil/Qwen2-VL-7B"}],summary:"Image-text-to-text models take in an image and text prompt and output text. These models are also called vision-language models, or VLMs. The difference from image-to-text models is that these models take an additional text input, not restricting the model to certain use cases like image captioning, and may also be trained to accept a conversation as input.",widgetModels:["meta-llama/Llama-3.2-11B-Vision-Instruct"],youtubeId:"IoGaGfU1CIg"},Y0=X0,J0={datasets:[{description:"Scene segmentation dataset.",id:"scene_parse_150"}],demo:{inputs:[{filename:"image-segmentation-input.jpeg",type:"img"}],outputs:[{filename:"image-segmentation-output.png",type:"img"}]},metrics:[{description:"Average Precision (AP) is the Area Under the PR Curve (AUC-PR). It is calculated for each semantic class separately",id:"Average Precision"},{description:"Mean Average Precision (mAP) is the overall average of the AP values",id:"Mean Average Precision"},{description:"Intersection over Union (IoU) is the overlap of segmentation masks. 
Mean IoU is the average of the IoU of all semantic classes",id:"Mean Intersection over Union"},{description:"APα is the Average Precision at the IoU threshold of an α value, for example, AP50 and AP75",id:"APα"}],models:[{description:"Solid semantic segmentation model trained on ADE20k.",id:"openmmlab/upernet-convnext-small"},{description:"Background removal model.",id:"briaai/RMBG-1.4"},{description:"A multipurpose image segmentation model for high resolution images.",id:"ZhengPeng7/BiRefNet"},{description:"Powerful human-centric image segmentation model.",id:"facebook/sapiens-seg-1b"},{description:"Panoptic segmentation model trained on the COCO (common objects) dataset.",id:"facebook/mask2former-swin-large-coco-panoptic"}],spaces:[{description:"A semantic segmentation application that can predict unseen instances out of the box.",id:"facebook/ov-seg"},{description:"One of the strongest segmentation applications.",id:"jbrinkma/segment-anything"},{description:"A human-centric segmentation model.",id:"facebook/sapiens-pose"},{description:"An instance segmentation application to predict neuronal cell types from microscopy images.",id:"rashmi/sartorius-cell-instance-segmentation"},{description:"An application that segments videos.",id:"ArtGAN/Segment-Anything-Video"},{description:"A panoptic segmentation application built for outdoor environments.",id:"segments/panoptic-segment-anything"}],summary:"Image Segmentation divides an image into segments where each pixel in the image is mapped to an object. This task has multiple variants such as instance segmentation, panoptic segmentation and semantic segmentation.",widgetModels:["nvidia/segformer-b0-finetuned-ade-512-512"],youtubeId:"dKE8SIt9C-w"},Z0=J0,G0={datasets:[],demo:{inputs:[{filename:"mask-generation-input.png",type:"img"}],outputs:[{filename:"mask-generation-output.png",type:"img"}]},metrics:[],models:[{description:"Small yet powerful mask generation model.",id:"Zigeng/SlimSAM-uniform-50"},{description:"Very strong mask generation model.",id:"facebook/sam2-hiera-large"}],spaces:[{description:"An application that combines a mask generation model with a zero-shot object detection model for text-guided image segmentation.",id:"merve/OWLSAM2"},{description:"An application that compares the performance of a large and a small mask generation model.",id:"merve/slimsam"},{description:"An application based on an improved mask generation model.",id:"SkalskiP/segment-anything-model-2"},{description:"An application to remove objects from videos using mask generation models.",id:"SkalskiP/SAM_and_ProPainter"}],summary:"Mask generation is the task of generating masks that identify a specific object or region of interest in a given image. Masks are often used in segmentation tasks, where they provide a precise way to isolate the object of interest for further processing or analysis.",widgetModels:[],youtubeId:""},ew=G0,tw={datasets:[{description:"Widely used benchmark dataset for multiple vision tasks.",id:"merve/coco2017"},{description:"Multi-task computer vision benchmark.",id:"merve/pascal-voc"}],demo:{inputs:[{filename:"object-detection-input.jpg",type:"img"}],outputs:[{filename:"object-detection-output.jpg",type:"img"}]},metrics:[{description:"The Average Precision (AP) metric is the Area Under the PR Curve (AUC-PR). 
It is calculated for each class separately",id:"Average Precision"},{description:"The Mean Average Precision (mAP) metric is the overall average of the AP values",id:"Mean Average Precision"},{description:"The APα metric is the Average Precision at the IoU threshold of an α value, for example, AP50 and AP75",id:"APα"}],models:[{description:"Solid object detection model pre-trained on the COCO 2017 dataset.",id:"facebook/detr-resnet-50"},{description:"Real-time and accurate object detection model.",id:"jameslahm/yolov10x"},{description:"Fast and accurate object detection model trained on COCO and Object365 datasets.",id:"PekingU/rtdetr_r18vd_coco_o365"}],spaces:[{description:"Leaderboard to compare various object detection models across several metrics.",id:"hf-vision/object_detection_leaderboard"},{description:"An application that contains various object detection models to try out.",id:"Gradio-Blocks/Object-Detection-With-DETR-and-YOLOS"},{description:"An application that shows multiple cutting-edge techniques for object detection and tracking.",id:"kadirnar/torchyolo"},{description:"An object tracking, segmentation and inpainting application.",id:"VIPLab/Track-Anything"},{description:"Very fast object tracking application based on object detection.",id:"merve/RT-DETR-tracking-coco"}],summary:"Object Detection models allow users to identify objects of certain defined classes. Object detection models receive an image as input and output the images with bounding boxes and labels on detected objects.",widgetModels:["facebook/detr-resnet-50"],youtubeId:"WdAeKSOpxhw"},nw=tw,iw={datasets:[{description:"NYU Depth V2 Dataset: Video dataset containing both RGB and depth sensor data.",id:"sayakpaul/nyu_depth_v2"},{description:"Monocular depth estimation benchmark without noise and errors.",id:"depth-anything/DA-2K"}],demo:{inputs:[{filename:"depth-estimation-input.jpg",type:"img"}],outputs:[{filename:"depth-estimation-output.png",type:"img"}]},metrics:[],models:[{description:"Cutting-edge depth estimation model.",id:"depth-anything/Depth-Anything-V2-Large"},{description:"A strong monocular depth estimation model.",id:"jingheya/lotus-depth-g-v1-0"},{description:"A depth estimation model that predicts depth in videos.",id:"tencent/DepthCrafter"},{description:"A robust depth estimation model.",id:"apple/DepthPro"}],spaces:[{description:"An application that predicts the depth of an image and then reconstructs the 3D model as voxels.",id:"radames/dpt-depth-estimation-3d-voxels"},{description:"An application for bleeding-edge depth estimation.",id:"akhaliq/depth-pro"},{description:"An application for cutting-edge depth estimation in videos.",id:"tencent/DepthCrafter"},{description:"A human-centric depth estimation application.",id:"facebook/sapiens-depth"}],summary:"Depth estimation is the task of predicting the depth of the objects present in an image.",widgetModels:[""],youtubeId:""},rw=iw,aw={datasets:[],demo:{inputs:[],outputs:[]},isPlaceholder:!0,metrics:[],models:[],spaces:[],summary:"",widgetModels:[],youtubeId:void 0,canonicalId:void 0},zp=aw,ow={datasets:[{description:"A curation of widely used datasets for Data Driven Deep Reinforcement Learning (D4RL)",id:"edbeeching/decision_transformer_gym_replay"}],demo:{inputs:[{label:"State",content:"Red traffic light, pedestrians are about to pass.",type:"text"}],outputs:[{label:"Action",content:"Stop the car.",type:"text"},{label:"Next State",content:"Yellow light, pedestrians have crossed.",type:"text"}]},metrics:[{description:"Accumulated reward 
across all time steps discounted by a factor that ranges between 0 and 1 and determines how much the agent optimizes for future relative to immediate rewards. Measures how good the policy ultimately found by a given algorithm is, considering uncertainty over the future.",id:"Discounted Total Reward"},{description:"Average return obtained after running the policy for a certain number of evaluation episodes. As opposed to total reward, mean reward considers how much reward a given algorithm receives while learning.",id:"Mean Reward"},{description:"Measures how good a given algorithm is after a predefined time. Some algorithms may be guaranteed to converge to optimal behavior across many time steps. However, an agent that reaches an acceptable level of optimality after a given time horizon may be preferable to one that ultimately reaches optimality but takes a long time.",id:"Level of Performance After Some Time"}],models:[{description:"A Reinforcement Learning model trained on expert data from the Gym Hopper environment",id:"edbeeching/decision-transformer-gym-hopper-expert"},{description:"A PPO agent playing seals/CartPole-v0 using the stable-baselines3 library and the RL Zoo.",id:"HumanCompatibleAI/ppo-seals-CartPole-v0"}],spaces:[{description:"An application for a cute puppy agent learning to catch a stick.",id:"ThomasSimonini/Huggy"},{description:"An application to play Snowball Fight with a reinforcement learning agent.",id:"ThomasSimonini/SnowballFight"}],summary:"Reinforcement learning is the computational approach of learning from action by interacting with an environment through trial and error and receiving rewards (negative or positive) as feedback.",widgetModels:[],youtubeId:"q0BiUn5LiBc"},sw=ow,lw={datasets:[{description:"A famous question answering dataset based on English articles from Wikipedia.",id:"squad_v2"},{description:"A dataset of aggregated anonymized actual queries issued to the Google search engine.",id:"natural_questions"}],demo:{inputs:[{label:"Question",content:"Which name is also used to describe the Amazon rainforest in English?",type:"text"},{label:"Context",content:"The Amazon rainforest, also known in English as Amazonia or the Amazon Jungle",type:"text"}],outputs:[{label:"Answer",content:"Amazonia",type:"text"}]},metrics:[{description:"Exact Match is a metric based on the strict character match of the predicted answer and the right answer. For answers predicted correctly, the Exact Match will be 1. Even if only one character is different, Exact Match will be 0.",id:"exact-match"},{description:"The F1-Score metric is useful if we value both false positives and false negatives equally. The F1-Score is calculated on each word in the predicted sequence against the correct answer",id:"f1"}],models:[{description:"A robust baseline model for most question answering domains.",id:"deepset/roberta-base-squad2"},{description:"Small yet robust model that can answer questions.",id:"distilbert/distilbert-base-cased-distilled-squad"},{description:"A special model that can answer questions from tables.",id:"google/tapas-base-finetuned-wtq"}],spaces:[{description:"An application that can answer a long question from Wikipedia.",id:"deepset/wikipedia-assistant"}],summary:"Question Answering models can retrieve the answer to a question from a given text, which is useful for searching for an answer in a document. 
Some question answering models can generate answers without context!",widgetModels:["deepset/roberta-base-squad2"],youtubeId:"ajPx5LwJD-I"},uw=lw,cw={datasets:[{description:"Bing queries with relevant passages from various web sources.",id:"ms_marco"}],demo:{inputs:[{label:"Source sentence",content:"Machine learning is so easy.",type:"text"},{label:"Sentences to compare to",content:"Deep learning is so straightforward.",type:"text"},{label:"",content:"This is so difficult, like rocket science.",type:"text"},{label:"",content:"I can't believe how much I struggled with this.",type:"text"}],outputs:[{type:"chart",data:[{label:"Deep learning is so straightforward.",score:.623},{label:"This is so difficult, like rocket science.",score:.413},{label:"I can't believe how much I struggled with this.",score:.256}]}]},metrics:[{description:"Reciprocal Rank is a measure used to rank the relevancy of documents given a set of documents. Reciprocal Rank is the reciprocal of the rank of the document retrieved, meaning, if the rank is 3, the Reciprocal Rank is 0.33. If the rank is 1, the Reciprocal Rank is 1",id:"Mean Reciprocal Rank"},{description:"The similarity of the embeddings is evaluated mainly on cosine similarity. It is calculated as the cosine of the angle between two vectors. It is particularly useful when your texts are not the same length",id:"Cosine Similarity"}],models:[{description:"This model works well for sentences and paragraphs and can be used for clustering/grouping and semantic searches.",id:"sentence-transformers/all-mpnet-base-v2"},{description:"A robust multilingual sentence similarity model.",id:"BAAI/bge-m3"}],spaces:[{description:"An application that leverages sentence similarity to answer questions from YouTube videos.",id:"Gradio-Blocks/Ask_Questions_To_YouTube_Videos"},{description:"An application that retrieves relevant PubMed abstracts for a given online article which can be used as further references.",id:"Gradio-Blocks/pubmed-abstract-retriever"},{description:"An application that leverages sentence similarity to summarize text.",id:"nickmuchi/article-text-summarizer"},{description:"A guide that explains how Sentence Transformers can be used for semantic search.",id:"sentence-transformers/Sentence_Transformers_for_semantic_search"}],summary:"Sentence Similarity is the task of determining how similar two texts are. Sentence similarity models convert input texts into vectors (embeddings) that capture semantic information and calculate how close (similar) they are to each other. This task is particularly useful for information retrieval and clustering/grouping.",widgetModels:["BAAI/bge-small-en-v1.5"],youtubeId:"VCZq5AkbNEU"},dw=cw,fw={canonicalId:"text2text-generation",datasets:[{description:"News articles in five different languages along with their summaries. Widely used for benchmarking multilingual summarization models.",id:"mlsum"},{description:"English conversations and their summaries. Useful for benchmarking conversational agents.",id:"samsum"}],demo:{inputs:[{label:"Input",content:"The tower is 324 metres (1,063 ft) tall, about the same height as an 81-storey building, and the tallest structure in Paris. Its base is square, measuring 125 metres (410 ft) on each side. It was the first structure to reach a height of 300 metres. 
Excluding transmitters, the Eiffel Tower is the second tallest free-standing structure in France after the Millau Viaduct.",type:"text"}],outputs:[{label:"Output",content:"The tower is 324 metres (1,063 ft) tall, about the same height as an 81-storey building. It was the first structure to reach a height of 300 metres.",type:"text"}]},metrics:[{description:"The generated sequence is compared against its summary, and the overlap of tokens is counted. ROUGE-N refers to overlap of N subsequent tokens, ROUGE-1 refers to overlap of single tokens and ROUGE-2 is the overlap of two subsequent tokens.",id:"rouge"}],models:[{description:"A strong summarization model trained on English news articles. Excels at generating factual summaries.",id:"facebook/bart-large-cnn"},{description:"A summarization model trained on medical articles.",id:"Falconsai/medical_summarization"}],spaces:[{description:"An application that can summarize long paragraphs.",id:"pszemraj/summarize-long-text"},{description:"A much needed summarization application for terms and conditions.",id:"ml6team/distilbart-tos-summarizer-tosdr"},{description:"An application that summarizes long documents.",id:"pszemraj/document-summarization"},{description:"An application that can detect errors in abstractive summarization.",id:"ml6team/post-processing-summarization"}],summary:"Summarization is the task of producing a shorter version of a document while preserving its important information. Some models can extract text from the original input, while other models can generate entirely new text.",widgetModels:["facebook/bart-large-cnn"],youtubeId:"yHnr5Dk2zCI"},pw=fw,mw={datasets:[{description:"The WikiTableQuestions dataset is a large-scale dataset for the task of question answering on semi-structured tables.",id:"wikitablequestions"},{description:"WikiSQL is a dataset of 80654 hand-annotated examples of questions and SQL queries distributed across 24241 tables from Wikipedia.",id:"wikisql"}],demo:{inputs:[{table:[["Rank","Name","No. of reigns","Combined days"],["1","Lou Thesz","3","3749"],["2","Ric Flair","8","3103"],["3","Harley Race","7","1799"]],type:"tabular"},{label:"Question",content:"What is the number of reigns for Harley Race?",type:"text"}],outputs:[{label:"Result",content:"7",type:"text"}]},metrics:[{description:"Checks whether the predicted answer(s) is the same as the ground-truth answer(s).",id:"Denotation Accuracy"}],models:[{description:"A table question answering model that is capable of neural SQL execution, i.e., employing TAPEX to execute a SQL query on a given table.",id:"microsoft/tapex-base"},{description:"A robust table question answering model.",id:"google/tapas-base-finetuned-wtq"}],spaces:[{description:"An application that answers questions based on table CSV files.",id:"katanaml/table-query"}],summary:"Table Question Answering (Table QA) is the task of answering a question about information in a given table.",widgetModels:["google/tapas-base-finetuned-wtq"]},hw=mw,gw={datasets:[{description:"A comprehensive curation of datasets covering all benchmarks.",id:"inria-soda/tabular-benchmark"}],demo:{inputs:[{table:[["Glucose","Blood Pressure","Skin Thickness","Insulin","BMI"],["148","72","35","0","33.6"],["150","50","30","0","35.1"],["141","60","29","1","39.2"]],type:"tabular"}],outputs:[{table:[["Diabetes"],["1"],["1"],["0"]],type:"tabular"}]},metrics:[{description:"",id:"accuracy"},{description:"",id:"recall"},{description:"",id:"precision"},{description:"",id:"f1"}],models:[{description:"Breast cancer prediction model 
based on decision trees.",id:"scikit-learn/cancer-prediction-trees"}],spaces:[{description:"An application that can predict defective products on a production line.",id:"scikit-learn/tabular-playground"},{description:"An application that compares various tabular classification techniques on different datasets.",id:"scikit-learn/classification"}],summary:"Tabular classification is the task of classifying a target category (a group) based on a set of attributes.",widgetModels:["scikit-learn/tabular-playground"],youtubeId:""},yw=gw,vw={datasets:[{description:"A comprehensive curation of datasets covering all benchmarks.",id:"inria-soda/tabular-benchmark"}],demo:{inputs:[{table:[["Car Name","Horsepower","Weight"],["ford torino","140","3,449"],["amc hornet","97","2,774"],["toyota corolla","65","1,773"]],type:"tabular"}],outputs:[{table:[["MPG (miles per gallon)"],["17"],["18"],["31"]],type:"tabular"}]},metrics:[{description:"",id:"mse"},{description:"Coefficient of determination (or R-squared) is a measure of how well the model fits the data. Higher R-squared is considered a better fit.",id:"r-squared"}],models:[{description:"Fish weight prediction based on length measurements and species.",id:"scikit-learn/Fish-Weight"}],spaces:[{description:"An application that can predict the weight of a fish based on a set of attributes.",id:"scikit-learn/fish-weight-prediction"}],summary:"Tabular regression is the task of predicting a numerical value given a set of attributes.",widgetModels:["scikit-learn/Fish-Weight"],youtubeId:""},ww=vw,xw={datasets:[{description:"RedCaps is a large-scale dataset of 12M image-text pairs collected from Reddit.",id:"red_caps"},{description:"Conceptual Captions is a dataset consisting of ~3.3M images annotated with captions.",id:"conceptual_captions"}],demo:{inputs:[{label:"Input",content:"A city above clouds, pastel colors, Victorian style",type:"text"}],outputs:[{filename:"image.jpeg",type:"img"}]},metrics:[{description:"The Inception Score (IS) measure assesses diversity and meaningfulness. It uses a generated image sample to predict its label. A higher score signifies more diverse and meaningful images.",id:"IS"},{description:"The Fréchet Inception Distance (FID) calculates the distance between the distributions of synthetic and real samples. A lower FID score indicates better similarity between the distributions of real and generated images.",id:"FID"},{description:"R-precision assesses how the generated image aligns with the provided text description. It uses the generated images as queries to retrieve relevant text descriptions. The top 'r' relevant descriptions are selected and used to calculate R-precision as r/R, where 'R' is the number of ground truth descriptions associated with the generated images. 
A higher R-precision value indicates a better model.",id:"R-Precision"}],models:[{description:"One of the most powerful image generation models that can generate realistic outputs.",id:"black-forest-labs/FLUX.1-dev"},{description:"A powerful yet fast image generation model.",id:"latent-consistency/lcm-lora-sdxl"},{description:"Text-to-image model for photorealistic generation.",id:"Kwai-Kolors/Kolors"},{description:"A powerful text-to-image model.",id:"stabilityai/stable-diffusion-3-medium-diffusers"}],spaces:[{description:"A powerful text-to-image application.",id:"stabilityai/stable-diffusion-3-medium"},{description:"A text-to-image application to generate comics.",id:"jbilcke-hf/ai-comic-factory"},{description:"An application to match multiple custom image generation models.",id:"multimodalart/flux-lora-lab"},{description:"A powerful yet very fast image generation application.",id:"latent-consistency/lcm-lora-for-sdxl"},{description:"A gallery to explore various text-to-image models.",id:"multimodalart/LoraTheExplorer"},{description:"An application for `text-to-image`, `image-to-image` and image inpainting.",id:"ArtGAN/Stable-Diffusion-ControlNet-WebUI"},{description:"An application to generate realistic images given photos of a person and a prompt.",id:"InstantX/InstantID"}],summary:"Text-to-image is the task of generating images from input text. These pipelines can also be used to modify and edit images based on text prompts.",widgetModels:["black-forest-labs/FLUX.1-dev"],youtubeId:""},bw=xw,kw={canonicalId:"text-to-audio",datasets:[{description:"10K hours of multi-speaker English dataset.",id:"parler-tts/mls_eng_10k"},{description:"Multi-speaker English dataset.",id:"mythicinfinity/libritts_r"}],demo:{inputs:[{label:"Input",content:"I love audio models on the Hub!",type:"text"}],outputs:[{filename:"audio.wav",type:"audio"}]},metrics:[{description:"The Mel Cepstral Distortion (MCD) metric is used to calculate the quality of generated speech.",id:"mel cepstral distortion"}],models:[{description:"A powerful TTS model.",id:"parler-tts/parler-tts-large-v1"},{description:"A massively multi-lingual TTS model.",id:"coqui/XTTS-v2"},{description:"Robust TTS model.",id:"metavoiceio/metavoice-1B-v0.1"},{description:"A prompt-based, powerful TTS model.",id:"parler-tts/parler_tts_mini_v0.1"}],spaces:[{description:"An application for generating highly realistic, multilingual speech.",id:"suno/bark"},{description:"An application built on XTTS, a voice generation model that lets you clone voices into different languages.",id:"coqui/xtts"},{description:"An application that generates speech in different styles in English and Chinese.",id:"mrfakename/E2-F5-TTS"},{description:"An application that synthesizes speech for diverse speaker prompts.",id:"parler-tts/parler_tts_mini"}],summary:"Text-to-Speech (TTS) is the task of generating natural sounding speech given text input. 
TTS models can be extended to have a single model that generates speech for multiple speakers and multiple languages.",widgetModels:["suno/bark"],youtubeId:"NW62DpzJ274"},_w=kw,Sw={datasets:[{description:"A widely used dataset useful to benchmark named entity recognition models.",id:"eriktks/conll2003"},{description:"A multilingual dataset of Wikipedia articles annotated for named entity recognition in over 150 different languages.",id:"unimelb-nlp/wikiann"}],demo:{inputs:[{label:"Input",content:"My name is Omar and I live in Zürich.",type:"text"}],outputs:[{text:"My name is Omar and I live in Zürich.",tokens:[{type:"PERSON",start:11,end:15},{type:"GPE",start:30,end:36}],type:"text-with-tokens"}]},metrics:[{description:"",id:"accuracy"},{description:"",id:"recall"},{description:"",id:"precision"},{description:"",id:"f1"}],models:[{description:"A robust performance model to identify people, locations, organizations and names of miscellaneous entities.",id:"dslim/bert-base-NER"},{description:"A strong model to identify people, locations, organizations and names in multiple languages.",id:"FacebookAI/xlm-roberta-large-finetuned-conll03-english"},{description:"A token classification model specialized in medical entity recognition.",id:"blaze999/Medical-NER"},{description:"Flair models are typically the state of the art in named entity recognition tasks.",id:"flair/ner-english"}],spaces:[{description:"An application that recognizes entities, extracts noun chunks and recognizes various linguistic features of each token.",id:"spacy/gradio_pipeline_visualizer"}],summary:"Token classification is a natural language understanding task in which a label is assigned to some tokens in a text. Some popular token classification subtasks are Named Entity Recognition (NER) and Part-of-Speech (PoS) tagging. NER models could be trained to identify specific entities in a text, such as dates, individuals and places; and PoS tagging would identify, for example, which words in a text are verbs, nouns, and punctuation marks.",widgetModels:["FacebookAI/xlm-roberta-large-finetuned-conll03-english"],youtubeId:"wVHdVlPScxA"},Ew=Sw,Tw={canonicalId:"text2text-generation",datasets:[{description:"A dataset of copyright-free books translated into 16 different languages.",id:"Helsinki-NLP/opus_books"},{description:"An example of translation between programming languages. This dataset consists of functions in Java and C#.",id:"google/code_x_glue_cc_code_to_code_trans"}],demo:{inputs:[{label:"Input",content:"My name is Omar and I live in Zürich.",type:"text"}],outputs:[{label:"Output",content:"Mein Name ist Omar und ich wohne in Zürich.",type:"text"}]},metrics:[{description:"BLEU score is calculated by counting the number of shared single or subsequent tokens between the generated sequence and the reference. Subsequent n tokens are called “n-grams”. Unigram refers to a single token while bi-gram refers to token pairs and n-grams refer to n subsequent tokens. 
The score ranges from 0 to 1, where 1 means the translation perfectly matched and 0 did not match at all",id:"bleu"},{description:"",id:"sacrebleu"}],models:[{description:"Very powerful model that can translate many languages between each other, especially low-resource languages.",id:"facebook/nllb-200-1.3B"},{description:"A general-purpose Transformer that can be used to translate from English to German, French, or Romanian.",id:"google-t5/t5-base"}],spaces:[{description:"An application that can translate between 100 languages.",id:"Iker/Translate-100-languages"},{description:"An application that can translate between many languages.",id:"Geonmo/nllb-translation-demo"}],summary:"Translation is the task of converting text from one language to another.",widgetModels:["facebook/mbart-large-50-many-to-many-mmt"],youtubeId:"1JvfrvZgi6c"},Cw=Tw,Aw={datasets:[{description:"A widely used dataset used to benchmark multiple variants of text classification.",id:"nyu-mll/glue"},{description:"A text classification dataset used to benchmark natural language inference models",id:"stanfordnlp/snli"}],demo:{inputs:[{label:"Input",content:"I love Hugging Face!",type:"text"}],outputs:[{type:"chart",data:[{label:"POSITIVE",score:.9},{label:"NEUTRAL",score:.1},{label:"NEGATIVE",score:0}]}]},metrics:[{description:"",id:"accuracy"},{description:"",id:"recall"},{description:"",id:"precision"},{description:"The F1 metric is the harmonic mean of the precision and recall. It can be calculated as: F1 = 2 * (precision * recall) / (precision + recall)",id:"f1"}],models:[{description:"A robust model trained for sentiment analysis.",id:"distilbert/distilbert-base-uncased-finetuned-sst-2-english"},{description:"A sentiment analysis model specialized in financial sentiment.",id:"ProsusAI/finbert"},{description:"A sentiment analysis model specialized in analyzing tweets.",id:"cardiffnlp/twitter-roberta-base-sentiment-latest"},{description:"A model that can classify languages.",id:"papluca/xlm-roberta-base-language-detection"},{description:"A model that can classify text generation attacks.",id:"meta-llama/Prompt-Guard-86M"}],spaces:[{description:"An application that can classify financial sentiment.",id:"IoannisTr/Tech_Stocks_Trading_Assistant"},{description:"A dashboard that contains various text classification tasks.",id:"miesnerjacob/Multi-task-NLP"},{description:"An application that analyzes user reviews in healthcare.",id:"spacy/healthsea-demo"}],summary:"Text Classification is the task of assigning a label or class to a given text. Some use cases are sentiment analysis, natural language inference, and assessing grammatical correctness.",widgetModels:["distilbert/distilbert-base-uncased-finetuned-sst-2-english"],youtubeId:"leNG9fN9FQU"},Iw=Aw,jw={datasets:[{description:"A large multilingual dataset of text crawled from the web.",id:"mc4"},{description:"Diverse open-source data consisting of 22 smaller high-quality datasets. It was used to train GPT-Neo.",id:"the_pile"},{description:"Truly open-source, curated and cleaned dialogue dataset.",id:"HuggingFaceH4/ultrachat_200k"},{description:"An instruction dataset with preference ratings on responses.",id:"openbmb/UltraFeedback"},{description:"A large synthetic dataset for alignment of text generation models.",id:"argilla/magpie-ultra-v0.1"}],demo:{inputs:[{label:"Input",content:"Once upon a time,",type:"text"}],outputs:[{label:"Output",content:"Once upon a time, we knew that our ancestors were on the verge of extinction. 
The great explorers and poets of the Old World, from Alexander the Great to Chaucer, are dead and gone. A good many of our ancient explorers and poets have",type:"text"}]},metrics:[{description:"Cross entropy is a metric that calculates the difference between two probability distributions; for language models, the predicted distribution over the next word is compared with the true distribution.",id:"Cross Entropy"},{description:"The Perplexity metric is the exponential of the cross-entropy loss. It evaluates the probabilities assigned to the next word by the model. Lower perplexity indicates better performance.",id:"Perplexity"}],models:[{description:"A text-generation model trained to follow instructions.",id:"google/gemma-2-2b-it"},{description:"Very powerful text generation model trained to follow instructions.",id:"meta-llama/Meta-Llama-3.1-8B-Instruct"},{description:"Small yet powerful text generation model.",id:"microsoft/Phi-3-mini-4k-instruct"},{description:"A very powerful model that can solve mathematical problems.",id:"AI-MO/NuminaMath-7B-TIR"},{description:"Strong text generation model to follow instructions.",id:"Qwen/Qwen2.5-7B-Instruct"},{description:"Very strong open-source large language model.",id:"nvidia/Llama-3.1-Nemotron-70B-Instruct"}],spaces:[{description:"A leaderboard to compare different open-source text generation models based on various benchmarks.",id:"open-llm-leaderboard/open_llm_leaderboard"},{description:"A leaderboard for comparing chain-of-thought performance of models.",id:"logikon/open_cot_leaderboard"},{description:"A text generation application based on a very powerful LLaMA2 model.",id:"ysharma/Explore_llamav2_with_TGI"},{description:"A text generation application to converse with the Zephyr model.",id:"HuggingFaceH4/zephyr-chat"},{description:"A leaderboard that ranks text generation models based on blind votes from people.",id:"lmsys/chatbot-arena-leaderboard"},{description:"A chatbot to converse with a very powerful text generation model.",id:"mlabonne/phixtral-chat"}],summary:"Generating text is the task of generating new text given another text. These models can, for example, fill in incomplete text or paraphrase.",widgetModels:["mistralai/Mistral-Nemo-Instruct-2407"],youtubeId:"e9gNEAlsOvU"},Lw=jw,Pw={datasets:[{description:"Microsoft Research Video to Text is a large-scale dataset for open domain video captioning.",id:"iejMac/CLIP-MSR-VTT"},{description:"UCF101 Human Actions dataset consists of 13,320 video clips from YouTube, with 101 classes.",id:"quchenyuan/UCF101-ZIP"},{description:"A high-quality dataset for human action recognition in YouTube videos.",id:"nateraw/kinetics"},{description:"A dataset of video clips of humans performing pre-defined basic actions with everyday objects.",id:"HuggingFaceM4/something_something_v2"},{description:"This dataset consists of text-video pairs and contains noisy samples with irrelevant video descriptions.",id:"HuggingFaceM4/webvid"},{description:"A dataset of short Flickr videos for the temporal localization of events with descriptions.",id:"iejMac/CLIP-DiDeMo"}],demo:{inputs:[{label:"Input",content:"Darth Vader is surfing on the waves.",type:"text"}],outputs:[{filename:"text-to-video-output.gif",type:"img"}]},metrics:[{description:"Inception Score uses an image classification model that predicts class labels and evaluates how distinct and diverse the images are. A higher score indicates better video generation.",id:"is"},{description:"Fréchet Inception Distance uses an image classification model to obtain image embeddings. 
The metric compares the mean and standard deviation of the embeddings of real and generated images. A smaller score indicates better video generation.",id:"fid"},{description:"Fréchet Video Distance uses a model that captures the coherence of changes across frames and the quality of each frame. A smaller score indicates better video generation.",id:"fvd"},{description:"CLIPSIM measures similarity between video frames and text using an image-text similarity model. A higher score indicates better video generation.",id:"clipsim"}],models:[{description:"A strong model for consistent video generation.",id:"rain1011/pyramid-flow-sd3"},{description:"A robust model for text-to-video generation.",id:"VideoCrafter/VideoCrafter2"},{description:"A cutting-edge text-to-video generation model.",id:"TIGER-Lab/T2V-Turbo-V2"}],spaces:[{description:"An application that generates video from text.",id:"VideoCrafter/VideoCrafter"},{description:"Consistent video generation application.",id:"TIGER-Lab/T2V-Turbo-V2"},{description:"A cutting-edge video generation application.",id:"Pyramid-Flow/pyramid-flow"}],summary:"Text-to-video models can be used in any application that requires generating a consistent sequence of images from text.",widgetModels:[],youtubeId:void 0},Rw=Pw,Dw={datasets:[{description:"The CIFAR-100 dataset consists of 60000 32x32 colour images in 100 classes, with 600 images per class.",id:"cifar100"},{description:"Multiple images of celebrities, used for facial expression translation.",id:"CelebA"}],demo:{inputs:[{label:"Seed",content:"42",type:"text"},{label:"Number of images to generate:",content:"4",type:"text"}],outputs:[{filename:"unconditional-image-generation-output.jpeg",type:"img"}]},metrics:[{description:"The inception score (IS) evaluates the quality of generated images. It measures the diversity of the generated images (the model predictions are evenly distributed across all possible labels) and their 'distinction' or 'sharpness' (the model confidently predicts a single label for each image).",id:"Inception score (IS)"},{description:"The Fréchet Inception Distance (FID) evaluates the quality of images created by a generative model by calculating the distance between feature vectors for real and generated images.",id:"Fréchet Inception Distance (FID)"}],models:[{description:"High-quality image generation model trained on the CIFAR-10 dataset. It synthesizes images of the ten classes presented in the dataset using diffusion probabilistic models, a class of latent variable models inspired by considerations from nonequilibrium thermodynamics.",id:"google/ddpm-cifar10-32"},{description:"High-quality image generation model trained on the 256x256 CelebA-HQ dataset. It synthesizes images of faces using diffusion probabilistic models, a class of latent variable models inspired by considerations from nonequilibrium thermodynamics.",id:"google/ddpm-celebahq-256"}],spaces:[{description:"An application that can generate realistic faces.",id:"CompVis/celeba-latent-diffusion"}],summary:"Unconditional image generation is the task of generating images with no condition in any context (like a prompt text or another image). 
Once trained, the model will create images that resemble its training data distribution.",widgetModels:[""],youtubeId:""},Nw=Dw,Mw={datasets:[{description:"Benchmark dataset used for video classification with videos that belong to 400 classes.",id:"kinetics400"}],demo:{inputs:[{filename:"video-classification-input.gif",type:"img"}],outputs:[{type:"chart",data:[{label:"Playing Guitar",score:.514},{label:"Playing Tennis",score:.193},{label:"Cooking",score:.068}]}]},metrics:[{description:"",id:"accuracy"},{description:"",id:"recall"},{description:"",id:"precision"},{description:"",id:"f1"}],models:[{description:"Strong Video Classification model trained on the Kinetics 400 dataset.",id:"google/vivit-b-16x2-kinetics400"},{description:"Strong Video Classification model trained on the Kinetics 400 dataset.",id:"microsoft/xclip-base-patch32"}],spaces:[{description:"An application that classifies video at different timestamps.",id:"nateraw/lavila"},{description:"An application that classifies video.",id:"fcakyon/video-classification"}],summary:"Video classification is the task of assigning a label or class to an entire video. Each video is expected to have only one class. Video classification models take a video as input and return a prediction about which class the video belongs to.",widgetModels:[],youtubeId:""},Ow=Mw,Fw={datasets:[{description:"A widely used dataset containing questions (with answers) about images.",id:"Graphcore/vqa"},{description:"A dataset to benchmark visual reasoning based on text in images.",id:"facebook/textvqa"}],demo:{inputs:[{filename:"elephant.jpeg",type:"img"},{label:"Question",content:"What is in this image?",type:"text"}],outputs:[{type:"chart",data:[{label:"elephant",score:.97},{label:"elephants",score:.06},{label:"animal",score:.003}]}]},isPlaceholder:!1,metrics:[{description:"",id:"accuracy"},{description:"Measures how much a predicted answer differs from the ground truth based on the difference in their semantic meaning.",id:"wu-palmer similarity"}],models:[{description:"A visual question answering model trained to convert charts and plots to text.",id:"google/deplot"},{description:"A visual question answering model trained for mathematical reasoning and chart derendering from images.",id:"google/matcha-base"},{description:"A strong visual question answering model that answers questions from book covers.",id:"google/pix2struct-ocrvqa-large"}],spaces:[{description:"An application that compares visual question answering models across different tasks.",id:"merve/pix2struct"},{description:"An application that can answer questions based on images.",id:"nielsr/vilt-vqa"},{description:"An application that can caption images and answer questions about a given image.",id:"Salesforce/BLIP"},{description:"An application that can caption images and answer questions about a given image.",id:"vumichien/Img2Prompt"}],summary:"Visual Question Answering is the task of answering open-ended questions based on an image. 
These models output natural language responses to natural language questions.",widgetModels:["dandelin/vilt-b32-finetuned-vqa"],youtubeId:""},Uw=Fw,zw={datasets:[{description:"A widely used benchmark dataset for multiple variants of text classification.",id:"nyu-mll/glue"},{description:"The Multi-Genre Natural Language Inference (MultiNLI) corpus is a crowd-sourced collection of 433k sentence pairs annotated with textual entailment information.",id:"nyu-mll/multi_nli"},{description:"FEVER is a publicly available dataset for fact extraction and verification against textual sources.",id:"fever/fever"}],demo:{inputs:[{label:"Text Input",content:"Dune is the best movie ever.",type:"text"},{label:"Candidate Labels",content:"CINEMA, ART, MUSIC",type:"text"}],outputs:[{type:"chart",data:[{label:"CINEMA",score:.9},{label:"ART",score:.1},{label:"MUSIC",score:0}]}]},metrics:[],models:[{description:"Powerful zero-shot text classification model.",id:"facebook/bart-large-mnli"},{description:"Powerful zero-shot multilingual text classification model that can accomplish multiple tasks.",id:"MoritzLaurer/mDeBERTa-v3-base-xnli-multilingual-nli-2mil7"}],spaces:[],summary:"Zero-shot text classification is a task in natural language processing where a model is trained on a set of labeled examples but is then able to classify new examples from previously unseen classes.",widgetModels:["facebook/bart-large-mnli"]},$w=zw,Bw={datasets:[{description:"",id:""}],demo:{inputs:[{filename:"image-classification-input.jpeg",type:"img"},{label:"Classes",content:"cat, dog, bird",type:"text"}],outputs:[{type:"chart",data:[{label:"Cat",score:.664},{label:"Dog",score:.329},{label:"Bird",score:.008}]}]},metrics:[{description:"Computes how often the correct label appears among the top K predicted labels.",id:"top-K accuracy"}],models:[{description:"Robust image classification model trained on publicly available image-caption data.",id:"openai/clip-vit-base-patch16"},{description:"Strong zero-shot image classification model.",id:"google/siglip-so400m-patch14-224"},{description:"Small yet powerful zero-shot image classification model that can run on edge devices.",id:"apple/MobileCLIP-S1-OpenCLIP"},{description:"Strong image classification model for biomedical domain.",id:"microsoft/BiomedCLIP-PubMedBERT_256-vit_base_patch16_224"}],spaces:[{description:"An application that leverages zero-shot image classification to find the best captions for generating an image.",id:"pharma/CLIP-Interrogator"},{description:"An application to compare different zero-shot image classification models.",id:"merve/compare_clip_siglip"}],summary:"Zero-shot image classification is the task of classifying images into classes that were not seen during training.",widgetModels:["google/siglip-so400m-patch14-224"],youtubeId:""},Vw=Bw,Hw={datasets:[],demo:{inputs:[{filename:"zero-shot-object-detection-input.jpg",type:"img"},{label:"Classes",content:"cat, dog, bird",type:"text"}],outputs:[{filename:"zero-shot-object-detection-output.jpg",type:"img"}]},metrics:[{description:"The Average Precision (AP) metric is the Area Under the PR Curve (AUC-PR). 
It is calculated separately for each class.",id:"Average Precision"},{description:"The Mean Average Precision (mAP) metric is the overall average of the AP values.",id:"Mean Average Precision"},{description:"The APα metric is the Average Precision at an IoU threshold of α; for example, AP50 and AP75.",id:"APα"}],models:[{description:"Solid zero-shot object detection model.",id:"IDEA-Research/grounding-dino-base"},{description:"Cutting-edge zero-shot object detection model.",id:"google/owlv2-base-patch16-ensemble"}],spaces:[{description:"A demo to try the state-of-the-art zero-shot object detection model, OWLv2.",id:"merve/owlv2"},{description:"A demo that combines a zero-shot object detection and mask generation model for zero-shot segmentation.",id:"merve/OWLSAM"}],summary:"Zero-shot object detection is a computer vision task to detect objects and their classes in images, without any prior training or knowledge of the classes. Zero-shot object detection models receive an image as input, as well as a list of candidate classes, and output the bounding boxes and labels where the objects have been detected.",widgetModels:[],youtubeId:""},qw=Hw,Ww={datasets:[{description:"A large dataset of over 10 million 3D objects.",id:"allenai/objaverse-xl"},{description:"A dataset of isolated object images for evaluating image-to-3D models.",id:"dylanebert/iso3d"}],demo:{inputs:[{filename:"image-to-3d-image-input.png",type:"img"}],outputs:[{label:"Result",content:"image-to-3d-3d-output-filename.glb",type:"text"}]},metrics:[],models:[{description:"Fast image-to-3D mesh model by Tencent.",id:"TencentARC/InstantMesh"},{description:"Fast image-to-3D mesh model by StabilityAI.",id:"stabilityai/TripoSR"},{description:"A scaled-up image-to-3D mesh model derived from TripoSR.",id:"hwjiang/Real3D"},{description:"Generative 3D gaussian splatting model.",id:"ashawkey/LGM"}],spaces:[{description:"Leaderboard to evaluate image-to-3D models.",id:"dylanebert/3d-arena"},{description:"Image-to-3D demo with mesh outputs.",id:"TencentARC/InstantMesh"},{description:"Image-to-3D demo with mesh outputs.",id:"stabilityai/TripoSR"},{description:"Image-to-3D demo with mesh outputs.",id:"hwjiang/Real3D"},{description:"Image-to-3D demo with splat outputs.",id:"dylanebert/LGM-mini"}],summary:"Image-to-3D models take in image input and produce 3D output.",widgetModels:[],youtubeId:""},Kw=Ww,Qw={datasets:[{description:"A large dataset of over 10 million 3D objects.",id:"allenai/objaverse-xl"},{description:"Descriptive captions for 3D objects in Objaverse.",id:"tiange/Cap3D"}],demo:{inputs:[{label:"Prompt",content:"a cat statue",type:"text"}],outputs:[{label:"Result",content:"text-to-3d-3d-output-filename.glb",type:"text"}]},metrics:[],models:[{description:"Text-to-3D mesh model by OpenAI.",id:"openai/shap-e"},{description:"Generative 3D gaussian splatting model.",id:"ashawkey/LGM"}],spaces:[{description:"Text-to-3D demo with mesh outputs.",id:"hysts/Shap-E"},{description:"Text/image-to-3D demo with splat outputs.",id:"ashawkey/LGM"}],summary:"Text-to-3D models take in text input and produce 3D output.",widgetModels:[],youtubeId:""},Xw=Qw,Yw={datasets:[{description:"A dataset of hand keypoints of over 500k examples.",id:"Vincent-luo/hagrid-mediapipe-hands"}],demo:{inputs:[{filename:"keypoint-detection-input.png",type:"img"}],outputs:[{filename:"keypoint-detection-output.png",type:"img"}]},metrics:[],models:[{description:"A robust keypoint detection model.",id:"magic-leap-community/superpoint"},{description:"Strong keypoint 
detection model used to detect human pose.",id:"facebook/sapiens-pose-1b"}],spaces:[{description:"An application that detects hand keypoints in real-time.",id:"datasciencedojo/Hand-Keypoint-Detection-Realtime"},{description:"An application to try a universal keypoint detection model.",id:"merve/SuperPoint"}],summary:"Keypoint detection is the task of identifying meaningful distinctive points or features in an image.",widgetModels:[],youtubeId:""},Jw=Yw,Zw={datasets:[{description:"Multiple-choice questions and answers about videos.",id:"lmms-lab/Video-MME"},{description:"A dataset of instructions and question-answer pairs about videos.",id:"lmms-lab/VideoChatGPT"},{description:"Large video understanding dataset.",id:"HuggingFaceFV/finevideo"}],demo:{inputs:[{filename:"video-text-to-text-input.gif",type:"img"},{label:"Text Prompt",content:"What is happening in this video?",type:"text"}],outputs:[{label:"Answer",content:"The video shows a series of images showing a fountain with water jets and a variety of colorful flowers and butterflies in the background.",type:"text"}]},metrics:[],models:[{description:"A robust video-text-to-text model that can take in image and video inputs.",id:"llava-hf/llava-onevision-qwen2-72b-ov-hf"},{description:"Large and powerful video-text-to-text model that can take in image and video inputs.",id:"llava-hf/LLaVA-NeXT-Video-34B-hf"}],spaces:[{description:"An application to chat with a video-text-to-text model.",id:"llava-hf/video-llava"},{description:"A leaderboard for various video-text-to-text models.",id:"opencompass/openvlm_video_leaderboard"}],summary:"Video-text-to-text models take in a video and a text prompt and output text. These models are also called video-language models.",widgetModels:[""],youtubeId:""},Gw=Zw,ex={"audio-classification":["speechbrain","transformers","transformers.js"],"audio-to-audio":["asteroid","fairseq","speechbrain"],"automatic-speech-recognition":["espnet","nemo","speechbrain","transformers","transformers.js"],"depth-estimation":["transformers","transformers.js"],"document-question-answering":["transformers","transformers.js"],"feature-extraction":["sentence-transformers","transformers","transformers.js"],"fill-mask":["transformers","transformers.js"],"graph-ml":["transformers"],"image-classification":["keras","timm","transformers","transformers.js"],"image-feature-extraction":["timm","transformers"],"image-segmentation":["transformers","transformers.js"],"image-text-to-text":["transformers"],"image-to-image":["diffusers","transformers","transformers.js"],"image-to-text":["transformers","transformers.js"],"image-to-video":["diffusers"],"keypoint-detection":["transformers"],"video-classification":["transformers"],"mask-generation":["transformers"],"multiple-choice":["transformers"],"object-detection":["transformers","transformers.js"],other:[],"question-answering":["adapter-transformers","allennlp","transformers","transformers.js"],robotics:[],"reinforcement-learning":["transformers","stable-baselines3","ml-agents","sample-factory"],"sentence-similarity":["sentence-transformers","spacy","transformers.js"],summarization:["transformers","transformers.js"],"table-question-answering":["transformers"],"table-to-text":["transformers"],"tabular-classification":["sklearn"],"tabular-regression":["sklearn"],"tabular-to-text":["transformers"],"text-classification":["adapter-transformers","setfit","spacy","transformers","transformers.js"],"text-generation":["transformers","transformers.js"],"text-retrieval":[],"text-to-image":["diffusers"],"te
xt-to-speech":["espnet","tensorflowtts","transformers","transformers.js"],"text-to-audio":["transformers","transformers.js"],"text-to-video":["diffusers"],"text2text-generation":["transformers","transformers.js"],"time-series-forecasting":[],"token-classification":["adapter-transformers","flair","spacy","span-marker","stanza","transformers","transformers.js"],translation:["transformers","transformers.js"],"unconditional-image-generation":["diffusers"],"video-text-to-text":["transformers"],"visual-question-answering":["transformers","transformers.js"],"voice-activity-detection":[],"zero-shot-classification":["transformers","transformers.js"],"zero-shot-image-classification":["transformers","transformers.js"],"zero-shot-object-detection":["transformers","transformers.js"],"text-to-3d":["diffusers"],"image-to-3d":["diffusers"],"any-to-any":["transformers"]};function K(e,t=zp){return{...t,id:e,label:su[e].name,libraries:ex[e]}}K("any-to-any",zp),K("audio-classification",j0),K("audio-to-audio",P0),K("automatic-speech-recognition",D0),K("depth-estimation",rw),K("document-question-answering",M0),K("feature-extraction",F0),K("fill-mask",z0),K("image-classification",B0),K("image-feature-extraction",H0),K("image-segmentation",Z0),K("image-to-image",W0),K("image-text-to-text",Y0),K("image-to-text",Q0),K("keypoint-detection",Jw),K("mask-generation",ew),K("object-detection",nw),K("video-classification",Ow),K("question-answering",uw),K("reinforcement-learning",sw),K("sentence-similarity",dw),K("summarization",pw),K("table-question-answering",hw),K("tabular-classification",yw),K("tabular-regression",ww),K("text-classification",Iw),K("text-generation",Lw),K("text-to-image",bw),K("text-to-speech",_w),K("text-to-video",Rw),K("token-classification",Ew),K("translation",Cw),K("unconditional-image-generation",Nw),K("video-text-to-text",Gw),K("visual-question-answering",Uw),K("zero-shot-classification",$w),K("zero-shot-image-classification",Vw),K("zero-shot-object-detection",qw),K("text-to-3d",Xw),K("image-to-3d",Kw);var $p="custom_code";function fn(e){const t=e.split("/");return t.length===1?t[0]:t[1]}var tx=e=>JSON.stringify(e).slice(1,-1),nx=e=>{var t,n;return[`from adapters import AutoAdapterModel model = AutoAdapterModel.from_pretrained("${(n=(t=e.config)==null?void 0:t.adapter_transformers)==null?void 0:n.model_name}") model.load_adapter("${e.id}", set_active=True)`]},ix=e=>[`import allennlp_models from allennlp.predictors.predictor import Predictor predictor = Predictor.from_path("hf://${e.id}")`],rx=e=>[`import allennlp_models from allennlp.predictors.predictor import Predictor predictor = Predictor.from_path("hf://${e.id}") predictor_input = {"passage": "My name is Wolfgang and I live in Berlin", "question": "Where do I live?"} predictions = predictor.predict_json(predictor_input)`],ax=e=>e.tags.includes("question-answering")?rx(e):ix(e),ox=e=>[`from asteroid.models import BaseModel model = BaseModel.from_pretrained("${e.id}")`],sx=e=>{const t=`# Watermark Generator from audioseal import AudioSeal model = AudioSeal.load_generator("${e.id}") # pass a tensor (tensor_wav) of shape (batch, channels, samples) and a sample rate wav, sr = tensor_wav, 16000 watermark = model.get_watermark(wav, sr) watermarked_audio = wav + watermark`,n=`# Watermark Detector from audioseal import AudioSeal detector = AudioSeal.load_detector("${e.id}") result, message = detector.detect_watermark(watermarked_audio, sr)`;return[t,n]};function lu(e){var t,n;return((n=(t=e.cardData)==null?void 0:t.base_model)==null?void 
0:n.toString())??"fill-in-base-model"}function Bp(e){var n,i,r;const t=((i=(n=e.widgetData)==null?void 0:n[0])==null?void 0:i.text)??((r=e.cardData)==null?void 0:r.instance_prompt);if(t)return tx(t)}var lx=e=>[`from bertopic import BERTopic model = BERTopic.load("${e.id}")`],ux=e=>[`from bm25s.hf import BM25HF retriever = BM25HF.load_from_hub("${e.id}")`],cx=e=>{let t,n,i;return t="",n="",i="",e.id==="depth-anything/Depth-Anything-V2-Small"?(t="vits",n="64",i="[48, 96, 192, 384]"):e.id==="depth-anything/Depth-Anything-V2-Base"?(t="vitb",n="128",i="[96, 192, 384, 768]"):e.id==="depth-anything/Depth-Anything-V2-Large"&&(t="vitl",n="256",i="[256, 512, 1024, 1024]"),[` # Install from https://github.com/DepthAnything/Depth-Anything-V2 # Load the model and infer depth from an image import cv2 import torch from huggingface_hub import hf_hub_download from depth_anything_v2.dpt import DepthAnythingV2 # instantiate the model model = DepthAnythingV2(encoder="${t}", features=${n}, out_channels=${i}) # load the weights filepath = hf_hub_download(repo_id="${e.id}", filename="depth_anything_v2_${t}.pth", repo_type="model") state_dict = torch.load(filepath, map_location="cpu") model.load_state_dict(state_dict) model.eval() raw_img = cv2.imread("your/image/path") depth = model.infer_image(raw_img) # HxW raw depth map in numpy `]},dx=e=>[`# Download checkpoint pip install huggingface-hub huggingface-cli download --local-dir checkpoints ${e.id}`,`import depth_pro # Load model and preprocessing transform model, transform = depth_pro.create_model_and_transforms() model.eval() # Load and preprocess an image. image, _, f_px = depth_pro.load_rgb("example.png") image = transform(image) # Run inference. prediction = model.infer(image, f_px=f_px) # Results: 1. Depth in meters depth = prediction["depth"] # Results: 2. Focal length in pixels focallength_px = prediction["focallength_px"]`],Vp="Astronaut in a jungle, cold color palette, muted colors, detailed, 8k",fx=e=>[`from diffusers import DiffusionPipeline pipe = DiffusionPipeline.from_pretrained("${e.id}") prompt = "${Bp(e)??Vp}" image = pipe(prompt).images[0]`],px=e=>[`from diffusers import ControlNetModel, StableDiffusionControlNetPipeline controlnet = ControlNetModel.from_pretrained("${e.id}") pipe = StableDiffusionControlNetPipeline.from_pretrained( "${lu(e)}", controlnet=controlnet )`],mx=e=>[`from diffusers import DiffusionPipeline pipe = DiffusionPipeline.from_pretrained("${lu(e)}") pipe.load_lora_weights("${e.id}") prompt = "${Bp(e)??Vp}" image = pipe(prompt).images[0]`],hx=e=>[`from diffusers import DiffusionPipeline pipe = DiffusionPipeline.from_pretrained("${lu(e)}") pipe.load_textual_inversion("${e.id}")`],gx=e=>e.tags.includes("controlnet")?px(e):e.tags.includes("lora")?mx(e):e.tags.includes("textual_inversion")?hx(e):fx(e),yx=e=>{const t=`# Pipeline for Stable Diffusion 3 from diffusionkit.mlx import DiffusionPipeline pipeline = DiffusionPipeline( shift=3.0, use_t5=False, model_version="${e.id}", low_memory_mode=True, a16=True, w16=True, )`,n=`# Pipeline for Flux from diffusionkit.mlx import FluxPipeline pipeline = FluxPipeline( shift=1.0, model_version="${e.id}", low_memory_mode=True, a16=True, w16=True, )`,i=`# Image Generation HEIGHT = 512 WIDTH = 512 NUM_STEPS = ${e.tags.includes("flux")?4:50} CFG_WEIGHT = ${e.tags.includes("flux")?0:5} image, _ = pipeline.generate_image( "a photo of a cat", cfg_weight=CFG_WEIGHT, num_steps=NUM_STEPS, latent_size=(HEIGHT // 8, WIDTH // 8), )`;return[e.tags.includes("flux")?n:t,i]},vx=e=>[`# pip install --no-binary :all: cartesia-pytorch from cartesia_pytorch import 
ReneLMHeadModel from transformers import AutoTokenizer model = ReneLMHeadModel.from_pretrained("${e.id}") tokenizer = AutoTokenizer.from_pretrained("allenai/OLMo-1B-hf") in_message = ["Rene Descartes was"] inputs = tokenizer(in_message, return_tensors="pt") outputs = model.generate(inputs.input_ids, max_length=50, top_k=100, top_p=0.99) out_message = tokenizer.batch_decode(outputs, skip_special_tokens=True)[0] print(out_message)`],wx=e=>[`import mlx.core as mx import cartesia_mlx as cmx model = cmx.from_pretrained("${e.id}") model.set_dtype(mx.float32) prompt = "Rene Descartes was" for text in model.generate( prompt, max_tokens=500, eval_every_n=5, verbose=True, top_p=0.99, temperature=0.85, ): print(text, end="", flush=True) `],xx=e=>{const t=fn(e.id).replaceAll("-","_");return[`# Load it from the Hub directly import edsnlp nlp = edsnlp.load("${e.id}") `,`# Or install it as a package !pip install git+https://huggingface.co/${e.id} # and import it as a module import ${t} nlp = ${t}.load() # or edsnlp.load("${t}") `]},bx=e=>[`from espnet2.bin.tts_inference import Text2Speech model = Text2Speech.from_pretrained("${e.id}") speech, *_ = model("text to generate speech from")`],kx=e=>[`from espnet2.bin.asr_inference import Speech2Text import soundfile model = Speech2Text.from_pretrained( "${e.id}" ) speech, rate = soundfile.read("speech.wav") text, *_ = model(speech)[0]`],_x=()=>["unknown model type (must be text-to-speech or automatic-speech-recognition)"],Sx=e=>e.tags.includes("text-to-speech")?bx(e):e.tags.includes("automatic-speech-recognition")?kx(e):_x(),Ex=e=>[`from fairseq.checkpoint_utils import load_model_ensemble_and_task_from_hf_hub models, cfg, task = load_model_ensemble_and_task_from_hf_hub( "${e.id}" )`],Tx=e=>[`from flair.models import SequenceTagger tagger = SequenceTagger.load("${e.id}")`],Cx=e=>[`from gliner import GLiNER model = GLiNER.from_pretrained("${e.id}")`],Ax=e=>[`# CLI usage # see docs: https://ai-riksarkivet.github.io/htrflow/latest/getting_started/quick_start.html htrflow pipeline `,`# Python usage from htrflow.pipeline.pipeline import Pipeline from htrflow.pipeline.steps import Task from htrflow.models.framework.model import ModelClass pipeline = Pipeline( [ Task( ModelClass, {"model": "${e.id}"}, {} ), ])`],Ix=e=>[`# Available backend options are: "jax", "torch", "tensorflow". import os os.environ["KERAS_BACKEND"] = "jax" import keras model = keras.saving.load_model("hf://${e.id}") `],jx=e=>[`# Available backend options are: "jax", "torch", "tensorflow". import os os.environ["KERAS_BACKEND"] = "jax" import keras_nlp tokenizer = keras_nlp.models.Tokenizer.from_preset("hf://${e.id}") backbone = keras_nlp.models.Backbone.from_preset("hf://${e.id}") `],Lx=e=>[`# Available backend options are: "jax", "torch", "tensorflow". import os os.environ["KERAS_BACKEND"] = "jax" import keras_hub # Load a task-specific model (*replace CausalLM with your task*) model = keras_hub.models.CausalLM.from_preset("hf://${e.id}", dtype="bfloat16") # Possible tasks are CausalLM, TextToImage, ImageClassifier, ... # full list here: https://keras.io/api/keras_hub/models/#api-documentation `],Px=e=>[`from llama_cpp import Llama llm = Llama.from_pretrained( repo_id="${e.id}", filename="{{GGUF_FILE}}", ) llm.create_chat_completion( messages = [ { "role": "user", "content": "What is the capital of France?" } ] )`],Rx=e=>[`# Note: 'keras<3.x' or 'tf_keras' must be installed (legacy) # See https://github.com/keras-team/tf-keras for more details. 
from huggingface_hub import from_pretrained_keras model = from_pretrained_keras("${e.id}") `],Dx=e=>[`from mamba_ssm import MambaLMHeadModel model = MambaLMHeadModel.from_pretrained("${e.id}")`],Nx=e=>[`# Install from https://github.com/Camb-ai/MARS5-TTS from inference import Mars5TTS mars5 = Mars5TTS.from_pretrained("${e.id}")`],Mx=()=>[`# Install from https://github.com/buaacyw/MeshAnything.git from MeshAnything.models.meshanything import MeshAnything # refer to https://github.com/buaacyw/MeshAnything/blob/main/main.py#L91 on how to define args # and https://github.com/buaacyw/MeshAnything/blob/main/app.py regarding usage model = MeshAnything(args)`],Ox=e=>[`import open_clip model, preprocess_train, preprocess_val = open_clip.create_model_and_transforms('hf-hub:${e.id}') tokenizer = open_clip.get_tokenizer('hf-hub:${e.id}')`],Fx=e=>{var t,n;if((n=(t=e.config)==null?void 0:t.architectures)!=null&&n[0]){const i=e.config.architectures[0];return[[`from paddlenlp.transformers import AutoTokenizer, ${i}`,"",`tokenizer = AutoTokenizer.from_pretrained("${e.id}", from_hf_hub=True)`,`model = ${i}.from_pretrained("${e.id}", from_hf_hub=True)`].join(` `)]}else return[["# ⚠️ Type of model unknown","from paddlenlp.transformers import AutoTokenizer, AutoModel","",`tokenizer = AutoTokenizer.from_pretrained("${e.id}", from_hf_hub=True)`,`model = AutoModel.from_pretrained("${e.id}", from_hf_hub=True)`].join(` `)]},Ux=e=>[`from pyannote.audio import Pipeline pipeline = Pipeline.from_pretrained("${e.id}") # inference on the whole file pipeline("file.wav") # inference on an excerpt from pyannote.core import Segment excerpt = Segment(start=2.0, end=5.0) from pyannote.audio import Audio waveform, sample_rate = Audio().crop("file.wav", excerpt) pipeline({"waveform": waveform, "sample_rate": sample_rate})`],zx=e=>[`from pyannote.audio import Model, Inference model = Model.from_pretrained("${e.id}") inference = Inference(model) # inference on the whole file inference("file.wav") # inference on an excerpt from pyannote.core import Segment excerpt = Segment(start=2.0, end=5.0) inference.crop("file.wav", excerpt)`],$x=e=>e.tags.includes("pyannote-audio-pipeline")?Ux(e):zx(e),Bx=e=>[`from relik import Relik relik = Relik.from_pretrained("${e.id}")`],Vx=e=>[`from tensorflow_tts.inference import AutoProcessor, TFAutoModel processor = AutoProcessor.from_pretrained("${e.id}") model = TFAutoModel.from_pretrained("${e.id}") `],Hx=e=>[`from tensorflow_tts.inference import TFAutoModel model = TFAutoModel.from_pretrained("${e.id}") audios = model.inference(mels) `],qx=e=>[`from tensorflow_tts.inference import TFAutoModel model = TFAutoModel.from_pretrained("${e.id}") `],Wx=e=>e.tags.includes("text-to-mel")?Vx(e):e.tags.includes("mel-to-wav")?Hx(e):qx(e),Kx=e=>[`import timm model = timm.create_model("hf_hub:${e.id}", pretrained=True)`],Qx=()=>[`# pip install sae-lens from sae_lens import SAE sae, cfg_dict, sparsity = SAE.from_pretrained( release = "RELEASE_ID", # e.g., "gpt2-small-res-jb". See other options in https://github.com/jbloomAus/SAELens/blob/main/sae_lens/pretrained_saes.yaml sae_id = "SAE_ID", # e.g., "blocks.8.hook_resid_pre". 
Won't always be a hook point )`],Xx=()=>[`# seed_story_cfg_path refers to 'https://github.com/TencentARC/SEED-Story/blob/master/configs/clm_models/agent_7b_sft.yaml' # llm_cfg_path refers to 'https://github.com/TencentARC/SEED-Story/blob/master/configs/clm_models/llama2chat7b_lora.yaml' from omegaconf import OmegaConf import hydra # load Llama2 llm_cfg = OmegaConf.load(llm_cfg_path) llm = hydra.utils.instantiate(llm_cfg, torch_dtype="fp16") # initialize seed_story seed_story_cfg = OmegaConf.load(seed_story_cfg_path) seed_story = hydra.utils.instantiate(seed_story_cfg, llm=llm) `],Yx=(e,t)=>[`import joblib from skops.hub_utils import download download("${e.id}", "path_to_folder") model = joblib.load( "${t}" ) # only load pickle files from sources you trust # read more about it here https://skops.readthedocs.io/en/stable/persistence.html`],Jx=(e,t)=>[`from skops.hub_utils import download from skops.io import load download("${e.id}", "path_to_folder") # make sure model file is in skops format # if model is a pickle file, make sure it's from a source you trust model = load("path_to_folder/${t}")`],Zx=e=>[`from huggingface_hub import hf_hub_download import joblib model = joblib.load( hf_hub_download("${e.id}", "sklearn_model.joblib") ) # only load pickle files from sources you trust # read more about it here https://skops.readthedocs.io/en/stable/persistence.html`],Gx=e=>{var t,n,i,r,a;if(e.tags.includes("skops")){const o=(i=(n=(t=e.config)==null?void 0:t.sklearn)==null?void 0:n.model)==null?void 0:i.file,s=(a=(r=e.config)==null?void 0:r.sklearn)==null?void 0:a.model_format;return o?s==="pickle"?Yx(e,o):Jx(e,o):["# ⚠️ Model filename not specified in config.json"]}else return Zx(e)},eb=e=>[`import torch import torchaudio from einops import rearrange from stable_audio_tools import get_pretrained_model from stable_audio_tools.inference.generation import generate_diffusion_cond device = "cuda" if torch.cuda.is_available() else "cpu" # Download model model, model_config = get_pretrained_model("${e.id}") sample_rate = model_config["sample_rate"] sample_size = model_config["sample_size"] model = model.to(device) # Set up text and timing conditioning conditioning = [{ "prompt": "128 BPM tech house drum loop", }] # Generate stereo audio output = generate_diffusion_cond( model, conditioning=conditioning, sample_size=sample_size, device=device ) # Rearrange audio batch to a single sequence output = rearrange(output, "b d n -> d (b n)") # Peak normalize, clip, convert to int16, and save to file output = output.to(torch.float32).div(torch.max(torch.abs(output))).clamp(-1, 1).mul(32767).to(torch.int16).cpu() torchaudio.save("output.wav", output, sample_rate)`],tb=e=>[`from huggingface_hub import from_pretrained_fastai learn = from_pretrained_fastai("${e.id}")`],nb=e=>{const t=`# Use SAM2 with images import torch from sam2.sam2_image_predictor import SAM2ImagePredictor predictor = SAM2ImagePredictor.from_pretrained("${e.id}") with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16): predictor.set_image(<your_image>) masks, _, _ = predictor.predict(<input_prompts>)`,n=`# Use SAM2 with videos import torch from sam2.sam2_video_predictor import SAM2VideoPredictor predictor = SAM2VideoPredictor.from_pretrained("${e.id}") with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16): state = predictor.init_state(<your_video>) # add new prompts and instantly get the output on the same frame frame_idx, object_ids, masks = predictor.add_new_points(state, <your_prompts>) # propagate the prompts to get masklets throughout the video for frame_idx, 
object_ids, masks in predictor.propagate_in_video(state): ...`;return[t,n]},ib=e=>[`python -m sample_factory.huggingface.load_from_hub -r ${e.id} -d ./train_dir`];function rb(e){var n;const t=(n=e.widgetData)==null?void 0:n[0];if(t)return[t.source_sentence,...t.sentences]}var ab=e=>{const t=e.tags.includes($p)?", trust_remote_code=True":"",n=rb(e)??["The weather is lovely today.","It's so sunny outside!","He drove to the stadium."];return[`from sentence_transformers import SentenceTransformer model = SentenceTransformer("${e.id}"${t}) sentences = ${JSON.stringify(n,null,4)} embeddings = model.encode(sentences) similarities = model.similarity(embeddings, embeddings) print(similarities.shape) # [${n.length}, ${n.length}]`]},ob=e=>[`from setfit import SetFitModel model = SetFitModel.from_pretrained("${e.id}")`],sb=e=>[`!pip install https://huggingface.co/${e.id}/resolve/main/${fn(e.id)}-any-py3-none-any.whl # Using spacy.load(). import spacy nlp = spacy.load("${fn(e.id)}") # Importing as module. import ${fn(e.id)} nlp = ${fn(e.id)}.load()`],lb=e=>[`from span_marker import SpanMarkerModel model = SpanMarkerModel.from_pretrained("${e.id}")`],ub=e=>[`import stanza stanza.download("${fn(e.id).replace("stanza-","")}") nlp = stanza.Pipeline("${fn(e.id).replace("stanza-","")}")`],cb=e=>{switch(e){case"EncoderClassifier":return"classify_file";case"EncoderDecoderASR":case"EncoderASR":return"transcribe_file";case"SpectralMaskEnhancement":return"enhance_file";case"SepformerSeparation":return"separate_file";default:return}},db=e=>{var i,r;const t=(r=(i=e.config)==null?void 0:i.speechbrain)==null?void 0:r.speechbrain_interface;if(t===void 0)return["# interface not specified in config.json"];const n=cb(t);return n===void 0?["# interface in config.json invalid"]:[`from speechbrain.pretrained import ${t} model = ${t}.from_hparams( "${e.id}" ) model.${n}("file.wav")`]},fb=e=>{var r,a,o,s,l;const t=e.transformersInfo;if(!t)return["# ⚠️ Type of model unknown"];const n=e.tags.includes($p)?", trust_remote_code=True":"";let i;if(t.processor){const u=t.processor==="AutoTokenizer"?"tokenizer":t.processor==="AutoFeatureExtractor"?"extractor":"processor";i=["# Load model directly",`from transformers import ${t.processor}, ${t.auto_model}`,"",`${u} = ${t.processor}.from_pretrained("${e.id}"`+n+")",`model = ${t.auto_model}.from_pretrained("${e.id}"`+n+")"].join(` `)}else i=["# Load model directly",`from transformers import ${t.auto_model}`,`model = ${t.auto_model}.from_pretrained("${e.id}"`+n+")"].join(` `);if(e.pipeline_tag&&((r=C0.transformers)!=null&&r.includes(e.pipeline_tag))){const u=["# Use a pipeline as a high-level helper","from transformers import pipeline",""];return e.tags.includes("conversational")&&((o=(a=e.config)==null?void 0:a.tokenizer_config)!=null&&o.chat_template)&&u.push("messages = [",' {"role": "user", "content": "Who are you?"},',"]"),u.push(`pipe = pipeline("${e.pipeline_tag}", model="${e.id}"`+n+")"),e.tags.includes("conversational")&&((l=(s=e.config)==null?void 0:s.tokenizer_config)!=null&&l.chat_template)&&u.push("pipe(messages)"),[u.join(` `),i]}return[i]},pb=e=>{if(!e.pipeline_tag)return["// ⚠️ Unknown pipeline tag"];const t="@huggingface/transformers";return[`// npm i ${t} import { pipeline } from '${t}'; // Allocate pipeline const pipe = await pipeline('${e.pipeline_tag}', '${e.id}');`]},mb=e=>{switch(e){case"CAUSAL_LM":return"CausalLM";case"SEQ_2_SEQ_LM":return"Seq2SeqLM";case"TOKEN_CLS":return"TokenClassification";case"SEQ_CLS":return"SequenceClassification";default:return}},hb=e=>{var 
r;const{base_model_name_or_path:t,task_type:n}=((r=e.config)==null?void 0:r.peft)??{},i=mb(n);return i?t?[`from peft import PeftModel, PeftConfig from transformers import AutoModelFor${i} config = PeftConfig.from_pretrained("${e.id}") base_model = AutoModelFor${i}.from_pretrained("${t}") model = PeftModel.from_pretrained(base_model, "${e.id}")`]:["Base model is not found."]:["Task type is invalid."]},gb=e=>[`from huggingface_hub import hf_hub_download import fasttext model = fasttext.load_model(hf_hub_download("${e.id}", "model.bin"))`],yb=e=>[`from huggingface_sb3 import load_from_hub checkpoint = load_from_hub( repo_id="${e.id}", filename="{MODEL FILENAME}.zip", )`],vb=(e,t)=>{switch(e){case"ASR":return[`import nemo.collections.asr as nemo_asr asr_model = nemo_asr.models.ASRModel.from_pretrained("${t.id}") transcriptions = asr_model.transcribe(["file.wav"])`];default:return}},wb=e=>[`mlagents-load-from-hf --repo-id="${e.id}" --local-dir="./downloads"`],xb=()=>[`string modelName = "[Your model name here].sentis"; Model model = ModelLoader.Load(Application.streamingAssetsPath + "/" + modelName); IWorker engine = WorkerFactory.CreateWorker(BackendType.GPUCompute, model); // Please see provided C# file for more details `],bb=e=>[`from Trainer_finetune import Model model = Model.from_pretrained("${e.id}")`],kb=e=>[`from voicecraft import VoiceCraft model = VoiceCraft.from_pretrained("${e.id}")`],_b=()=>[`import ChatTTS import torch import torchaudio chat = ChatTTS.Chat() chat.load_models(compile=False) # Set to True for better performance texts = ["PUT YOUR TEXT HERE",] wavs = chat.infer(texts) torchaudio.save("output1.wav", torch.from_numpy(wavs[0]), 24000)`],Sb=e=>[`from ultralytics import YOLOv10 model = YOLOv10.from_pretrained("${e.id}") source = 'http://images.cocodataset.org/val2017/000000039769.jpg' model.predict(source=source, save=True) `],Eb=e=>[`# Option 1: use with transformers from transformers import AutoModelForImageSegmentation birefnet = AutoModelForImageSegmentation.from_pretrained("${e.id}", trust_remote_code=True) `,`# Option 2: use with BiRefNet # Install from https://github.com/ZhengPeng7/BiRefNet from models.birefnet import BiRefNet model = BiRefNet.from_pretrained("${e.id}")`],Tb=e=>[`pip install huggingface_hub hf_transfer export HF_HUB_ENABLE_HF_TRANSFER=1 huggingface-cli download --local-dir ${fn(e.id)} ${e.id}`],Cb=e=>[`from mlxim.model import create_model model = create_model("${e.id}")`],Ab=e=>[`from model2vec import StaticModel model = StaticModel.from_pretrained("${e.id}")`],Ib=e=>{let t;return e.tags.includes("automatic-speech-recognition")&&(t=vb("ASR",e)),t??["# tag did not correspond to a valid NeMo domain."]},jb=e=>[`from pxia import AutoModel model = AutoModel.from_pretrained("${e.id}")`],Lb=e=>[`from pythae.models import AutoModel model = AutoModel.load_from_hf_hub("${e.id}")`],Pb=e=>[`from audiocraft.models import MusicGen model = MusicGen.get_pretrained("${e.id}") descriptions = ['happy rock', 'energetic EDM', 'sad jazz'] wav = model.generate(descriptions) # generates 3 samples.`],Rb=e=>[`from audiocraft.models import MAGNeT model = MAGNeT.get_pretrained("${e.id}") descriptions = ['disco beat', 'energetic EDM', 'funky groove'] wav = model.generate(descriptions) # generates 3 samples.`],Db=e=>[`from audiocraft.models import AudioGen model = AudioGen.get_pretrained("${e.id}") model.set_generation_params(duration=5) # generate 5 seconds. 
descriptions = ['dog barking', 'siren of an emergency vehicle', 'footsteps in a corridor'] wav = model.generate(descriptions) # generates 3 samples.`],Nb=e=>e.tags.includes("musicgen")?Pb(e):e.tags.includes("audiogen")?Db(e):e.tags.includes("magnet")?Rb(e):["# Type of model unknown."],Mb=()=>[`# Install CLI with Homebrew on macOS device brew install whisperkit-cli # View all available inference options whisperkit-cli transcribe --help # Download and run inference using whisper base model whisperkit-cli transcribe --audio-path /path/to/audio.mp3 # Or use your preferred model variant whisperkit-cli transcribe --model "large-v3" --model-prefix "distil" --audio-path /path/to/audio.mp3 --verbose`],Ob=e=>[`from threedtopia_xl.models import threedtopia_xl model = threedtopia_xl.from_pretrained("${e.id}") model.generate(cond="path/to/image.png")`],Fb={"adapter-transformers":{prettyLabel:"Adapters",repoName:"adapters",repoUrl:"https://github.com/Adapter-Hub/adapters",docsUrl:"https://huggingface.co/docs/hub/adapters",snippets:nx,filter:!0,countDownloads:'path:"adapter_config.json"'},allennlp:{prettyLabel:"AllenNLP",repoName:"AllenNLP",repoUrl:"https://github.com/allenai/allennlp",docsUrl:"https://huggingface.co/docs/hub/allennlp",snippets:ax,filter:!0},asteroid:{prettyLabel:"Asteroid",repoName:"Asteroid",repoUrl:"https://github.com/asteroid-team/asteroid",docsUrl:"https://huggingface.co/docs/hub/asteroid",snippets:ox,filter:!0,countDownloads:'path:"pytorch_model.bin"'},audiocraft:{prettyLabel:"Audiocraft",repoName:"audiocraft",repoUrl:"https://github.com/facebookresearch/audiocraft",snippets:Nb,filter:!1,countDownloads:'path:"state_dict.bin"'},audioseal:{prettyLabel:"AudioSeal",repoName:"audioseal",repoUrl:"https://github.com/facebookresearch/audioseal",filter:!1,countDownloads:'path_extension:"pth"',snippets:sx},bertopic:{prettyLabel:"BERTopic",repoName:"BERTopic",repoUrl:"https://github.com/MaartenGr/BERTopic",snippets:lx,filter:!0},big_vision:{prettyLabel:"Big Vision",repoName:"big_vision",repoUrl:"https://github.com/google-research/big_vision",filter:!1,countDownloads:'path_extension:"npz"'},birefnet:{prettyLabel:"BiRefNet",repoName:"BiRefNet",repoUrl:"https://github.com/ZhengPeng7/BiRefNet",snippets:Eb,filter:!1},bm25s:{prettyLabel:"BM25S",repoName:"bm25s",repoUrl:"https://github.com/xhluca/bm25s",snippets:ux,filter:!1,countDownloads:'path:"params.index.json"'},champ:{prettyLabel:"Champ",repoName:"Champ",repoUrl:"https://github.com/fudan-generative-vision/champ",countDownloads:'path:"champ/motion_module.pth"'},chat_tts:{prettyLabel:"ChatTTS",repoName:"ChatTTS",repoUrl:"https://github.com/2noise/ChatTTS.git",snippets:_b,filter:!1,countDownloads:'path:"asset/GPT.pt"'},colpali:{prettyLabel:"ColPali",repoName:"ColPali",repoUrl:"https://github.com/ManuelFay/colpali",filter:!1,countDownloads:'path:"adapter_config.json"'},deepforest:{prettyLabel:"DeepForest",repoName:"deepforest",docsUrl:"https://deepforest.readthedocs.io/en/latest/",repoUrl:"https://github.com/weecology/DeepForest",countDownloads:'path_extension:"pt" OR path_extension:"pl"'},"depth-anything-v2":{prettyLabel:"DepthAnythingV2",repoName:"Depth Anything V2",repoUrl:"https://github.com/DepthAnything/Depth-Anything-V2",snippets:cx,filter:!1,countDownloads:'path_extension:"pth"'},"depth-pro":{prettyLabel:"Depth Pro",repoName:"Depth 
Pro",repoUrl:"https://github.com/apple/ml-depth-pro",countDownloads:'path_extension:"pt"',snippets:dx,filter:!1},diffree:{prettyLabel:"Diffree",repoName:"Diffree",repoUrl:"https://github.com/OpenGVLab/Diffree",filter:!1,countDownloads:'path:"diffree-step=000010999.ckpt"'},diffusers:{prettyLabel:"Diffusers",repoName:"🤗/diffusers",repoUrl:"https://github.com/huggingface/diffusers",docsUrl:"https://huggingface.co/docs/hub/diffusers",snippets:gx,filter:!0},diffusionkit:{prettyLabel:"DiffusionKit",repoName:"DiffusionKit",repoUrl:"https://github.com/argmaxinc/DiffusionKit",snippets:yx},doctr:{prettyLabel:"docTR",repoName:"doctr",repoUrl:"https://github.com/mindee/doctr"},cartesia_pytorch:{prettyLabel:"Cartesia Pytorch",repoName:"Cartesia Pytorch",repoUrl:"https://github.com/cartesia-ai/cartesia_pytorch",snippets:vx},cartesia_mlx:{prettyLabel:"Cartesia MLX",repoName:"Cartesia MLX",repoUrl:"https://github.com/cartesia-ai/cartesia_mlx",snippets:wx},cotracker:{prettyLabel:"CoTracker",repoName:"CoTracker",repoUrl:"https://github.com/facebookresearch/co-tracker",filter:!1,countDownloads:'path_extension:"pth"'},edsnlp:{prettyLabel:"EDS-NLP",repoName:"edsnlp",repoUrl:"https://github.com/aphp/edsnlp",docsUrl:"https://aphp.github.io/edsnlp/latest/",filter:!1,snippets:xx,countDownloads:'path_filename:"config" AND path_extension:"cfg"'},elm:{prettyLabel:"ELM",repoName:"elm",repoUrl:"https://github.com/slicex-ai/elm",filter:!1,countDownloads:'path_filename:"slicex_elm_config" AND path_extension:"json"'},espnet:{prettyLabel:"ESPnet",repoName:"ESPnet",repoUrl:"https://github.com/espnet/espnet",docsUrl:"https://huggingface.co/docs/hub/espnet",snippets:Sx,filter:!0},fairseq:{prettyLabel:"Fairseq",repoName:"fairseq",repoUrl:"https://github.com/pytorch/fairseq",snippets:Ex,filter:!0},fastai:{prettyLabel:"fastai",repoName:"fastai",repoUrl:"https://github.com/fastai/fastai",docsUrl:"https://huggingface.co/docs/hub/fastai",snippets:tb,filter:!0},fasttext:{prettyLabel:"fastText",repoName:"fastText",repoUrl:"https://fasttext.cc/",snippets:gb,filter:!0,countDownloads:'path_extension:"bin"'},flair:{prettyLabel:"Flair",repoName:"Flair",repoUrl:"https://github.com/flairNLP/flair",docsUrl:"https://huggingface.co/docs/hub/flair",snippets:Tx,filter:!0,countDownloads:'path:"pytorch_model.bin"'},"gemma.cpp":{prettyLabel:"gemma.cpp",repoName:"gemma.cpp",repoUrl:"https://github.com/google/gemma.cpp",filter:!1,countDownloads:'path_extension:"sbs"'},gliner:{prettyLabel:"GLiNER",repoName:"GLiNER",repoUrl:"https://github.com/urchade/GLiNER",snippets:Cx,filter:!1,countDownloads:'path:"gliner_config.json"'},"glyph-byt5":{prettyLabel:"Glyph-ByT5",repoName:"Glyph-ByT5",repoUrl:"https://github.com/AIGText/Glyph-ByT5",filter:!1,countDownloads:'path:"checkpoints/byt5_model.pt"'},grok:{prettyLabel:"Grok",repoName:"Grok",repoUrl:"https://github.com/xai-org/grok-1",filter:!1,countDownloads:'path:"ckpt/tensor00000_000" OR path:"ckpt-0/tensor00000_000"'},hallo:{prettyLabel:"Hallo",repoName:"Hallo",repoUrl:"https://github.com/fudan-generative-vision/hallo",countDownloads:'path:"hallo/net.pth"'},hezar:{prettyLabel:"Hezar",repoName:"Hezar",repoUrl:"https://github.com/hezarai/hezar",docsUrl:"https://hezarai.github.io/hezar",countDownloads:'path:"model_config.yaml" OR 
path:"embedding/embedding_config.yaml"'},htrflow:{prettyLabel:"HTRflow",repoName:"HTRflow",repoUrl:"https://github.com/AI-Riksarkivet/htrflow",docsUrl:"https://ai-riksarkivet.github.io/htrflow",snippets:Ax},"hunyuan-dit":{prettyLabel:"HunyuanDiT",repoName:"HunyuanDiT",repoUrl:"https://github.com/Tencent/HunyuanDiT",countDownloads:'path:"pytorch_model_ema.pt" OR path:"pytorch_model_distill.pt"'},imstoucan:{prettyLabel:"IMS Toucan",repoName:"IMS-Toucan",repoUrl:"https://github.com/DigitalPhonetics/IMS-Toucan",countDownloads:'path:"embedding_gan.pt" OR path:"Vocoder.pt" OR path:"ToucanTTS.pt"'},keras:{prettyLabel:"Keras",repoName:"Keras",repoUrl:"https://github.com/keras-team/keras",docsUrl:"https://huggingface.co/docs/hub/keras",snippets:Ix,filter:!0,countDownloads:'path:"config.json" OR path_extension:"keras"'},"tf-keras":{prettyLabel:"TF-Keras",repoName:"TF-Keras",repoUrl:"https://github.com/keras-team/tf-keras",docsUrl:"https://huggingface.co/docs/hub/tf-keras",snippets:Rx,countDownloads:'path:"saved_model.pb"'},"keras-nlp":{prettyLabel:"KerasNLP",repoName:"KerasNLP",repoUrl:"https://github.com/keras-team/keras-nlp",docsUrl:"https://keras.io/keras_nlp/",snippets:jx},"keras-hub":{prettyLabel:"KerasHub",repoName:"KerasHub",repoUrl:"https://github.com/keras-team/keras-hub",docsUrl:"https://keras.io/keras_hub/",snippets:Lx,filter:!0},k2:{prettyLabel:"K2",repoName:"k2",repoUrl:"https://github.com/k2-fsa/k2"},liveportrait:{prettyLabel:"LivePortrait",repoName:"LivePortrait",repoUrl:"https://github.com/KwaiVGI/LivePortrait",filter:!1,countDownloads:'path:"liveportrait/landmark.onnx"'},"llama-cpp-python":{prettyLabel:"llama-cpp-python",repoName:"llama-cpp-python",repoUrl:"https://github.com/abetlen/llama-cpp-python",snippets:Px},"mini-omni2":{prettyLabel:"Mini-Omni2",repoName:"Mini-Omni2",repoUrl:"https://github.com/gpt-omni/mini-omni2",countDownloads:'path:"model_config.yaml"'},mindspore:{prettyLabel:"MindSpore",repoName:"mindspore",repoUrl:"https://github.com/mindspore-ai/mindspore"},"mamba-ssm":{prettyLabel:"MambaSSM",repoName:"MambaSSM",repoUrl:"https://github.com/state-spaces/mamba",filter:!1,snippets:Dx},"mars5-tts":{prettyLabel:"MARS5-TTS",repoName:"MARS5-TTS",repoUrl:"https://github.com/Camb-ai/MARS5-TTS",filter:!1,countDownloads:'path:"mars5_ar.safetensors"',snippets:Nx},"mesh-anything":{prettyLabel:"MeshAnything",repoName:"MeshAnything",repoUrl:"https://github.com/buaacyw/MeshAnything",filter:!1,countDownloads:'path:"MeshAnything_350m.pth"',snippets:Mx},"ml-agents":{prettyLabel:"ml-agents",repoName:"ml-agents",repoUrl:"https://github.com/Unity-Technologies/ml-agents",docsUrl:"https://huggingface.co/docs/hub/ml-agents",snippets:wb,filter:!0,countDownloads:'path_extension:"onnx"'},mlx:{prettyLabel:"MLX",repoName:"MLX",repoUrl:"https://github.com/ml-explore/mlx-examples/tree/main",snippets:Tb,filter:!0},"mlx-image":{prettyLabel:"mlx-image",repoName:"mlx-image",repoUrl:"https://github.com/riccardomusmeci/mlx-image",docsUrl:"https://huggingface.co/docs/hub/mlx-image",snippets:Cb,filter:!1,countDownloads:'path:"model.safetensors"'},"mlc-llm":{prettyLabel:"MLC-LLM",repoName:"MLC-LLM",repoUrl:"https://github.com/mlc-ai/mlc-llm",docsUrl:"https://llm.mlc.ai/docs/",filter:!1,countDownloads:'path:"mlc-chat-config.json"'},model2vec:{prettyLabel:"Model2Vec",repoName:"model2vec",repoUrl:"https://github.com/MinishLab/model2vec",snippets:Ab,filter:!1},moshi:{prettyLabel:"Moshi",repoName:"Moshi",repoUrl:"https://github.com/kyutai-labs/moshi",filter:!1,countDownloads:'path:"tokenizer-e351c8d8-checkpoint125.
safetensors"'},nemo:{prettyLabel:"NeMo",repoName:"NeMo",repoUrl:"https://github.com/NVIDIA/NeMo",snippets:Ib,filter:!0,countDownloads:'path_extension:"nemo" OR path:"model_config.yaml"'},"open-oasis":{prettyLabel:"open-oasis",repoName:"open-oasis",repoUrl:"https://github.com/etched-ai/open-oasis",countDownloads:'path:"oasis500m.pt"'},open_clip:{prettyLabel:"OpenCLIP",repoName:"OpenCLIP",repoUrl:"https://github.com/mlfoundations/open_clip",snippets:Ox,filter:!0,countDownloads:'path_extension:"bin" AND path_filename:*pytorch_model'},paddlenlp:{prettyLabel:"paddlenlp",repoName:"PaddleNLP",repoUrl:"https://github.com/PaddlePaddle/PaddleNLP",docsUrl:"https://huggingface.co/docs/hub/paddlenlp",snippets:Fx,filter:!0,countDownloads:'path:"model_config.json"'},peft:{prettyLabel:"PEFT",repoName:"PEFT",repoUrl:"https://github.com/huggingface/peft",snippets:hb,filter:!0,countDownloads:'path:"adapter_config.json"'},pxia:{prettyLabel:"pxia",repoName:"pxia",repoUrl:"https://github.com/not-lain/pxia",snippets:jb,filter:!1},"pyannote-audio":{prettyLabel:"pyannote.audio",repoName:"pyannote-audio",repoUrl:"https://github.com/pyannote/pyannote-audio",snippets:$x,filter:!0},"py-feat":{prettyLabel:"Py-Feat",repoName:"Py-Feat",repoUrl:"https://github.com/cosanlab/py-feat",docsUrl:"https://py-feat.org/",filter:!1},pythae:{prettyLabel:"pythae",repoName:"pythae",repoUrl:"https://github.com/clementchadebec/benchmark_VAE",snippets:Lb,filter:!1},recurrentgemma:{prettyLabel:"RecurrentGemma",repoName:"recurrentgemma",repoUrl:"https://github.com/google-deepmind/recurrentgemma",filter:!1,countDownloads:'path:"tokenizer.model"'},relik:{prettyLabel:"Relik",repoName:"Relik",repoUrl:"https://github.com/SapienzaNLP/relik",snippets:Bx,filter:!1},refiners:{prettyLabel:"Refiners",repoName:"Refiners",repoUrl:"https://github.com/finegrain-ai/refiners",docsUrl:"https://refine.rs/",filter:!1,countDownloads:'path:"model.safetensors"'},reverb:{prettyLabel:"Reverb",repoName:"Reverb",repoUrl:"https://github.com/revdotcom/reverb",filter:!1},saelens:{prettyLabel:"SAELens",repoName:"SAELens",repoUrl:"https://github.com/jbloomAus/SAELens",snippets:Qx,filter:!1},sam2:{prettyLabel:"sam2",repoName:"sam2",repoUrl:"https://github.com/facebookresearch/segment-anything-2",filter:!1,snippets:nb,countDownloads:'path_extension:"pt"'},"sample-factory":{prettyLabel:"sample-factory",repoName:"sample-factory",repoUrl:"https://github.com/alex-petrenko/sample-factory",docsUrl:"https://huggingface.co/docs/hub/sample-factory",snippets:ib,filter:!0,countDownloads:'path:"cfg.json"'},sapiens:{prettyLabel:"sapiens",repoName:"sapiens",repoUrl:"https://github.com/facebookresearch/sapiens",filter:!1,countDownloads:'path_extension:"pt2" OR path_extension:"pth" OR 
path_extension:"onnx"'},"sentence-transformers":{prettyLabel:"sentence-transformers",repoName:"sentence-transformers",repoUrl:"https://github.com/UKPLab/sentence-transformers",docsUrl:"https://huggingface.co/docs/hub/sentence-transformers",snippets:ab,filter:!0},setfit:{prettyLabel:"setfit",repoName:"setfit",repoUrl:"https://github.com/huggingface/setfit",docsUrl:"https://huggingface.co/docs/hub/setfit",snippets:ob,filter:!0},sklearn:{prettyLabel:"Scikit-learn",repoName:"Scikit-learn",repoUrl:"https://github.com/scikit-learn/scikit-learn",snippets:Gx,filter:!0,countDownloads:'path:"sklearn_model.joblib"'},spacy:{prettyLabel:"spaCy",repoName:"spaCy",repoUrl:"https://github.com/explosion/spaCy",docsUrl:"https://huggingface.co/docs/hub/spacy",snippets:sb,filter:!0,countDownloads:'path_extension:"whl"'},"span-marker":{prettyLabel:"SpanMarker",repoName:"SpanMarkerNER",repoUrl:"https://github.com/tomaarsen/SpanMarkerNER",docsUrl:"https://huggingface.co/docs/hub/span_marker",snippets:lb,filter:!0},speechbrain:{prettyLabel:"speechbrain",repoName:"speechbrain",repoUrl:"https://github.com/speechbrain/speechbrain",docsUrl:"https://huggingface.co/docs/hub/speechbrain",snippets:db,filter:!0,countDownloads:'path:"hyperparams.yaml"'},"ssr-speech":{prettyLabel:"SSR-Speech",repoName:"SSR-Speech",repoUrl:"https://github.com/WangHelin1997/SSR-Speech",filter:!1,countDownloads:'path_extension:".pth"'},"stable-audio-tools":{prettyLabel:"Stable Audio Tools",repoName:"stable-audio-tools",repoUrl:"https://github.com/Stability-AI/stable-audio-tools.git",filter:!1,countDownloads:'path:"model.safetensors"',snippets:eb},"diffusion-single-file":{prettyLabel:"Diffusion Single File",repoName:"diffusion-single-file",repoUrl:"https://github.com/comfyanonymous/ComfyUI",filter:!1,countDownloads:'path_extension:"safetensors"'},"seed-story":{prettyLabel:"SEED-Story",repoName:"SEED-Story",repoUrl:"https://github.com/TencentARC/SEED-Story",filter:!1,countDownloads:'path:"cvlm_llama2_tokenizer/tokenizer.model"',snippets:Xx},soloaudio:{prettyLabel:"SoloAudio",repoName:"SoloAudio",repoUrl:"https://github.com/WangHelin1997/SoloAudio",filter:!1,countDownloads:'path:"soloaudio_v2.pt"'},"stable-baselines3":{prettyLabel:"stable-baselines3",repoName:"stable-baselines3",repoUrl:"https://github.com/huggingface/huggingface_sb3",docsUrl:"https://huggingface.co/docs/hub/stable-baselines3",snippets:yb,filter:!0,countDownloads:'path_extension:"zip"'},stanza:{prettyLabel:"Stanza",repoName:"stanza",repoUrl:"https://github.com/stanfordnlp/stanza",docsUrl:"https://huggingface.co/docs/hub/stanza",snippets:ub,filter:!0,countDownloads:'path:"models/default.zip"'},"f5-tts":{prettyLabel:"F5-TTS",repoName:"F5-TTS",repoUrl:"https://github.com/SWivid/F5-TTS",filter:!1,countDownloads:'path_extension:"safetensors" OR path_extension:"pt"'},genmo:{prettyLabel:"Genmo",repoName:"Genmo",repoUrl:"https://github.com/genmoai/models",filter:!1,countDownloads:'path:"vae_stats.json"'},tensorflowtts:{prettyLabel:"TensorFlowTTS",repoName:"TensorFlowTTS",repoUrl:"https://github.com/TensorSpeech/TensorFlowTTS",snippets:Wx},"tic-clip":{prettyLabel:"TiC-CLIP",repoName:"TiC-CLIP",repoUrl:"https://github.com/apple/ml-tic-clip",filter:!1,countDownloads:'path_extension:"pt" AND 
path_prefix:"checkpoints/"'},timesfm:{prettyLabel:"TimesFM",repoName:"timesfm",repoUrl:"https://github.com/google-research/timesfm",filter:!1,countDownloads:'path:"checkpoints/checkpoint_1100000/state/checkpoint"'},timm:{prettyLabel:"timm",repoName:"pytorch-image-models",repoUrl:"https://github.com/rwightman/pytorch-image-models",docsUrl:"https://huggingface.co/docs/hub/timm",snippets:Kx,filter:!0,countDownloads:'path:"pytorch_model.bin" OR path:"model.safetensors"'},transformers:{prettyLabel:"Transformers",repoName:"🤗/transformers",repoUrl:"https://github.com/huggingface/transformers",docsUrl:"https://huggingface.co/docs/hub/transformers",snippets:fb,filter:!0},"transformers.js":{prettyLabel:"Transformers.js",repoName:"transformers.js",repoUrl:"https://github.com/huggingface/transformers.js",docsUrl:"https://huggingface.co/docs/hub/transformers-js",snippets:pb,filter:!0},"unity-sentis":{prettyLabel:"unity-sentis",repoName:"unity-sentis",repoUrl:"https://github.com/Unity-Technologies/sentis-samples",snippets:xb,filter:!0,countDownloads:'path_extension:"sentis"'},"vfi-mamba":{prettyLabel:"VFIMamba",repoName:"VFIMamba",repoUrl:"https://github.com/MCG-NJU/VFIMamba",countDownloads:'path_extension:"pkl"',snippets:bb},voicecraft:{prettyLabel:"VoiceCraft",repoName:"VoiceCraft",repoUrl:"https://github.com/jasonppy/VoiceCraft",docsUrl:"https://github.com/jasonppy/VoiceCraft",snippets:kb},yolov10:{prettyLabel:"YOLOv10",repoName:"yolov10",repoUrl:"https://github.com/THU-MIG/yolov10",docsUrl:"https://github.com/THU-MIG/yolov10",snippets:Sb},whisperkit:{prettyLabel:"WhisperKit",repoName:"WhisperKit",repoUrl:"https://github.com/argmaxinc/WhisperKit",docsUrl:"https://github.com/argmaxinc/WhisperKit?tab=readme-ov-file#homebrew",snippets:Mb,countDownloads:'path_filename:"model" AND path_extension:"mil" AND _exists_:"path_prefix"'},"3dtopia-xl":{prettyLabel:"3DTopia-XL",repoName:"3DTopia-XL",repoUrl:"https://github.com/3DTopia/3DTopia-XL",filter:!1,countDownloads:'path:"model_vae_fp16.pt"',snippets:Ob}};Object.entries(Fb).filter(([e,t])=>t.filter).map(([e])=>e);var Ub={};Ur(Ub,{curl:()=>qp,inputs:()=>Hp,js:()=>Zp,python:()=>Kp});var Hp={};Ur(Hp,{getModelInputSnippet:()=>be});var zb=()=>'"Hi, I recently bought a device from your company but it is not working as advertised and I would like to get reimbursed!"',$b=()=>'"Меня зовут Вольфганг и я живу в Берлине"',Bb=()=>'"The tower is 324 metres (1,063 ft) tall, about the same height as an 81-storey building, and the tallest structure in Paris. Its base is square, measuring 125 metres (410 ft) on each side. During its construction, the Eiffel Tower surpassed the Washington Monument to become the tallest man-made structure in the world, a title it held for 41 years until the Chrysler Building in New York City was finished in 1930. It was the first structure to reach a height of 300 metres. Due to the addition of a broadcasting aerial at the top of the tower in 1957, it is now taller than the Chrysler Building by 5.2 metres (17 ft). Excluding transmitters, the Eiffel Tower is the second tallest free-standing structure in France after the Millau Viaduct."',Vb=()=>`{ "query": "How many stars does the transformers repository have?", "table": { "Repository": ["Transformers", "Datasets", "Tokenizers"], "Stars": ["36542", "4512", "3934"], "Contributors": ["651", "77", "34"], "Programming language": [ "Python", "Python", "Rust, Python and NodeJS" ] } }`,Hb=()=>`{ "image": "cat.png", "question": "What is in this image?" 
}`,qb=()=>`{ "question": "What is my name?", "context": "My name is Clara and I live in Berkeley." }`,Wb=()=>'"I like you. I love you"',Kb=()=>'"My name is Sarah Jessica Parker but you can call me Jessica"',ad=e=>e.tags.includes("conversational")?e.pipeline_tag==="text-generation"?[{role:"user",content:"What is the capital of France?"}]:[{role:"user",content:[{type:"text",text:"Describe this image in one sentence."},{type:"image_url",image_url:{url:"https://cdn.britannica.com/61/93061-050-99147DCE/Statue-of-Liberty-Island-New-York-Bay.jpg"}}]}]:'"Can you please let us know more details about your "',Qb=()=>'"The answer to the universe is"',Xb=e=>`"The answer to the universe is ${e.mask_token}."`,Yb=()=>`{ "source_sentence": "That is a happy person", "sentences": [ "That is a happy dog", "That is a very happy person", "Today is a sunny day" ] }`,Jb=()=>'"Today is a sunny day and I will get some ice cream."',Zb=()=>'"cats.jpg"',Gb=()=>'"cats.jpg"',ek=()=>'"cats.jpg"',tk=()=>'"cats.jpg"',nk=()=>'"sample1.flac"',ik=()=>'"sample1.flac"',rk=()=>'"Astronaut riding a horse"',ak=()=>'"The answer to the universe is 42"',ok=()=>'"liquid drum and bass, atmospheric synths, airy sounds"',sk=()=>'"sample1.flac"',od=()=>`'{"Height":[11.52,12.48],"Length1":[23.2,24.0],"Length2":[25.4,26.3],"Species": ["Bream","Bream"]}'`,lk=()=>'"cats.jpg"',uk={"audio-to-audio":nk,"audio-classification":ik,"automatic-speech-recognition":sk,"document-question-answering":Hb,"feature-extraction":Jb,"fill-mask":Xb,"image-classification":Zb,"image-to-text":Gb,"image-segmentation":ek,"object-detection":tk,"question-answering":qb,"sentence-similarity":Yb,summarization:Bb,"table-question-answering":Vb,"tabular-regression":od,"tabular-classification":od,"text-classification":Wb,"text-generation":ad,"image-text-to-text":ad,"text-to-image":rk,"text-to-speech":ak,"text-to-audio":ok,"text2text-generation":Qb,"token-classification":Kb,translation:$b,"zero-shot-classification":zb,"zero-shot-image-classification":lk};function be(e,t=!1,n=!1){if(e.pipeline_tag){const i=uk[e.pipeline_tag];if(i){let r=i(e);if(typeof r=="string"&&(t&&(r=r.replace(/(?:(?:\r?\n|\r)\t*)|\t+/g," ")),n)){const a=/^"(.+)"$/s,o=r.match(a);r=o?o[1]:r}return r}}return"No input example has been defined for this model task."}var qp={};Ur(qp,{curlSnippets:()=>gi,getCurlInferenceSnippet:()=>ck,hasCurlInferenceSnippet:()=>dk,snippetBasic:()=>qe,snippetFile:()=>tn,snippetTextGeneration:()=>tl,snippetZeroShotClassification:()=>Wp});function uu(e,t){let n=JSON.stringify(e,null," ");return t!=null&&t.indent&&(n=n.replaceAll(` `,` ${t.indent}`)),t!=null&&t.attributeKeyQuotes||(n=n.replace(/"([^"]+)":/g,"$1:")),t!=null&&t.customContentEscaper&&(n=t.customContentEscaper(n)),n}function cu(e,t){const n=t.attributeKeyQuotes?'"':"";return Object.entries(e).map(([i,r])=>`${n}${i}${n}${t.attributeValueConnector}${r}`).join(`,${t.indent}`)}var qe=(e,t)=>({content:`curl https://api-inference.huggingface.co/models/${e.id} \\ -X POST \\ -d '{"inputs": ${be(e,!0)}}' \\ -H 'Content-Type: application/json' \\ -H "Authorization: Bearer ${t||"{API_TOKEN}"}"`}),tl=(e,t,n)=>{if(e.tags.includes("conversational")){const i=(n==null?void 0:n.streaming)??!0,r=be(e),a=(n==null?void 0:n.messages)??r,o={...n!=null&&n.temperature?{temperature:n.temperature}:void 0,max_tokens:(n==null?void 0:n.max_tokens)??500,...n!=null&&n.top_p?{top_p:n.top_p}:void 0};return{content:`curl 'https://api-inference.huggingface.co/models/${e.id}/v1/chat/completions' \\ -H "Authorization: Bearer ${t||"{API_TOKEN}"}" \\ -H 
'Content-Type: application/json' \\ --data '{ "model": "${e.id}", "messages": ${uu(a,{indent:" ",attributeKeyQuotes:!0,customContentEscaper:s=>s.replace(/'/g,"'\\''")})}, ${cu(o,{indent:` `,attributeKeyQuotes:!0,attributeValueConnector:": "})}, "stream": ${!!i} }'`}}else return qe(e,t)},Wp=(e,t)=>({content:`curl https://api-inference.huggingface.co/models/${e.id} \\ -X POST \\ -d '{"inputs": ${be(e,!0)}, "parameters": {"candidate_labels": ["refund", "legal", "faq"]}}' \\ -H 'Content-Type: application/json' \\ -H "Authorization: Bearer ${t||"{API_TOKEN}"}"`}),tn=(e,t)=>({content:`curl https://api-inference.huggingface.co/models/${e.id} \\ -X POST \\ --data-binary '@${be(e,!0,!0)}' \\ -H "Authorization: Bearer ${t||"{API_TOKEN}"}"`}),gi={"text-classification":qe,"token-classification":qe,"table-question-answering":qe,"question-answering":qe,"zero-shot-classification":Wp,translation:qe,summarization:qe,"feature-extraction":qe,"text-generation":tl,"image-text-to-text":tl,"text2text-generation":qe,"fill-mask":qe,"sentence-similarity":qe,"automatic-speech-recognition":tn,"text-to-image":qe,"text-to-speech":qe,"text-to-audio":qe,"audio-to-audio":tn,"audio-classification":tn,"image-classification":tn,"image-to-text":tn,"object-detection":tn,"image-segmentation":tn};function ck(e,t){var n;return e.pipeline_tag&&e.pipeline_tag in gi?((n=gi[e.pipeline_tag])==null?void 0:n.call(gi,e,t))??{content:""}:{content:""}}function dk(e){return!!e.pipeline_tag&&e.pipeline_tag in gi}var Kp={};Ur(Kp,{getPythonInferenceSnippet:()=>fk,hasPythonInferenceSnippet:()=>pk,pythonSnippets:()=>yi,snippetBasic:()=>ut,snippetConversational:()=>du,snippetDocumentQuestionAnswering:()=>Jp,snippetFile:()=>nn,snippetTabular:()=>nl,snippetTextToAudio:()=>il,snippetTextToImage:()=>Yp,snippetZeroShotClassification:()=>Qp,snippetZeroShotImageClassification:()=>Xp});var du=(e,t,n)=>{const i=(n==null?void 0:n.streaming)??!0,r=be(e),a=(n==null?void 0:n.messages)??r,o=uu(a,{attributeKeyQuotes:!0}),s={...n!=null&&n.temperature?{temperature:n.temperature}:void 0,max_tokens:(n==null?void 0:n.max_tokens)??500,...n!=null&&n.top_p?{top_p:n.top_p}:void 0},l=cu(s,{indent:` `,attributeValueConnector:"="});return i?[{client:"huggingface_hub",content:`from huggingface_hub import InferenceClient client = InferenceClient(api_key="${t||"{API_TOKEN}"}") messages = ${o} stream = client.chat.completions.create( model="${e.id}", messages=messages, ${l}, stream=True ) for chunk in stream: print(chunk.choices[0].delta.content, end="")`},{client:"openai",content:`from openai import OpenAI client = OpenAI( base_url="https://api-inference.huggingface.co/v1/", api_key="${t||"{API_TOKEN}"}" ) messages = ${o} stream = client.chat.completions.create( model="${e.id}", messages=messages, ${l}, stream=True ) for chunk in stream: print(chunk.choices[0].delta.content, end="")`}]:[{client:"huggingface_hub",content:`from huggingface_hub import InferenceClient client = InferenceClient(api_key="${t||"{API_TOKEN}"}") messages = ${o} completion = client.chat.completions.create( model="${e.id}", messages=messages, ${l} ) print(completion.choices[0].message)`},{client:"openai",content:`from openai import OpenAI client = OpenAI( base_url="https://api-inference.huggingface.co/v1/", api_key="${t||"{API_TOKEN}"}" ) messages = ${o} completion = client.chat.completions.create( model="${e.id}", messages=messages, ${l} ) print(completion.choices[0].message)`}]},Qp=e=>({content:`def query(payload): response = requests.post(API_URL, headers=headers, json=payload) return response.json() 
output = query({ "inputs": ${be(e)}, "parameters": {"candidate_labels": ["refund", "legal", "faq"]}, })`}),Xp=e=>({content:`def query(data): with open(data["image_path"], "rb") as f: img = f.read() payload={ "parameters": data["parameters"], "inputs": base64.b64encode(img).decode("utf-8") } response = requests.post(API_URL, headers=headers, json=payload) return response.json() output = query({ "image_path": ${be(e)}, "parameters": {"candidate_labels": ["cat", "dog", "llama"]}, })`}),ut=e=>({content:`def query(payload): response = requests.post(API_URL, headers=headers, json=payload) return response.json() output = query({ "inputs": ${be(e)}, })`}),nn=e=>({content:`def query(filename): with open(filename, "rb") as f: data = f.read() response = requests.post(API_URL, headers=headers, data=data) return response.json() output = query(${be(e)})`}),Yp=e=>({content:`def query(payload): response = requests.post(API_URL, headers=headers, json=payload) return response.content image_bytes = query({ "inputs": ${be(e)}, }) # You can access the image with PIL.Image for example import io from PIL import Image image = Image.open(io.BytesIO(image_bytes))`}),nl=e=>({content:`def query(payload): response = requests.post(API_URL, headers=headers, json=payload) return response.content response = query({ "inputs": {"data": ${be(e)}}, })`}),il=e=>e.library_name==="transformers"?{content:`def query(payload): response = requests.post(API_URL, headers=headers, json=payload) return response.content audio_bytes = query({ "inputs": ${be(e)}, }) # You can access the audio with IPython.display for example from IPython.display import Audio Audio(audio_bytes)`}:{content:`def query(payload): response = requests.post(API_URL, headers=headers, json=payload) return response.json() audio, sampling_rate = query({ "inputs": ${be(e)}, }) # You can access the audio with IPython.display for example from IPython.display import Audio Audio(audio, rate=sampling_rate)`},Jp=e=>({content:`def query(payload): with open(payload["image"], "rb") as f: img = f.read() payload["image"] = base64.b64encode(img).decode("utf-8") response = requests.post(API_URL, headers=headers, json=payload) return response.json() output = query({ "inputs": ${be(e)}, })`}),yi={"text-classification":ut,"token-classification":ut,"table-question-answering":ut,"question-answering":ut,"zero-shot-classification":Qp,translation:ut,summarization:ut,"feature-extraction":ut,"text-generation":ut,"text2text-generation":ut,"image-text-to-text":du,"fill-mask":ut,"sentence-similarity":ut,"automatic-speech-recognition":nn,"text-to-image":Yp,"text-to-speech":il,"text-to-audio":il,"audio-to-audio":nn,"audio-classification":nn,"image-classification":nn,"tabular-regression":nl,"tabular-classification":nl,"object-detection":nn,"image-segmentation":nn,"document-question-answering":Jp,"image-to-text":nn,"zero-shot-image-classification":Xp};function fk(e,t,n){var i;if(e.tags.includes("conversational"))return du(e,t,n);{let r=e.pipeline_tag&&e.pipeline_tag in yi?((i=yi[e.pipeline_tag])==null?void 0:i.call(yi,e,t))??{content:""}:{content:""};return r=Array.isArray(r)?r:[r],r.map(a=>({...a,content:`import requests API_URL = "https://api-inference.huggingface.co/models/${e.id}" headers = {"Authorization": ${t?`"Bearer ${t}"`:'f"Bearer {API_TOKEN}"'}} ${a.content}`}))}}function pk(e){return!!e.pipeline_tag&&e.pipeline_tag in yi}var 
Zp={};Ur(Zp,{getJsInferenceSnippet:()=>mk,hasJsInferenceSnippet:()=>hk,jsSnippets:()=>vi,snippetBasic:()=>ct,snippetFile:()=>rn,snippetTextGeneration:()=>rl,snippetTextToAudio:()=>al,snippetTextToImage:()=>em,snippetZeroShotClassification:()=>Gp});var ct=(e,t)=>({content:`async function query(data) { const response = await fetch( "https://api-inference.huggingface.co/models/${e.id}", { headers: { Authorization: "Bearer ${t||"{API_TOKEN}"}", "Content-Type": "application/json", }, method: "POST", body: JSON.stringify(data), } ); const result = await response.json(); return result; } query({"inputs": ${be(e)}}).then((response) => { console.log(JSON.stringify(response)); });`}),rl=(e,t,n)=>{if(e.tags.includes("conversational")){const i=(n==null?void 0:n.streaming)??!0,r=be(e),a=(n==null?void 0:n.messages)??r,o=uu(a,{indent:" "}),s={...n!=null&&n.temperature?{temperature:n.temperature}:void 0,max_tokens:(n==null?void 0:n.max_tokens)??500,...n!=null&&n.top_p?{top_p:n.top_p}:void 0},l=cu(s,{indent:` `,attributeValueConnector:": "});return i?[{client:"huggingface.js",content:`import { HfInference } from "@huggingface/inference" const client = new HfInference("${t||"{API_TOKEN}"}") let out = ""; const stream = client.chatCompletionStream({ model: "${e.id}", messages: ${o}, ${l} }); for await (const chunk of stream) { if (chunk.choices && chunk.choices.length > 0) { const newContent = chunk.choices[0].delta.content; out += newContent; console.log(newContent); } }`},{client:"openai",content:`import { OpenAI } from "openai" const client = new OpenAI({ baseURL: "https://api-inference.huggingface.co/v1/", apiKey: "${t||"{API_TOKEN}"}" }) let out = ""; const stream = await client.chat.completions.create({ model: "${e.id}", messages: ${o}, ${l}, stream: true, }); for await (const chunk of stream) { if (chunk.choices && chunk.choices.length > 0) { const newContent = chunk.choices[0].delta.content; out += newContent; console.log(newContent); } }`}]:[{client:"huggingface.js",content:`import { HfInference } from '@huggingface/inference' const client = new HfInference("${t||"{API_TOKEN}"}") const chatCompletion = await client.chatCompletion({ model: "${e.id}", messages: ${o}, ${l} }); console.log(chatCompletion.choices[0].message);`},{client:"openai",content:`import { OpenAI } from "openai" const client = new OpenAI({ baseURL: "https://api-inference.huggingface.co/v1/", apiKey: "${t||"{API_TOKEN}"}" }) const chatCompletion = await client.chat.completions.create({ model: "${e.id}", messages: ${o}, ${l} }); console.log(chatCompletion.choices[0].message);`}]}else return ct(e,t)},Gp=(e,t)=>({content:`async function query(data) { const response = await fetch( "https://api-inference.huggingface.co/models/${e.id}", { headers: { Authorization: "Bearer ${t||"{API_TOKEN}"}", "Content-Type": "application/json", }, method: "POST", body: JSON.stringify(data), } ); const result = await response.json(); return result; } query({"inputs": ${be(e)}, "parameters": {"candidate_labels": ["refund", "legal", "faq"]}}).then((response) => { console.log(JSON.stringify(response)); });`}),em=(e,t)=>({content:`async function query(data) { const response = await fetch( "https://api-inference.huggingface.co/models/${e.id}", { headers: { Authorization: "Bearer ${t||"{API_TOKEN}"}", "Content-Type": "application/json", }, method: "POST", body: JSON.stringify(data), } ); const result = await response.blob(); return result; } query({"inputs": ${be(e)}}).then((response) => { // Use image });`}),al=(e,t)=>{const n=`async function query(data) { const 
response = await fetch( "https://api-inference.huggingface.co/models/${e.id}", { headers: { Authorization: "Bearer ${t||"{API_TOKEN}"}", "Content-Type": "application/json", }, method: "POST", body: JSON.stringify(data), } );`;return e.library_name==="transformers"?{content:n+` const result = await response.blob(); return result; } query({"inputs": ${be(e)}}).then((response) => { // Returns a byte object of the Audio waveform. Use it directly! });`}:{content:n+` const result = await response.json(); return result; } query({"inputs": ${be(e)}}).then((response) => { console.log(JSON.stringify(response)); });`}},rn=(e,t)=>({content:`async function query(filename) { const data = fs.readFileSync(filename); const response = await fetch( "https://api-inference.huggingface.co/models/${e.id}", { headers: { Authorization: "Bearer ${t||"{API_TOKEN}"}", "Content-Type": "application/json", }, method: "POST", body: data, } ); const result = await response.json(); return result; } query(${be(e)}).then((response) => { console.log(JSON.stringify(response)); });`}),vi={"text-classification":ct,"token-classification":ct,"table-question-answering":ct,"question-answering":ct,"zero-shot-classification":Gp,translation:ct,summarization:ct,"feature-extraction":ct,"text-generation":rl,"image-text-to-text":rl,"text2text-generation":ct,"fill-mask":ct,"sentence-similarity":ct,"automatic-speech-recognition":rn,"text-to-image":em,"text-to-speech":al,"text-to-audio":al,"audio-to-audio":rn,"audio-classification":rn,"image-classification":rn,"image-to-text":rn,"object-detection":rn,"image-segmentation":rn};function mk(e,t){var n;return e.pipeline_tag&&e.pipeline_tag in vi?((n=vi[e.pipeline_tag])==null?void 0:n.call(vi,e,t))??{content:""}:{content:""}}function hk(e){return!!e.pipeline_tag&&e.pipeline_tag in vi}var tm=(e=>(e[e.F32=0]="F32",e[e.F16=1]="F16",e[e.Q4_0=2]="Q4_0",e[e.Q4_1=3]="Q4_1",e[e.Q5_0=6]="Q5_0",e[e.Q5_1=7]="Q5_1",e[e.Q8_0=8]="Q8_0",e[e.Q8_1=9]="Q8_1",e[e.Q2_K=10]="Q2_K",e[e.Q3_K=11]="Q3_K",e[e.Q4_K=12]="Q4_K",e[e.Q5_K=13]="Q5_K",e[e.Q6_K=14]="Q6_K",e[e.Q8_K=15]="Q8_K",e[e.IQ2_XXS=16]="IQ2_XXS",e[e.IQ2_XS=17]="IQ2_XS",e[e.IQ3_XXS=18]="IQ3_XXS",e[e.IQ1_S=19]="IQ1_S",e[e.IQ4_NL=20]="IQ4_NL",e[e.IQ3_S=21]="IQ3_S",e[e.IQ2_S=22]="IQ2_S",e[e.IQ4_XS=23]="IQ4_XS",e[e.I8=24]="I8",e[e.I16=25]="I16",e[e.I32=26]="I32",e[e.I64=27]="I64",e[e.F64=28]="F64",e[e.IQ1_M=29]="IQ1_M",e[e.BF16=30]="BF16",e))(tm||{}),gk=Object.values(tm).filter(e=>typeof e=="string");new RegExp(`(?${gk.join("|")})(_(?[A-Z]+))?`);var yk=Object.defineProperty,vk=(e,t)=>{for(var n in t)yk(e,n,{get:t[n],enumerable:!0})},wk={};vk(wk,{audioClassification:()=>am,audioToAudio:()=>lm,automaticSpeechRecognition:()=>om,chatCompletion:()=>Ik,chatCompletionStream:()=>jk,documentQuestionAnswering:()=>Cm,featureExtraction:()=>gm,fillMask:()=>ym,imageClassification:()=>um,imageSegmentation:()=>cm,imageToImage:()=>mm,imageToText:()=>dm,objectDetection:()=>fm,questionAnswering:()=>vm,request:()=>ie,sentenceSimilarity:()=>wm,streamingRequest:()=>_o,summarization:()=>xm,tableQuestionAnswering:()=>bm,tabularClassification:()=>jm,tabularRegression:()=>Im,textClassification:()=>km,textGeneration:()=>_m,textGenerationStream:()=>Ak,textToImage:()=>pm,textToSpeech:()=>sm,tokenClassification:()=>Sm,translation:()=>Em,visualQuestionAnswering:()=>Am,zeroShotClassification:()=>Tm,zeroShotImageClassification:()=>hm});function xk(e,t){return Object.assign({},...t.map(n=>{if(e[n]!==void 0)return{[n]:e[n]}}))}function bk(e,t){return e.includes(t)}function kk(e,t){const 
n=Array.isArray(t)?t:[t],i=Object.keys(e).filter(r=>!bk(n,r));return xk(e,i)}function Ea(e){return/^http(s?):/.test(e)||e.startsWith("/")}var ii=new Map,_k=10*60*1e3,Sk=1e3,nm="https://huggingface.co";async function im(e,t,n){if(Ea(e))return null;const i=`${e}:${t}`;let r=ii.get(i);if(r&&r.dateo.json()).then(o=>o.pipeline_tag).catch(()=>null);if(!a)return null;r={task:a,date:new Date},ii.set(i,{task:a,date:new Date}),ii.size>Sk&&ii.delete(ii.keys().next().value)}return r.task}var sd="https://api-inference.huggingface.co",da=null;async function rm(e,t){const{accessToken:n,endpointUrl:i,...r}=e;let{model:a}=e;const{forceTask:o,includeCredentials:s,taskHint:l,wait_for_model:u,use_cache:m,dont_load_model:f,chatCompletion:h}=t??{},v={};if(n&&(v.Authorization=`Bearer ${n}`),!a&&!da&&l){const p=await fetch(`${nm}/api/tasks`);p.ok&&(da=await p.json())}if(!a&&da&&l){const p=da[l];p&&(a=p.models[0].id)}if(!a)throw new Error("No model provided, and no default model found for this task");const _="data"in e&&!!e.data;_||(v["Content-Type"]="application/json"),u&&(v["X-Wait-For-Model"]="true"),m===!1&&(v["X-Use-Cache"]="false"),f&&(v["X-Load-Model"]="0");let k=(()=>{if(i&&Ea(a))throw new TypeError("Both model and endpointUrl cannot be URLs");return Ea(a)?(console.warn("Using a model URL is deprecated, please use the `endpointUrl` parameter instead"),a):i||(o?`${sd}/pipeline/${o}/${a}`:`${sd}/models/${a}`)})();h&&!k.endsWith("/chat/completions")&&(k+="/v1/chat/completions");let I;typeof s=="string"?I=s:s===!0&&(I="include");const g={headers:v,method:"POST",body:_?e.data:JSON.stringify({...r.model&&Ea(r.model)?kk(r,"model"):r}),...I&&{credentials:I},signal:t==null?void 0:t.signal};return{url:k,info:g}}async function ie(e,t){var a,o;const{url:n,info:i}=await rm(e,t),r=await((t==null?void 0:t.fetch)??fetch)(n,i);if((t==null?void 0:t.retry_on_error)!==!1&&r.status===503&&!(t!=null&&t.wait_for_model))return ie(e,{...t,wait_for_model:!0});if(!r.ok){if((a=r.headers.get("Content-Type"))!=null&&a.startsWith("application/json")){const s=await r.json();if([400,422,404,500].includes(r.status)&&(t!=null&&t.chatCompletion))throw new Error(`Server ${e.model} does not seem to support chat completion. Error: ${s.error}`);if(s.error)throw new Error(JSON.stringify(s.error))}throw new Error("An error occurred while fetching the blob")}return(o=r.headers.get("Content-Type"))!=null&&o.startsWith("application/json")?await r.json():await r.blob()}function Ek(e){let t,n,i,r=!1;return function(o){t===void 0?(t=o,n=0,i=-1):t=Ck(t,o);const s=t.length;let l=0;for(;n0){const l=r.decode(o.subarray(0,s)),u=s+(o[s+1]===32?2:1),m=r.decode(o.subarray(u));switch(l){case"data":i.data=i.data?i.data+` `+m:m;break;case"event":i.event=m;break;case"id":e(i.id=m);break;case"retry":const f=parseInt(m,10);isNaN(f)||t(i.retry=f);break}}}}function Ck(e,t){const n=new Uint8Array(e.length+t.length);return n.set(e),n.set(t,e.length),n}function ld(){return{data:"",event:"",id:"",retry:void 0}}async function*_o(e,t){var u,m;const{url:n,info:i}=await rm({...e,stream:!0},t),r=await((t==null?void 0:t.fetch)??fetch)(n,i);if((t==null?void 0:t.retry_on_error)!==!1&&r.status===503&&!(t!=null&&t.wait_for_model))return yield*_o(e,{...t,wait_for_model:!0});if(!r.ok){if((u=r.headers.get("Content-Type"))!=null&&u.startsWith("application/json")){const f=await r.json();if([400,422,404,500].includes(r.status)&&(t!=null&&t.chatCompletion))throw new Error(`Server ${e.model} does not seem to support chat completion. 
Error: ${f.error}`);if(f.error)throw new Error(f.error)}throw new Error(`Server response contains error: ${r.status}`)}if(!((m=r.headers.get("content-type"))!=null&&m.startsWith("text/event-stream")))throw new Error("Server does not support event stream content type, it returned "+r.headers.get("content-type"));if(!r.body)return;const a=r.body.getReader();let o=[];const l=Ek(Tk(()=>{},()=>{},f=>{o.push(f)}));try{for(;;){const{done:f,value:h}=await a.read();if(f)return;l(h);for(const v of o)if(v.data.length>0){if(v.data==="[DONE]")return;const _=JSON.parse(v.data);if(typeof _=="object"&&_!==null&&"error"in _)throw new Error(_.error);yield _}o=[]}}finally{a.releaseLock()}}var oe=class extends TypeError{constructor(e){super(`Invalid inference output: ${e}. Use the 'request' method with the same parameters to do a custom call with no type checking.`),this.name="InferenceOutputError"}};async function am(e,t){const n=await ie(e,{...t,taskHint:"audio-classification"});if(!(Array.isArray(n)&&n.every(r=>typeof r.label=="string"&&typeof r.score=="number")))throw new oe("Expected Array<{label: string, score: number}>");return n}async function om(e,t){const n=await ie(e,{...t,taskHint:"automatic-speech-recognition"});if(!(typeof(n==null?void 0:n.text)=="string"))throw new oe("Expected {text: string}");return n}async function sm(e,t){const n=await ie(e,{...t,taskHint:"text-to-speech"});if(!(n&&n instanceof Blob))throw new oe("Expected Blob");return n}async function lm(e,t){const n=await ie(e,{...t,taskHint:"audio-to-audio"});if(!(Array.isArray(n)&&n.every(r=>typeof r.label=="string"&&typeof r.blob=="string"&&typeof r["content-type"]=="string")))throw new oe("Expected Array<{label: string, blob: string, content-type: string}>");return n}async function um(e,t){const n=await ie(e,{...t,taskHint:"image-classification"});if(!(Array.isArray(n)&&n.every(r=>typeof r.label=="string"&&typeof r.score=="number")))throw new oe("Expected Array<{label: string, score: number}>");return n}async function cm(e,t){const n=await ie(e,{...t,taskHint:"image-segmentation"});if(!(Array.isArray(n)&&n.every(r=>typeof r.label=="string"&&typeof r.mask=="string"&&typeof r.score=="number")))throw new oe("Expected Array<{label: string, mask: string, score: number}>");return n}async function dm(e,t){var i;const n=(i=await ie(e,{...t,taskHint:"image-to-text"}))==null?void 0:i[0];if(typeof(n==null?void 0:n.generated_text)!="string")throw new oe("Expected {generated_text: string}");return n}async function fm(e,t){const n=await ie(e,{...t,taskHint:"object-detection"});if(!(Array.isArray(n)&&n.every(r=>typeof r.label=="string"&&typeof r.score=="number"&&typeof r.box.xmin=="number"&&typeof r.box.ymin=="number"&&typeof r.box.xmax=="number"&&typeof r.box.ymax=="number")))throw new oe("Expected Array<{label:string; score:number; box:{xmin:number; ymin:number; xmax:number; ymax:number}}>");return n}async function pm(e,t){const n=await ie(e,{...t,taskHint:"text-to-image"});if(!(n&&n instanceof Blob))throw new oe("Expected Blob");return n}function So(e){if(globalThis.Buffer)return globalThis.Buffer.from(e).toString("base64");{const t=[];return e.forEach(n=>{t.push(String.fromCharCode(n))}),globalThis.btoa(t.join(""))}}async function mm(e,t){let n;e.parameters?n={...e,inputs:So(new Uint8Array(e.inputs instanceof ArrayBuffer?e.inputs:await e.inputs.arrayBuffer()))}:n={accessToken:e.accessToken,model:e.model,data:e.inputs};const i=await ie(n,{...t,taskHint:"image-to-image"});if(!(i&&i instanceof Blob))throw new oe("Expected Blob");return i}async 
function hm(e,t){const n={...e,inputs:{image:So(new Uint8Array(e.inputs.image instanceof ArrayBuffer?e.inputs.image:await e.inputs.image.arrayBuffer()))}},i=await ie(n,{...t,taskHint:"zero-shot-image-classification"});if(!(Array.isArray(i)&&i.every(a=>typeof a.label=="string"&&typeof a.score=="number")))throw new oe("Expected Array<{label: string, score: number}>");return i}async function gm(e,t){const n=e.model?await im(e.model,e.accessToken,t):void 0,i=await ie(e,{...t,taskHint:"feature-extraction",...n==="sentence-similarity"&&{forceTask:"feature-extraction"}});let r=!0;const a=(o,s,l=0)=>l>s?!1:o.every(u=>Array.isArray(u))?o.every(u=>a(u,s,l+1)):o.every(u=>typeof u=="number");if(r=Array.isArray(i)&&a(i,3,0),!r)throw new oe("Expected Array");return i}async function ym(e,t){const n=await ie(e,{...t,taskHint:"fill-mask"});if(!(Array.isArray(n)&&n.every(r=>typeof r.score=="number"&&typeof r.sequence=="string"&&typeof r.token=="number"&&typeof r.token_str=="string")))throw new oe("Expected Array<{score: number, sequence: string, token: number, token_str: string}>");return n}async function vm(e,t){const n=await ie(e,{...t,taskHint:"question-answering"});if(!(typeof n=="object"&&!!n&&typeof n.answer=="string"&&typeof n.end=="number"&&typeof n.score=="number"&&typeof n.start=="number"))throw new oe("Expected {answer: string, end: number, score: number, start: number}");return n}async function wm(e,t){const n=e.model?await im(e.model,e.accessToken,t):void 0,i=await ie(e,{...t,taskHint:"sentence-similarity",...n==="feature-extraction"&&{forceTask:"sentence-similarity"}});if(!(Array.isArray(i)&&i.every(a=>typeof a=="number")))throw new oe("Expected number[]");return i}async function xm(e,t){const n=await ie(e,{...t,taskHint:"summarization"});if(!(Array.isArray(n)&&n.every(r=>typeof(r==null?void 0:r.summary_text)=="string")))throw new oe("Expected Array<{summary_text: string}>");return n==null?void 0:n[0]}async function bm(e,t){const n=await ie(e,{...t,taskHint:"table-question-answering"});if(!(typeof(n==null?void 0:n.aggregator)=="string"&&typeof n.answer=="string"&&Array.isArray(n.cells)&&n.cells.every(r=>typeof r=="string")&&Array.isArray(n.coordinates)&&n.coordinates.every(r=>Array.isArray(r)&&r.every(a=>typeof a=="number"))))throw new oe("Expected {aggregator: string, answer: string, cells: string[], coordinates: number[][]}");return n}async function km(e,t){var r;const n=(r=await ie(e,{...t,taskHint:"text-classification"}))==null?void 0:r[0];if(!(Array.isArray(n)&&n.every(a=>typeof(a==null?void 0:a.label)=="string"&&typeof a.score=="number")))throw new oe("Expected Array<{label: string, score: number}>");return n}function Eo(e){return Array.isArray(e)?e:[e]}async function _m(e,t){const n=Eo(await ie(e,{...t,taskHint:"text-generation"}));if(!(Array.isArray(n)&&n.every(r=>typeof(r==null?void 0:r.generated_text)=="string")))throw new oe("Expected Array<{generated_text: string}>");return n==null?void 0:n[0]}async function*Ak(e,t){yield*_o(e,{...t,taskHint:"text-generation"})}async function Sm(e,t){const n=Eo(await ie(e,{...t,taskHint:"token-classification"}));if(!(Array.isArray(n)&&n.every(r=>typeof r.end=="number"&&typeof r.entity_group=="string"&&typeof r.score=="number"&&typeof r.start=="number"&&typeof r.word=="string")))throw new oe("Expected Array<{end: number, entity_group: string, score: number, start: number, word: string}>");return n}async function Em(e,t){const n=await ie(e,{...t,taskHint:"translation"});if(!(Array.isArray(n)&&n.every(r=>typeof(r==null?void 
0:r.translation_text)=="string")))throw new oe("Expected type Array<{translation_text: string}>");return(n==null?void 0:n.length)===1?n==null?void 0:n[0]:n}async function Tm(e,t){const n=Eo(await ie(e,{...t,taskHint:"zero-shot-classification"}));if(!(Array.isArray(n)&&n.every(r=>Array.isArray(r.labels)&&r.labels.every(a=>typeof a=="string")&&Array.isArray(r.scores)&&r.scores.every(a=>typeof a=="number")&&typeof r.sequence=="string")))throw new oe("Expected Array<{labels: string[], scores: number[], sequence: string}>");return n}async function Ik(e,t){const n=await ie(e,{...t,taskHint:"text-generation",chatCompletion:!0});if(!(typeof n=="object"&&Array.isArray(n==null?void 0:n.choices)&&typeof(n==null?void 0:n.created)=="number"&&typeof(n==null?void 0:n.id)=="string"&&typeof(n==null?void 0:n.model)=="string"&&typeof(n==null?void 0:n.system_fingerprint)=="string"&&typeof(n==null?void 0:n.usage)=="object"))throw new oe("Expected ChatCompletionOutput");return n}async function*jk(e,t){yield*_o(e,{...t,taskHint:"text-generation",chatCompletion:!0})}async function Cm(e,t){var a;const n={...e,inputs:{question:e.inputs.question,image:So(new Uint8Array(e.inputs.image instanceof ArrayBuffer?e.inputs.image:await e.inputs.image.arrayBuffer()))}},i=(a=Eo(await ie(n,{...t,taskHint:"document-question-answering"})))==null?void 0:a[0];if(!(typeof(i==null?void 0:i.answer)=="string"&&(typeof i.end=="number"||typeof i.end>"u")&&(typeof i.score=="number"||typeof i.score>"u")&&(typeof i.start=="number"||typeof i.start>"u")))throw new oe("Expected Array<{answer: string, end?: number, score?: number, start?: number}>");return i}async function Am(e,t){var a;const n={...e,inputs:{question:e.inputs.question,image:So(new Uint8Array(e.inputs.image instanceof ArrayBuffer?e.inputs.image:await e.inputs.image.arrayBuffer()))}},i=(a=await ie(n,{...t,taskHint:"visual-question-answering"}))==null?void 0:a[0];if(!(typeof(i==null?void 0:i.answer)=="string"&&typeof i.score=="number"))throw new oe("Expected Array<{answer: string, score: number}>");return i}async function Im(e,t){const n=await ie(e,{...t,taskHint:"tabular-regression"});if(!(Array.isArray(n)&&n.every(r=>typeof r=="number")))throw new oe("Expected number[]");return n}async function jm(e,t){const n=await ie(e,{...t,taskHint:"tabular-classification"});if(!(Array.isArray(n)&&n.every(r=>typeof r=="number")))throw new oe("Expected number[]");return n}const fu=e=>c.jsxs("div",{className:"w-full",children:[c.jsx("p",{className:"text-xl",children:e.label??"Input"}),e.input?c.jsx("audio",{className:`w-full ${e.disabled?"cursor-not-allowed opacity-50":""}`,controls:!0,src:URL.createObjectURL(e.input)}):c.jsxs("label",{className:`block w-full cursor-pointer bg-yellow-200 p-6 text-center ${e.disabled?"cursor-not-allowed opacity-50":""}`,children:["No file chosen",c.jsx("input",{accept:"audio/*",className:"hidden",disabled:e.disabled??!1,onChange:t=>{t.target.files&&t.target.files[0]&&e.setInput(t.target.files[0])},type:"file"})]})]}),V=e=>{const t=(()=>{if(typeof e.output=="string")return e.output;try{return JSON.stringify(e.output,void 0,2)}catch(n){if(n instanceof Error)return`Error during JSON.stringify: ${n.message}`}})();return c.jsxs("div",{className:"w-full",children:[c.jsx("p",{className:"text-xl",children:e.label??"Output"}),c.jsx("pre",{className:`w-full select-text whitespace-pre-wrap break-words bg-yellow-200 p-6 ${e.disabled?"cursor-wait 
opacity-50":""}`,children:t})]})},Lk="audio-classification",Pk=e=>{const[t,n]=d.useState(),[i,r]=d.useState(),[a,o]=d.useState(),s=()=>{n(void 0),r(void 0),o(void 0)},l=async()=>{if(t){e.setLoading(!0);try{const u=await am({accessToken:e.accessToken,data:t,model:e.model});r(void 0),o(u)}catch(u){u instanceof Error&&r(u)}finally{e.setLoading(!1)}}};return c.jsxs(d.Fragment,{children:[c.jsx(fu,{disabled:e.loading,input:t,setInput:n}),c.jsx(z,{disabled:e.loading||!t,label:"Clear",onClick:s,variant:"secondary"}),c.jsx(z,{disabled:e.loading||!t,onClick:l}),i?c.jsx(V,{disabled:e.loading,label:"Error",output:i.message}):c.jsx(d.Fragment,{}),!i&&a?a.map(u=>c.jsx(V,{disabled:e.loading,output:u},u.label)):c.jsx(d.Fragment,{})]})},Lm=e=>c.jsxs("div",{className:"w-full",children:[c.jsx("p",{className:"text-xl",children:e.label??"Output"}),c.jsx("audio",{className:`w-full ${e.disabled?"cursor-wait opacity-50":""}`,controls:!0,src:URL.createObjectURL(e.output)})]}),Rk="audio-to-audio",Dk=e=>{const[t,n]=d.useState(),[i,r]=d.useState(),[a,o]=d.useState(),s=()=>{n(void 0),r(void 0),o(void 0)},l=async()=>{if(t){e.setLoading(!0);try{const u=await lm({accessToken:e.accessToken,data:t,model:e.model});r(void 0),o(u)}catch(u){u instanceof Error&&r(u)}finally{e.setLoading(!1)}}};return c.jsxs(d.Fragment,{children:[c.jsx(fu,{disabled:e.loading,input:t,setInput:n}),c.jsx(z,{disabled:e.loading||!t,label:"Clear",onClick:s,variant:"secondary"}),c.jsx(z,{disabled:e.loading||!t,onClick:l}),i?c.jsx(V,{disabled:e.loading,label:"Error",output:i.message}):c.jsx(d.Fragment,{}),!i&&a?a.map(u=>c.jsx(Lm,{disabled:e.loading,label:u.label,output:new Blob([u.blob],{type:u["content-type"]})},u.label)):c.jsx(d.Fragment,{})]})},Nk="automatic-speech-recognition",Mk=e=>{const[t,n]=d.useState(),[i,r]=d.useState(),[a,o]=d.useState(),s=()=>{n(void 0),r(void 0),o(void 0)},l=async()=>{if(t){e.setLoading(!0);try{const u=await om({accessToken:e.accessToken,data:t,model:e.model});r(void 0),o(u)}catch(u){u instanceof Error&&r(u)}finally{e.setLoading(!1)}}};return c.jsxs(d.Fragment,{children:[c.jsx(fu,{disabled:e.loading,input:t,setInput:n}),c.jsx(z,{disabled:e.loading||!t,label:"Clear",onClick:s,variant:"secondary"}),c.jsx(z,{disabled:e.loading||!t,onClick:l}),i?c.jsx(V,{disabled:e.loading,label:"Error",output:i.message}):c.jsx(d.Fragment,{}),!i&&a?c.jsx(V,{disabled:e.loading,output:a}):c.jsx(d.Fragment,{})]})},Ok="text-to-speech",Fk=e=>{const[t,n]=d.useState(),[i,r]=d.useState(),[a,o]=d.useState(),s=()=>{n(void 0),r(void 0),o(void 0)},l=async()=>{if(t){e.setLoading(!0);try{const u=await sm({accessToken:e.accessToken,inputs:t,model:e.model});r(void 0),o(u)}catch(u){u instanceof Error&&r(u)}finally{e.setLoading(!1)}}};return c.jsxs(d.Fragment,{children:[c.jsx(Ie,{disabled:e.loading,input:t,setInput:n}),c.jsx(z,{disabled:e.loading||!t,label:"Clear",onClick:s,variant:"secondary"}),c.jsx(z,{disabled:e.loading||!t,onClick:l}),i?c.jsx(V,{disabled:e.loading,label:"Error",output:i.message}):c.jsx(d.Fragment,{}),!i&&a?c.jsx(Lm,{disabled:e.loading,output:a}):c.jsx(d.Fragment,{})]})},Cn=e=>c.jsxs("div",{className:"w-full",children:[c.jsx("p",{className:"text-xl",children:e.label??"Input"}),e.input?c.jsx("img",{className:`w-full ${e.disabled?"cursor-not-allowed opacity-50":""}`,src:URL.createObjectURL(e.input)}):c.jsxs("label",{className:`block w-full cursor-pointer bg-yellow-200 p-6 text-center ${e.disabled?"cursor-not-allowed opacity-50":""}`,children:["No file 
chosen",c.jsx("input",{accept:"image/*",className:"hidden",disabled:e.disabled??!1,onChange:t=>{t.target.files&&t.target.files[0]&&e.setInput(t.target.files[0])},type:"file"})]})]}),Uk="image-classification",zk=e=>{const[t,n]=d.useState(),[i,r]=d.useState(),[a,o]=d.useState(),s=()=>{n(void 0),r(void 0),o(void 0)},l=async()=>{if(t){e.setLoading(!0);try{const u=await um({accessToken:e.accessToken,data:t,model:e.model});r(void 0),o(u)}catch(u){u instanceof Error&&r(u)}finally{e.setLoading(!1)}}};return c.jsxs(d.Fragment,{children:[c.jsx(Cn,{disabled:e.loading,input:t,setInput:n}),c.jsx(z,{disabled:e.loading||!t,label:"Clear",onClick:s,variant:"secondary"}),c.jsx(z,{disabled:e.loading||!t,onClick:l}),i?c.jsx(V,{disabled:e.loading,label:"Error",output:i.message}):c.jsx(d.Fragment,{}),!i&&a?a.map(u=>c.jsx(V,{disabled:e.loading,output:u},u.label)):c.jsx(d.Fragment,{})]})},$k="image-segmentation",Bk=e=>{const[t,n]=d.useState(),[i,r]=d.useState(),[a,o]=d.useState(),s=()=>{n(void 0),r(void 0),o(void 0)},l=async()=>{if(t){e.setLoading(!0);try{const u=await cm({accessToken:e.accessToken,data:t,model:e.model});r(void 0),o(u)}catch(u){u instanceof Error&&r(u)}finally{e.setLoading(!1)}}};return c.jsxs(d.Fragment,{children:[c.jsx(Cn,{disabled:e.loading,input:t,setInput:n}),c.jsx(z,{disabled:e.loading||!t,label:"Clear",onClick:s,variant:"secondary"}),c.jsx(z,{disabled:e.loading||!t,onClick:l}),i?c.jsx(V,{disabled:e.loading,label:"Error",output:i.message}):c.jsx(d.Fragment,{}),!i&&a?a.map(u=>c.jsx(V,{disabled:e.loading,output:u},u.label)):c.jsx(d.Fragment,{})]})},Pm=e=>c.jsxs("div",{className:"w-full",children:[c.jsx("p",{className:"text-xl",children:e.label??"Output"}),c.jsx("img",{className:`w-full ${e.disabled?"cursor-wait opacity-50":""}`,src:URL.createObjectURL(e.output)})]}),Vk="image-to-image",Hk=e=>{const[t,n]=d.useState(),[i,r]=d.useState(),[a,o]=d.useState(),s=()=>{n(void 0),r(void 0),o(void 0)},l=async()=>{if(t){e.setLoading(!0);try{const u=await mm({accessToken:e.accessToken,inputs:t,model:e.model});r(void 0),o(u)}catch(u){u instanceof Error&&r(u)}finally{e.setLoading(!1)}}};return c.jsxs(d.Fragment,{children:[c.jsx(Cn,{disabled:e.loading,input:t,setInput:n}),c.jsx(z,{disabled:e.loading||!t,label:"Clear",onClick:s,variant:"secondary"}),c.jsx(z,{disabled:e.loading||!t,onClick:l}),i?c.jsx(V,{disabled:e.loading,label:"Error",output:i.message}):c.jsx(d.Fragment,{}),!i&&a?c.jsx(Pm,{disabled:e.loading,output:a}):c.jsx(d.Fragment,{})]})},qk="image-to-text",Wk=e=>{const[t,n]=d.useState(),[i,r]=d.useState(),[a,o]=d.useState(),s=()=>{n(void 0),r(void 0),o(void 0)},l=async()=>{if(t){e.setLoading(!0);try{const u=await dm({accessToken:e.accessToken,data:t,model:e.model});r(void 0),o(u)}catch(u){u instanceof Error&&r(u)}finally{e.setLoading(!1)}}};return c.jsxs(d.Fragment,{children:[c.jsx(Cn,{disabled:e.loading,input:t,setInput:n}),c.jsx(z,{disabled:e.loading||!t,label:"Clear",onClick:s,variant:"secondary"}),c.jsx(z,{disabled:e.loading||!t,onClick:l}),i?c.jsx(V,{disabled:e.loading,label:"Error",output:i.message}):c.jsx(d.Fragment,{}),!i&&a?c.jsx(V,{disabled:e.loading,output:a}):c.jsx(d.Fragment,{})]})},Kk="object-detection",Qk=e=>{const[t,n]=d.useState(),[i,r]=d.useState(),[a,o]=d.useState(),s=()=>{n(void 0),r(void 0),o(void 0)},l=async()=>{if(t){e.setLoading(!0);try{const u=await fm({accessToken:e.accessToken,data:t,model:e.model});r(void 0),o(u)}catch(u){u instanceof Error&&r(u)}finally{e.setLoading(!1)}}};return 
c.jsxs(d.Fragment,{children:[c.jsx(Cn,{disabled:e.loading,input:t,setInput:n}),c.jsx(z,{disabled:e.loading||!t,label:"Clear",onClick:s,variant:"secondary"}),c.jsx(z,{disabled:e.loading||!t,onClick:l}),i?c.jsx(V,{disabled:e.loading,label:"Error",output:i.message}):c.jsx(d.Fragment,{}),!i&&a?a.map(u=>c.jsx(V,{disabled:e.loading,output:u},u.label)):c.jsx(d.Fragment,{})]})},Xk="text-to-image",Yk=e=>{const[t,n]=d.useState(),[i,r]=d.useState(),[a,o]=d.useState(),s=()=>{n(void 0),r(void 0),o(void 0)},l=async()=>{if(t){e.setLoading(!0);try{const u=await pm({accessToken:e.accessToken,inputs:t,model:e.model});r(void 0),o(u)}catch(u){u instanceof Error&&r(u)}finally{e.setLoading(!1)}}};return c.jsxs(d.Fragment,{children:[c.jsx(Ie,{disabled:e.loading,input:t,setInput:n}),c.jsx(z,{disabled:e.loading||!t,label:"Clear",onClick:s,variant:"secondary"}),c.jsx(z,{disabled:e.loading||!t,onClick:l}),i?c.jsx(V,{disabled:e.loading,label:"Error",output:i.message}):c.jsx(d.Fragment,{}),!i&&a?c.jsx(Pm,{disabled:e.loading,output:a}):c.jsx(d.Fragment,{})]})},Jk="zero-shot-image-classification",Zk=e=>{const[t,n]=d.useState(),i=Array.from({length:2}).map(()=>{}),[r,a]=d.useState(i),[o,s]=d.useState(),[l,u]=d.useState(),m=()=>{n(void 0),a(i),s(void 0),u(void 0)},f=async()=>{if(t&&r.every(Boolean)){e.setLoading(!0);try{const h=await hm({accessToken:e.accessToken,inputs:{image:t},model:e.model,parameters:{candidate_labels:r}});s(void 0),u(h)}catch(h){h instanceof Error&&s(h)}finally{e.setLoading(!1)}}};return c.jsxs(d.Fragment,{children:[c.jsx(Cn,{disabled:e.loading,input:t,setInput:n}),r.map((h,v)=>c.jsx(Ie,{disabled:e.loading,input:h,label:`Parameter - Candidate Label #${v+1}`,setInput:_=>a(k=>[...k.slice(0,v),_,...k.slice(v+1,k.length)])})),c.jsx(z,{disabled:e.loading||!t||!r.every(Boolean),label:"Add Candidate Label",onClick:()=>a(h=>[...h,void 0])}),c.jsx(z,{disabled:e.loading||!t||!r.every(Boolean),label:"Clear",onClick:m,variant:"secondary"}),c.jsx(z,{disabled:e.loading||!t||!r.every(Boolean),onClick:f}),o?c.jsx(V,{disabled:e.loading,label:"Error",output:o.message}):c.jsx(d.Fragment,{}),!o&&l?l.map(h=>c.jsx(V,{disabled:e.loading,output:h})):c.jsx(d.Fragment,{})]})},Gk="document-question-answering",e1=e=>{const[t,n]=d.useState(),[i,r]=d.useState(),[a,o]=d.useState(),[s,l]=d.useState(),u=()=>{n(void 0),r(void 0),o(void 0),l(void 0)},m=async()=>{if(t&&i){e.setLoading(!0);try{const f=await Cm({accessToken:e.accessToken,inputs:{question:t,image:i},model:e.model});o(void 0),l(f)}catch(f){f instanceof Error&&o(f)}finally{e.setLoading(!1)}}};return c.jsxs(d.Fragment,{children:[c.jsx(Ie,{disabled:e.loading,input:t,label:"Input - Question",setInput:n}),c.jsx(Cn,{disabled:e.loading,input:i,label:"Input - Image",setInput:r}),c.jsx(z,{disabled:e.loading||!i,label:"Clear",onClick:u,variant:"secondary"}),c.jsx(z,{disabled:e.loading||!i,onClick:m}),a?c.jsx(V,{disabled:e.loading,label:"Error",output:a.message}):c.jsx(d.Fragment,{}),!a&&s?c.jsx(V,{disabled:e.loading,output:s}):c.jsx(d.Fragment,{})]})},t1="visual-question-answering",n1=e=>{const[t,n]=d.useState(),[i,r]=d.useState(),[a,o]=d.useState(),[s,l]=d.useState(),u=()=>{n(void 0),r(void 0),o(void 0),l(void 0)},m=async()=>{if(t&&i){e.setLoading(!0);try{const f=await Am({accessToken:e.accessToken,inputs:{question:t,image:i},model:e.model});o(void 0),l(f)}catch(f){f instanceof Error&&o(f)}finally{e.setLoading(!1)}}};return c.jsxs(d.Fragment,{children:[c.jsx(Ie,{disabled:e.loading,input:t,label:"Input - Question",setInput:n}),c.jsx(Cn,{disabled:e.loading,input:i,label:"Input - 
Image",setInput:r}),c.jsx(z,{disabled:e.loading||!i,label:"Clear",onClick:u,variant:"secondary"}),c.jsx(z,{disabled:e.loading||!i,onClick:m}),a?c.jsx(V,{disabled:e.loading,label:"Error",output:a.message}):c.jsx(d.Fragment,{}),!a&&s?c.jsx(V,{disabled:e.loading,output:s}):c.jsx(d.Fragment,{})]})},i1="feature-extraction",r1=e=>{const[t,n]=d.useState(),[i,r]=d.useState(),[a,o]=d.useState(),s=()=>{n(void 0),r(void 0),o(void 0)},l=async()=>{if(t){e.setLoading(!0);try{const u=await gm({accessToken:e.accessToken,inputs:t,model:e.model});r(void 0),o(u)}catch(u){u instanceof Error&&r(u)}finally{e.setLoading(!1)}}};return c.jsxs(d.Fragment,{children:[c.jsx(Ie,{disabled:e.loading,input:t,setInput:n}),c.jsx(z,{disabled:e.loading||!t,label:"Clear",onClick:s,variant:"secondary"}),c.jsx(z,{disabled:e.loading||!t,onClick:l}),i?c.jsx(V,{disabled:e.loading,label:"Error",output:i.message}):c.jsx(d.Fragment,{}),!i&&a?c.jsx(V,{disabled:e.loading,output:a}):c.jsx(d.Fragment,{})]})},a1="fill-mask",o1=e=>{const[t,n]=d.useState(),[i,r]=d.useState(),[a,o]=d.useState(),s=()=>{n(void 0),r(void 0),o(void 0)},l=async()=>{if(t){e.setLoading(!0);try{const u=await ym({accessToken:e.accessToken,inputs:t,model:e.model});r(void 0),o(u)}catch(u){u instanceof Error&&r(u)}finally{e.setLoading(!1)}}};return c.jsxs(d.Fragment,{children:[c.jsx(Ie,{disabled:e.loading,input:t,setInput:n}),c.jsx(z,{disabled:e.loading||!t,label:"Clear",onClick:s,variant:"secondary"}),c.jsx(z,{disabled:e.loading||!t,onClick:l}),i?c.jsx(V,{disabled:e.loading,label:"Error",output:i.message}):c.jsx(d.Fragment,{}),!i&&a?a.map(u=>c.jsx(V,{disabled:e.loading,output:u},u.token_str)):c.jsx(d.Fragment,{})]})},s1="question-answering",l1=e=>{const[t,n]=d.useState(),[i,r]=d.useState(),[a,o]=d.useState(),[s,l]=d.useState(),u=()=>{n(void 0),r(void 0),o(void 0),l(void 0)},m=async()=>{if(t&&i){e.setLoading(!0);try{const f=await vm({accessToken:e.accessToken,inputs:{question:t,context:i},model:e.model});o(void 0),l(f)}catch(f){f instanceof Error&&o(f)}finally{e.setLoading(!1)}}};return c.jsxs(d.Fragment,{children:[c.jsx(Ie,{disabled:e.loading,input:t,label:"Input - Question",setInput:n}),c.jsx(Ie,{disabled:e.loading,input:i,label:"Input - Context",setInput:r}),c.jsx(z,{disabled:e.loading||!t||!i,label:"Clear",onClick:u,variant:"secondary"}),c.jsx(z,{disabled:e.loading||!t||!i,onClick:m}),a?c.jsx(V,{disabled:e.loading,label:"Error",output:a.message}):c.jsx(d.Fragment,{}),!a&&s?c.jsx(V,{disabled:e.loading,output:s}):c.jsx(d.Fragment,{})]})},u1="sentence-similarity",c1=e=>{const[t,n]=d.useState(),i=Array.from({length:2}).map(()=>{}),[r,a]=d.useState(i),[o,s]=d.useState(),[l,u]=d.useState(),m=()=>{n(void 0),a(i),s(void 0),u(void 0)},f=async()=>{if(t&&r.every(Boolean)){e.setLoading(!0);try{const h=await wm({accessToken:e.accessToken,inputs:{source_sentence:t,sentences:r},model:e.model});s(void 0),u(h)}catch(h){h instanceof Error&&s(h)}finally{e.setLoading(!1)}}};return c.jsxs(d.Fragment,{children:[c.jsx(Ie,{disabled:e.loading,input:t,label:"Input - Source Sentence",setInput:n}),r.map((h,v)=>c.jsx(Ie,{disabled:e.loading,input:h,label:`Input - Sentence #${v+1}`,setInput:_=>a(k=>[...k.slice(0,v),_,...k.slice(v+1,k.length)])})),c.jsx(z,{disabled:e.loading||!t||!r.every(Boolean),label:"Add Sentence",onClick:()=>a(h=>[...h,void 
0])}),c.jsx(z,{disabled:e.loading||!t||!r.every(Boolean),label:"Clear",onClick:m,variant:"secondary"}),c.jsx(z,{disabled:e.loading||!t||!r.every(Boolean),onClick:f}),o?c.jsx(V,{disabled:e.loading,label:"Error",output:o.message}):c.jsx(d.Fragment,{}),!o&&l?l.map((h,v)=>c.jsx(V,{disabled:e.loading,label:`Output - Sentence #${v+1}`,output:h})):c.jsx(d.Fragment,{})]})},d1="summarization",f1=e=>{const[t,n]=d.useState(),[i,r]=d.useState(),[a,o]=d.useState(),s=()=>{n(void 0),r(void 0),o(void 0)},l=async()=>{if(t){e.setLoading(!0);try{const u=await xm({accessToken:e.accessToken,inputs:t,model:e.model});r(void 0),o(u)}catch(u){u instanceof Error&&r(u)}finally{e.setLoading(!1)}}};return c.jsxs(d.Fragment,{children:[c.jsx(Ie,{disabled:e.loading,input:t,setInput:n}),c.jsx(z,{disabled:e.loading||!t,label:"Clear",onClick:s,variant:"secondary"}),c.jsx(z,{disabled:e.loading||!t,onClick:l}),i?c.jsx(V,{disabled:e.loading,label:"Error",output:i.message}):c.jsx(d.Fragment,{}),!i&&a?c.jsx(V,{disabled:e.loading,output:a}):c.jsx(d.Fragment,{})]})},p1=async e=>{const t=await e.text();try{const n=JSON.parse(t);try{return JSON.stringify(n,void 0,2)}catch(i){if(i instanceof Error)return`Error during JSON.stringify: ${i.message}`}}catch(n){if(n instanceof Error)return`Error during JSON.parse: ${n.message}`}},pu=e=>{const[t,n]=d.useState();return d.useEffect(()=>{e.input&&p1(e.input).then(n)},[e.input]),c.jsxs("div",{className:"w-full",children:[c.jsx("p",{className:"text-xl",children:e.label??"Input"}),e.input?c.jsx("pre",{className:`w-full select-text whitespace-pre-wrap break-words bg-yellow-200 p-6 ${e.disabled?"cursor-not-allowed opacity-50":""}`,children:t}):c.jsxs("label",{className:`block w-full cursor-pointer bg-yellow-200 p-6 text-center ${e.disabled?"cursor-not-allowed opacity-50":""}`,children:["No file chosen",c.jsx("input",{accept:".json",className:"hidden",disabled:e.disabled??!1,onChange:i=>{i.target.files&&i.target.files[0]&&e.setInput(i.target.files[0])},type:"file"})]})]})},m1="table-question-answering",h1=e=>{const[t,n]=d.useState(),[i,r]=d.useState(),[a,o]=d.useState(),[s,l]=d.useState(),u=()=>{n(void 0),r(void 0),o(void 0),l(void 0)},m=async()=>{if(t&&i){e.setLoading(!0);try{const f=await bm({accessToken:e.accessToken,inputs:{query:t,table:JSON.parse(await i.text()??"{}")},model:e.model});o(void 0),l(f)}catch(f){f instanceof Error&&o(f)}finally{e.setLoading(!1)}}};return c.jsxs(d.Fragment,{children:[c.jsx(Ie,{disabled:e.loading,input:t,label:"Input - Query",setInput:n}),c.jsx(pu,{disabled:e.loading,input:i,label:"Input - Table",setInput:r}),c.jsx(z,{disabled:e.loading||!t,label:"Clear",onClick:u,variant:"secondary"}),c.jsx(z,{disabled:e.loading||!t,onClick:m}),a?c.jsx(V,{disabled:e.loading,label:"Error",output:a.message}):c.jsx(d.Fragment,{}),!a&&s?c.jsx(V,{disabled:e.loading,output:s}):c.jsx(d.Fragment,{})]})},g1="text-classification",y1=e=>{const[t,n]=d.useState(),[i,r]=d.useState(),[a,o]=d.useState(),s=()=>{n(void 0),r(void 0),o(void 0)},l=async()=>{if(t){e.setLoading(!0);try{const u=await km({accessToken:e.accessToken,inputs:t,model:e.model});r(void 0),o(u)}catch(u){u instanceof Error&&r(u)}finally{e.setLoading(!1)}}};return 
c.jsxs(d.Fragment,{children:[c.jsx(Ie,{disabled:e.loading,input:t,setInput:n}),c.jsx(z,{disabled:e.loading||!t,label:"Clear",onClick:s,variant:"secondary"}),c.jsx(z,{disabled:e.loading||!t,onClick:l}),i?c.jsx(V,{disabled:e.loading,label:"Error",output:i.message}):c.jsx(d.Fragment,{}),!i&&a?a.map(u=>c.jsx(V,{disabled:e.loading,output:u},u.label)):c.jsx(d.Fragment,{})]})},v1="text-generation",w1=e=>{const[t,n]=d.useState(),[i,r]=d.useState(),[a,o]=d.useState(),s=()=>{n(void 0),r(void 0),o(void 0)},l=async()=>{if(t){e.setLoading(!0);try{const u=await _m({accessToken:e.accessToken,inputs:t,model:e.model});r(void 0),o(u)}catch(u){u instanceof Error&&r(u)}finally{e.setLoading(!1)}}};return c.jsxs(d.Fragment,{children:[c.jsx(Ie,{disabled:e.loading,input:t,setInput:n}),c.jsx(z,{disabled:e.loading||!t,label:"Clear",onClick:s,variant:"secondary"}),c.jsx(z,{disabled:e.loading||!t,onClick:l}),i?c.jsx(V,{disabled:e.loading,label:"Error",output:i.message}):c.jsx(d.Fragment,{}),!i&&a?c.jsx(V,{disabled:e.loading,output:a}):c.jsx(d.Fragment,{})]})},x1="token-classification",b1=e=>{const[t,n]=d.useState(),[i,r]=d.useState(),[a,o]=d.useState(),s=()=>{n(void 0),r(void 0),o(void 0)},l=async()=>{if(t){e.setLoading(!0);try{const u=await Sm({accessToken:e.accessToken,inputs:t,model:e.model});r(void 0),o(u)}catch(u){u instanceof Error&&r(u)}finally{e.setLoading(!1)}}};return c.jsxs(d.Fragment,{children:[c.jsx(Ie,{disabled:e.loading,input:t,setInput:n}),c.jsx(z,{disabled:e.loading||!t,label:"Clear",onClick:s,variant:"secondary"}),c.jsx(z,{disabled:e.loading||!t,onClick:l}),i?c.jsx(V,{disabled:e.loading,label:"Error",output:i.message}):c.jsx(d.Fragment,{}),!i&&a?a.map(u=>c.jsx(V,{disabled:e.loading,output:u},u.word)):c.jsx(d.Fragment,{})]})},k1="translation",_1=e=>{const[t,n]=d.useState(),[i,r]=d.useState(),[a,o]=d.useState(),s=()=>{n(void 0),r(void 0),o(void 0)},l=async()=>{if(t){e.setLoading(!0);try{const u=await Em({accessToken:e.accessToken,inputs:t,model:e.model});r(void 0),o(u)}catch(u){u instanceof Error&&r(u)}finally{e.setLoading(!1)}}};return c.jsxs(d.Fragment,{children:[c.jsx(Ie,{disabled:e.loading,input:t,setInput:n}),c.jsx(z,{disabled:e.loading||!t,label:"Clear",onClick:s,variant:"secondary"}),c.jsx(z,{disabled:e.loading||!t,onClick:l}),i?c.jsx(V,{disabled:e.loading,label:"Error",output:i.message}):c.jsx(d.Fragment,{}),!i&&a?c.jsx(V,{disabled:e.loading,output:a}):c.jsx(d.Fragment,{})]})},S1="zero-shot-classification",E1=e=>{const[t,n]=d.useState(),i=Array.from({length:2}).map(()=>{}),[r,a]=d.useState(i),[o,s]=d.useState(),[l,u]=d.useState(),m=()=>{n(void 0),a(i),s(void 0),u(void 0)},f=async()=>{if(t&&r.every(Boolean)){e.setLoading(!0);try{const h=await Tm({accessToken:e.accessToken,inputs:t,model:e.model,parameters:{candidate_labels:r}});s(void 0),u(h)}catch(h){h instanceof Error&&s(h)}finally{e.setLoading(!1)}}};return c.jsxs(d.Fragment,{children:[c.jsx(Ie,{disabled:e.loading,input:t,setInput:n}),r.map((h,v)=>c.jsx(Ie,{disabled:e.loading,input:h,label:`Parameter - Candidate Label #${v+1}`,setInput:_=>a(k=>[...k.slice(0,v),_,...k.slice(v+1,k.length)])})),c.jsx(z,{disabled:e.loading||!t||!r.every(Boolean),label:"Add Candidate Label",onClick:()=>a(h=>[...h,void 
0])}),c.jsx(z,{disabled:e.loading||!t||!r.every(Boolean),label:"Clear",onClick:m,variant:"secondary"}),c.jsx(z,{disabled:e.loading||!t||!r.every(Boolean),onClick:f}),o?c.jsx(V,{disabled:e.loading,label:"Error",output:o.message}):c.jsx(d.Fragment,{}),!o&&l?l.map(h=>c.jsx(V,{disabled:e.loading,output:h})):c.jsx(d.Fragment,{})]})},T1="tabular-classification",C1=e=>{const[t,n]=d.useState(),[i,r]=d.useState(),[a,o]=d.useState(),s=()=>{n(void 0),r(void 0),o(void 0)},l=async()=>{if(t){e.setLoading(!0);try{const u=await jm({accessToken:e.accessToken,inputs:{data:JSON.parse(await t.text()??"{}")},model:e.model});r(void 0),o(u)}catch(u){u instanceof Error&&r(u)}finally{e.setLoading(!1)}}};return c.jsxs(d.Fragment,{children:[c.jsx(pu,{disabled:e.loading,input:t,setInput:n}),c.jsx(z,{disabled:e.loading||!t,label:"Clear",onClick:s,variant:"secondary"}),c.jsx(z,{disabled:e.loading||!t,onClick:l}),i?c.jsx(V,{disabled:e.loading,label:"Error",output:i.message}):c.jsx(d.Fragment,{}),!i&&a?a.map((u,m)=>c.jsx(V,{disabled:e.loading,label:`Output - Row #${m+1}`,output:u})):c.jsx(d.Fragment,{})]})},A1="tabular-regression",I1=e=>{const[t,n]=d.useState(),[i,r]=d.useState(),[a,o]=d.useState(),s=()=>{n(void 0),r(void 0),o(void 0)},l=async()=>{if(t){e.setLoading(!0);try{const u=await Im({accessToken:e.accessToken,inputs:{data:JSON.parse(await t.text()??"{}")},model:e.model});r(void 0),o(u)}catch(u){u instanceof Error&&r(u)}finally{e.setLoading(!1)}}};return c.jsxs(d.Fragment,{children:[c.jsx(pu,{disabled:e.loading,input:t,setInput:n}),c.jsx(z,{disabled:e.loading||!t,label:"Clear",onClick:s,variant:"secondary"}),c.jsx(z,{disabled:e.loading||!t,onClick:l}),i?c.jsx(V,{disabled:e.loading,label:"Error",output:i.message}):c.jsx(d.Fragment,{}),!i&&a?a.map((u,m)=>c.jsx(V,{disabled:e.loading,label:`Output - Row #${m+1}`,output:u})):c.jsx(d.Fragment,{})]})},j1=[Lk,Rk,Nk,Gk,i1,a1,Uk,$k,Vk,qk,Kk,s1,u1,d1,m1,T1,A1,g1,v1,Xk,Ok,x1,k1,t1,S1,Jk],L1=e=>{if(!e.model||!e.task)return c.jsx(d.Fragment,{});switch(e.task){case"audio-classification":return c.jsx(Pk,{accessToken:e.accessToken,loading:e.loading,model:e.model,setLoading:e.setLoading});case"audio-to-audio":return c.jsx(Dk,{accessToken:e.accessToken,loading:e.loading,model:e.model,setLoading:e.setLoading});case"automatic-speech-recognition":return c.jsx(Mk,{accessToken:e.accessToken,loading:e.loading,model:e.model,setLoading:e.setLoading});case"document-question-answering":return c.jsx(e1,{accessToken:e.accessToken,loading:e.loading,model:e.model,setLoading:e.setLoading});case"feature-extraction":return c.jsx(r1,{accessToken:e.accessToken,loading:e.loading,model:e.model,setLoading:e.setLoading});case"fill-mask":return c.jsx(o1,{accessToken:e.accessToken,loading:e.loading,model:e.model,setLoading:e.setLoading});case"image-classification":return c.jsx(zk,{accessToken:e.accessToken,loading:e.loading,model:e.model,setLoading:e.setLoading});case"image-segmentation":return c.jsx(Bk,{accessToken:e.accessToken,loading:e.loading,model:e.model,setLoading:e.setLoading});case"image-to-image":return c.jsx(Hk,{accessToken:e.accessToken,loading:e.loading,model:e.model,setLoading:e.setLoading});case"image-to-text":return c.jsx(Wk,{accessToken:e.accessToken,loading:e.loading,model:e.model,setLoading:e.setLoading});case"object-detection":return c.jsx(Qk,{accessToken:e.accessToken,loading:e.loading,model:e.model,setLoading:e.setLoading});case"question-answering":return 
c.jsx(l1,{accessToken:e.accessToken,loading:e.loading,model:e.model,setLoading:e.setLoading});case"sentence-similarity":return c.jsx(c1,{accessToken:e.accessToken,loading:e.loading,model:e.model,setLoading:e.setLoading});case"summarization":return c.jsx(f1,{accessToken:e.accessToken,loading:e.loading,model:e.model,setLoading:e.setLoading});case"table-question-answering":return c.jsx(h1,{accessToken:e.accessToken,loading:e.loading,model:e.model,setLoading:e.setLoading});case"tabular-classification":return c.jsx(C1,{accessToken:e.accessToken,loading:e.loading,model:e.model,setLoading:e.setLoading});case"tabular-regression":return c.jsx(I1,{accessToken:e.accessToken,loading:e.loading,model:e.model,setLoading:e.setLoading});case"text-classification":return c.jsx(y1,{accessToken:e.accessToken,loading:e.loading,model:e.model,setLoading:e.setLoading});case"text-generation":return c.jsx(w1,{accessToken:e.accessToken,loading:e.loading,model:e.model,setLoading:e.setLoading});case"text-to-image":return c.jsx(Yk,{accessToken:e.accessToken,loading:e.loading,model:e.model,setLoading:e.setLoading});case"text-to-speech":return c.jsx(Fk,{accessToken:e.accessToken,loading:e.loading,model:e.model,setLoading:e.setLoading});case"token-classification":return c.jsx(b1,{accessToken:e.accessToken,loading:e.loading,model:e.model,setLoading:e.setLoading});case"translation":return c.jsx(_1,{accessToken:e.accessToken,loading:e.loading,model:e.model,setLoading:e.setLoading});case"visual-question-answering":return c.jsx(n1,{accessToken:e.accessToken,loading:e.loading,model:e.model,setLoading:e.setLoading});case"zero-shot-classification":return c.jsx(E1,{accessToken:e.accessToken,loading:e.loading,model:e.model,setLoading:e.setLoading});case"zero-shot-image-classification":return c.jsx(Zk,{accessToken:e.accessToken,loading:e.loading,model:e.model,setLoading:e.setLoading});default:return c.jsx(d.Fragment,{})}},as={},P1=1e3,R1=async e=>{if(as[e])return as[e];const t=[];for await(const n of y0({search:{task:e}}))t.push(n);return t.sort((n,i)=>n.downloads>i.downloads?-1:n.downloads<i.downloads?1:n.likes>i.likes?-1:n.likes<i.likes?1:n.name>i.name?-1:n.name<i.name?1:0),as[e]=t,t},D1=()=>{var u,m,f,h;const{loading:e,setLoading:t}=d.useContext(ko),[n,i]=d.useState([]),r=bo(),a=Ev(),o=((u=a["*"])==null?void 0:u.split("/")[0])||void 0,s=((m=a["*"])==null?void 0:m.split("/").length)===3?`${(f=a["*"])==null?void 0:f.split("/")[1]}/${(h=a["*"])==null?void 0:h.split("/")[2]}`:void 0;d.useEffect(()=>{i([]),o&&(t(!0),R1(o).then(v=>i(v.slice(0,P1))).finally(()=>t(!1)))},[t,o]);const{accessToken:l}=d.useContext(Fr);return l?c.jsxs(d.Fragment,{children:[c.jsxs("div",{className:"w-full",children:[c.jsx("p",{className:"text-xl",children:"Task"}),c.jsxs("select",{className:"w-full cursor-pointer bg-yellow-200 p-6 text-center",disabled:e,onChange:v=>{v.target.value&&r(v.target.value)},value:o??"",placeholder:"Select a task",children:[c.jsx("option",{children:"Select a task"}),j1.map(v=>c.jsx("option",{value:v,children:v},v))]})]}),n.length>0?c.jsxs("div",{className:"w-full",children:[c.jsx("p",{className:"text-xl",children:"Model"}),c.jsxs("select",{className:"w-full cursor-pointer bg-yellow-200 p-6 text-center",disabled:e,onChange:v=>{v.target.value&&r(`${o}/${v.target.value}`)},value:s??"",placeholder:"Select a model",children:[c.jsx("option",{children:"Select a model"}),n.map(v=>c.jsx("option",{value:v.name,children:v.name},v.name))]}),s?c.jsx("div",{className:"p-6 text-center font-bold text-yellow-200",children:c.jsx("a",{href:`https://huggingface.co/${s}`,rel:"noopener noreferrer",target:"_blank",children:"View 
model on 🤗"})}):c.jsx(d.Fragment,{})]}):c.jsx("p",{className:"w-full text-center",children:o?e?"Loading models for this task":"No models available for this task":"Select a task to view available models"}),c.jsx(L1,{accessToken:l,loading:e,model:s,setLoading:t,task:o})]}):c.jsx(d.Fragment,{})},N1=()=>{const t=document.getElementById("root");if(t){const n=Sp(t),r=Qv([{path:"*",element:c.jsx(k0,{}),children:[{path:"sign-in",element:c.jsx(S0,{})},{path:"*",element:c.jsxs(b0,{children:[c.jsx(E0,{}),c.jsx(D1,{})]})}]}]),a=c.jsx(d.StrictMode,{children:c.jsx(_0,{children:c.jsx(x0,{children:c.jsx(i0,{router:r})})})});n.render(a)}};N1();