aliabd (HF Staff) committed on
Commit f430b16
1 Parent(s): a6ee614

Update app.py

Files changed (1)
  1. app.py +2 -2
app.py CHANGED
@@ -1,4 +1,5 @@
 import os, os.path
+os.system("pip install gradio==2.9b23")
 from os.path import splitext
 import numpy as np
 import sys
@@ -87,5 +88,4 @@ title = None #"Left Ventricle Segmentation"
 description = "This semantic segmentation model identifies the left ventricle in echocardiogram images."
 # videos. Accurate evaluation of the motion and size of the left ventricle is crucial for the assessment of cardiac function and ejection fraction. In this interface, the user inputs apical-4-chamber images from echocardiography videos and the model will output a prediction of the localization of the left ventricle in blue. This model was trained on the publicly released EchoNet-Dynamic dataset of 10k echocardiogram videos with 20k expert annotations of the left ventricle and published as part of ‘Video-based AI for beat-to-beat assessment of cardiac function’ by Ouyang et al. in Nature, 2020."
 thumbnail = "https://raw.githubusercontent.com/gradio-app/hub-echonet/master/thumbnail.png"
-gr.Interface(segment, i, o, examples=examples, allow_flagging=False, analytics_enabled=False,
-             title=title, description=description, thumbnail=thumbnail).launch()
+gr.Interface(segment, i, o, examples=examples, allow_flagging=False, analytics_enabled=False, thumbnail=thumbnail).launch()
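
For context, a minimal sketch of how the updated app.py might fit together after this commit. Only the lines shown in the diff above are taken from the repository; the segment stub, the i/o Image components, and the empty examples list are hypothetical stand-ins for the parts of the file the diff does not show.

import os, os.path

# The commit pins Gradio at runtime by shelling out to pip before the import.
os.system("pip install gradio==2.9b23")

from os.path import splitext
import numpy as np
import sys
import gradio as gr


def segment(image):
    # Hypothetical stand-in for the real left-ventricle segmentation model:
    # simply echoes the input frame instead of predicting a mask.
    return image


# Hypothetical components and example list; the real definitions sit outside this diff.
i = gr.inputs.Image()
o = gr.outputs.Image()
examples = []

title = None  # "Left Ventricle Segmentation"
description = "This semantic segmentation model identifies the left ventricle in echocardiogram images."
thumbnail = "https://raw.githubusercontent.com/gradio-app/hub-echonet/master/thumbnail.png"

# As of this commit, title and description are no longer passed to gr.Interface.
gr.Interface(segment, i, o, examples=examples, allow_flagging=False,
             analytics_enabled=False, thumbnail=thumbnail).launch()

Installing the pinned Gradio build via os.system at startup works, though declaring the version in the Space's requirements.txt would be the more conventional way to pin it.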