Problem using QuantLib to get price/yield/interest of a CPIBond instrument - quantlib

I am trying to analyze a TIPS security using QuantLib. I have not been able to find much documentation, but I managed to find an example from 2015 that supposedly worked, since it was posted as a solution. The example no longer works, and the problem lies with the construction of the zeroSwapHelpers.
Here is my modified code:
import QuantLib as ql
import datetime as dt
calendar = ql.UnitedStates(ql.UnitedStates.GovernmentBond)
observationInterpolation = ql.CPI.Flat
calendar = ql.UnitedKingdom()
dayCounter = ql.ActualActual(ql.ActualActual.Bond)
convention = ql.ModifiedFollowing
lag = 3
today = ql.Date(5,3,2008)
evaluationDate = calendar.adjust(today)
issue_date = calendar.advance(evaluationDate,-1, ql.Years)
maturity_date = ql.Date(2,9,2052)
fixing_date = calendar.advance(evaluationDate,-lag, ql.Months)
ql.Settings.instance().setEvaluationDate(evaluationDate)
observationInterpolation = ql.CPI.Flat
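# nominal discounting curve: flat 5% for this example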
yTS = ql.YieldTermStructureHandle(ql.FlatForward(evaluationDate, 0.05, dayCounter))
tenor = ql.Period(1, ql.Months)
from_date = ql.Date(20, ql.July, 2007);
to_date = ql.Date(20, ql.November, 2009);
rpiSchedule = ql.Schedule(from_date, to_date, tenor, calendar,
convention, convention,
ql.DateGeneration.Backward, False)
# this is going to be the holder for the inflation curve
cpiTS = ql.RelinkableZeroInflationTermStructureHandle()
inflationIndex = ql.UKRPI(False, cpiTS)
fixData = [206.1, 207.3, 208.0, 208.9, 209.7, 210.9,
209.8, 211.4, 212.1, 214.0, 215.1, 216.8,
216.5, 217.2, 218.4, 217.7, 216,
212.9, 210.1, 211.4, 211.3, 211.5,
212.8, 213.4, 213.4, 213.4, 214.4]
dte_fixings=[dtes for dtes in rpiSchedule]
print(len(fixData))
print(len(dte_fixings[:len(fixData)]))
#must be the same length
#inflationIndex.addFixings(dte_fixings[:len(fixData)], fixData)
#Current CPI level
#last observed rate
fixing_rate = 214.4
inflationIndex.addFixing(fixing_date, fixing_rate)
observationLag = ql.Period(lag, ql.Months)
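# zero-coupon inflation swap (ZCIIS) market quotes: (maturity date, rate in percent)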
zciisData =[( ql.Date(25, ql.November, 2010), 3.0495 ),
( ql.Date(25, ql.November, 2011), 2.93 ),
( ql.Date(26, ql.November, 2012), 2.9795 ),
( ql.Date(25, ql.November, 2013), 3.029 ),
( ql.Date(25, ql.November, 2014), 3.1425 ),
( ql.Date(25, ql.November, 2015), 3.211 ),
( ql.Date(25, ql.November, 2016), 3.2675 ),
( ql.Date(25, ql.November, 2017), 3.3625 ),
( ql.Date(25, ql.November, 2018), 3.405 ),
( ql.Date(25, ql.November, 2019), 3.48 ),
( ql.Date(25, ql.November, 2021), 3.576 ),
( ql.Date(25, ql.November, 2024), 3.649 ),
( ql.Date(26, ql.November, 2029), 3.751 ),
( ql.Date(27, ql.November, 2034), 3.77225),
( ql.Date(25, ql.November, 2039), 3.77 ),
( ql.Date(25, ql.November, 2049), 3.734 ),
( ql.Date(25, ql.November, 2059), 3.714 )]
#lRates=[rtes/100.0 for rtes in zip(*zciisData)[1]]
#baseZeroRate = lRates[0]
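# build the ZCIIS helpers - this is the call that raises the TypeError shown below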
zeroSwapHelpers = [ql.ZeroCouponInflationSwapHelper(rate/100,observationLag,
date, calendar, convention, dayCounter, inflationIndex,observationInterpolation,yTS) for date,rate in zciisData]
# the derived inflation curve
jj=ql.PiecewiseZeroInflation(
evaluationDate, calendar, dayCounter, observationLag,
inflationIndex.frequency(), inflationIndex.interpolated(),
zciisData[0][1],#baseZeroRate,
yTS, zeroSwapHelpers, 1.0e-12, ql.Linear())
cpiTS.linkTo(jj)
notional = 1000000
fixedRates = [0.1]
fixedDayCounter = ql.Actual365Fixed()
fixedPaymentConvention = ql.ModifiedFollowing
fixedPaymentCalendar = ql.UnitedKingdom()
contractObservationLag = ql.Period(3, ql.Months)
observationInterpolation = ql.CPI.Flat
settlementDays = 3
growthOnly = False
baseCPI = 206.1
fixedSchedule = ql.Schedule(issue_date,
maturity_date,
ql.Period(ql.Semiannual),
fixedPaymentCalendar,
ql.Unadjusted,
ql.Unadjusted,
ql.DateGeneration.Backward,
False)
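# CPI-linked bond: cash flows are scaled by the CPI fixing relative to baseCPI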
bond = ql.CPIBond(settlementDays,
notional,
growthOnly,
baseCPI,
contractObservationLag,
inflationIndex,
observationInterpolation,
fixedSchedule,
fixedRates,
fixedDayCounter,
fixedPaymentConvention)
#bond2= ql.QuantLib.C
bondEngine=ql.DiscountingBondEngine(yTS)
bond.setPricingEngine(bondEngine)
print(bond.NPV() )
print(bond.cleanPrice())
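# semiannually compounded yield, wrapped as an InterestRate for the risk measures below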
compounding = ql.Compounded
yield_rate = bond.bondYield(fixedDayCounter,compounding,ql.Semiannual)
y_curve = ql.InterestRate(yield_rate,fixedDayCounter,compounding,ql.Semiannual)
##Collate results
print( "Clean Price:", bond.cleanPrice())
print( "Dirty Price:", bond.dirtyPrice())
print( "Notional:", bond.notional())
print( "Yield:", yield_rate)
print( "Accrued Amount:", bond.accruedAmount())
print( "Settlement Value:", bond.settlementValue())
#suspect there's more to this for TIPS
print( "Duration:", ql.BondFunctions.duration(bond,y_curve))
print( "Convexity:", ql.BondFunctions.convexity(bond,y_curve))
print( "Bps:", ql.BondFunctions.bps(bond,y_curve))
print( "Basis Point Value:", ql.BondFunctions.basisPointValue(bond,y_curve))
print( "Yield Value Basis Point:", ql.BondFunctions.yieldValueBasisPoint(bond,y_curve))
print( "NPV:", bond.NPV())
# get the cash flows:
#cf_list=[(cf.amount(),cf.date()) for cf in bond.cashflows()]
def to_datetime(d):
    return dt.datetime(d.year(), d.month(), d.dayOfMonth())

for cf in bond.cashflows():
    try:
        amt = cf.amount()
        rte = jj.zeroRate(cf.date())
        zc = yTS.zeroRate(cf.date(), fixedDayCounter, compounding, ql.Semiannual).rate()
    except:
        amt = 0
        rte = 0
        zc = 0
    print(to_datetime(cf.date()), amt, rte, zc)
#################################################
Error:
27
27
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-52-c046731c2284> in <module>
72
73 zeroSwapHelpers = [ql.ZeroCouponInflationSwapHelper(rate/100,observationLag,
---> 74 date, calendar, convention, dayCounter, inflationIndex,observationInterpolation,yTS) for date,rate in zciisData]
75
76
1 frames
/usr/local/lib/python3.7/dist-packages/QuantLib/QuantLib.py in __init__(self, quote, lag, maturity, calendar, bcd, dayCounter, index, observationInterpolation, nominalTS)
17159
17160 def __init__(self, quote, lag, maturity, calendar, bcd, dayCounter, index, observationInterpolation, nominalTS):
> 17161 _QuantLib.ZeroCouponInflationSwapHelper_swiginit(self, _QuantLib.new_ZeroCouponInflationSwapHelper(quote, lag, maturity, calendar, bcd, dayCounter, index, observationInterpolation, nominalTS))
17162 __swig_destroy__ = _QuantLib.delete_ZeroCouponInflationSwapHelper
17163
TypeError: in method 'new_ZeroCouponInflationSwapHelper', argument 1 of type 'Handle< Quote > const &'
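The traceback indicates that the helper's first argument is now expected to be a Handle<Quote> rather than a raw float. A minimal sketch of the adjustment, assuming a recent QuantLib-Python build where the rate has to be wrapped in a quote handle:
zeroSwapHelpers = [ql.ZeroCouponInflationSwapHelper(
        ql.QuoteHandle(ql.SimpleQuote(rate / 100.0)),  # wrap the raw rate in a Handle<Quote>
        observationLag, date, calendar, convention, dayCounter,
        inflationIndex, observationInterpolation, yTS)
    for date, rate in zciisData]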

Related

Find the first value by category - dax

I'm trying to find out how to get the first and last date and quantity for each category, here ORDER_LINE_RELEASE_NO.
ORDER_LINE_RELEASE_NO | WANTED_DATE_OLD | WANTED_DATE_NEW | BUY_QTY_DUE_OLD | BUY_QTY_DUE_NEW
49562_1_9             |                 | 27.01.2022      |                 | 1
49562_1_9             | 27.01.2022      | 27.01.2022      | 1               | 2660
50081_1_1             |                 | 31.01.2022      |                 | 6
50081_1_1             | 31.01.2022      | 31.03.2022      | 6               | 6
50081_1_1             | 31.03.2022      | 31.03.2022      | 6               | 1210
50084_1_1             |                 | 10.02.2022      |                 | 1
50084_1_1             | 10.02.2022      | 10.03.2022      | 1               | 1
50084_1_1             | 10.03.2022      | 10.06.2022      | 1               | 1
50084_2_1             |                 | 10.02.2022      |                 | 60
50084_2_1             | 10.02.2022      | 08.04.2022      | 60              | 60
52370_1_1             |                 | 13.05.2022      |                 | 3000
52370_1_1             | 13.05.2022      | 13.05.2022      | 3000            | 2000
In this original table I have the same ORDER_LINE_RELEASE_NO in multiple rows, and I would like to "summarize" it like in the second table here:
ORDER_LINE_RELEASE_NO | FIRST_DATE | LAST_DATE  | ORIGINAL_QTY | LAST_WANTED_QTY
49562_1_9             | 27.01.2022 | 27.01.2022 | 1            | 2660
50081_1_1             | 31.01.2022 | 31.03.2022 | 6            | 1210
50084_1_1             | 10.02.2022 | 10.06.2022 | 1            | 1
50084_2_1             | 10.02.2022 | 08.04.2022 | 60           | 60
52370_1_1             | 13.05.2022 | 13.05.2022 | 3000         | 2000
So basically, in the column FIRST_DATE we have the first value from column WANTED_DATE_NEW (for each category), in LAST_DATE the last value from WANTED_DATE_NEW, in ORIGINAL_QTY the first value from BUY_QTY_DUE_NEW, and in LAST_WANTED_QTY the last value from BUY_QTY_DUE_NEW.
I tried to use the FIRSTNONBLANK and LASTNONBLANK functions, but they only work for the dates, not for all the quantities - for example for the 52370_1_1 quantity (FIRSTNONBLANK/LASTNONBLANK return the smallest/largest non-blank value of the column, not the first/last by row order, so they give 2000/3000 instead of the wanted 3000/2000).
My code for creating the new table from the other one in Power BI was:
PURCH_ORD_LINE_UNIQUE =
ADDCOLUMNS (
DISTINCT ( PURCH_ORD_LINE_ARCH[ORDER_LINE_RELEASE_NO] ),
"FIRST_DATE",
CALCULATE (
FIRSTNONBLANK (
PURCH_ORD_LINE_ARCH[WANTED_DATE_NEW],
PURCH_ORD_LINE_ARCH[WANTED_DATE_NEW]
),
ALLEXCEPT ( PURCH_ORD_LINE_ARCH, PURCH_ORD_LINE_ARCH[ORDER_LINE_RELEASE_NO] )
),
"LAST_DATE",
CALCULATE (
LASTNONBLANK (
PURCH_ORD_LINE_ARCH[WANTED_DATE_NEW],
PURCH_ORD_LINE_ARCH[WANTED_DATE_NEW]
),
ALLEXCEPT ( PURCH_ORD_LINE_ARCH, PURCH_ORD_LINE_ARCH[ORDER_LINE_RELEASE_NO] )
),
"ORIGINAL_QTY",
CALCULATE (
FIRSTNONBLANK (
PURCH_ORD_LINE_ARCH[BUY_QTY_DUE_NEW],
PURCH_ORD_LINE_ARCH[BUY_QTY_DUE_NEW]
)
),
"LAST_WANTED_QTY",
CALCULATE (
LASTNONBLANK (
PURCH_ORD_LINE_ARCH[BUY_QTY_DUE_NEW],
PURCH_ORD_LINE_ARCH[BUY_QTY_DUE_NEW]
)
)
)
Sorry if my question is too basic - I'm quite new to DAX and Power BI.
Thanks for any answer.
Tomas
Here is my solution (I will share the sample file at the end, so you can easily understand my approach).
First, we add an index column to the table in Power Query:
#"Added Index" = Table.AddIndexColumn(#"Changed Type", "Index", 1, 1, Int64.Type)
Then we add a grouped ranking column as a calculated column:
Group Ranking =
RANKX (
FILTER (
'Table',
EARLIER ( 'Table'[ORDER_LINE_RELEASE_NO ] ) = 'Table'[ORDER_LINE_RELEASE_NO ]
),
'Table'[Index],
,
ASC,
DENSE
)
Finally, we create our table (Modelling --> New Table):
Result Table =
ADDCOLUMNS (
SUMMARIZE ( 'Table', 'Table'[ORDER_LINE_RELEASE_NO ] ),
"FIRST_DATE", CALCULATE ( MIN ( 'Table'[WANTED_DATE_OLD ] ) ),
"LAST_DATE", CALCULATE ( MAX ( 'Table'[WANTED_DATE_new ] ) ),
"ORIGINAL_QTY",
VAR _max =
CALCULATE (
MIN ( 'Table'[Group Ranking] ),
ALLEXCEPT ( 'Table', 'Table'[ORDER_LINE_RELEASE_NO ] ),
NOT ( ISBLANK ( 'Table'[BUY_QTY_DUE_OLD ] ) )
)
RETURN
CALCULATE (
FIRSTNONBLANK ( 'Table'[BUY_QTY_DUE_OLD ], 1 ),
ALLEXCEPT ( 'Table', 'Table'[ORDER_LINE_RELEASE_NO ] ),
'Table'[Group Ranking] = _max
),
"LAST_WANTED_QTY",
VAR _max =
CALCULATE (
MAX ( 'Table'[Group Ranking] ),
ALLEXCEPT ( 'Table', 'Table'[ORDER_LINE_RELEASE_NO ] )
)
RETURN
CALCULATE (
SUM ( 'Table'[BUY_QTY_DUE_NEW] ),
ALLEXCEPT ( 'Table', 'Table'[ORDER_LINE_RELEASE_NO ] ),
'Table'[Group Ranking] = _max
)
)
Here is the sample PBIX file for you...

Power BI RANKX is not continuous

I want to show the top 10 differences in a measure.
The difference is calculated as YTD actual + rest-of-year forecast - full-year budget.
The normal measure looks like this:
VAR _Year =
SELECTEDVALUE ( 'Calendar'[Year] )
RETURN
(
CALCULATE (
SELECTEDMEASURE (),
DATESYTD ( Calendar[Dates] ),
CRDB[Scenario] = "Actual",
ALL ( CRDB[ForecastTypeFinal] )
)
+ CALCULATE (
SELECTEDMEASURE (),
CRDB[Scenario] = "Forecast",
'Calendar'[Dates] >= DATE ( _Year, 1, 1 )
&& 'Calendar'[Dates] <= DATE ( _Year, 12, 31 )
)
)
- CALCULATE (
SELECTEDMEASURE (),
Calendar[Dates] >= DATE ( _Year, 1, 1 )
&& Calendar[Dates] <= DATE ( _Year, 12, 31 ),
CRDB[Scenario] = "Budget",
ALL ( CRDB[ForecastTypeFinal] )
)
I would like to rank by project, so I made this ranking measure:
RANKX (
ALL ( CRDB[Project ID - Project ID Level 01 (Text)] ),
(
CALCULATE (
SELECTEDMEASURE (),
DATESYTD ( Calendar[Dates] ),
CRDB[Scenario] = "Actual",
ALL ( CRDB[ForecastTypeFinal] )
)
+ CALCULATE (
SELECTEDMEASURE (),
CRDB[Scenario] = "Forecast",
'Calendar'[Dates] >= DATE ( SELECTEDVALUE ( 'Calendar'[Year] ), 1, 1 )
&& 'Calendar'[Dates] <= DATE ( SELECTEDVALUE ( 'Calendar'[Year] ), 12, 31 )
)
)
- CALCULATE (
SELECTEDMEASURE (),
Calendar[Dates] >= DATE ( SELECTEDVALUE ( 'Calendar'[Year] ), 1, 1 )
&& Calendar[Dates] <= DATE ( SELECTEDVALUE ( 'Calendar'[Year] ), 12, 31 ),
CRDB[Scenario] = "Budget",
ALL ( CRDB[ForecastTypeFinal] )
),
,
DESC
))
The ranking gets me the correct projects, but when I look at the rank values, they are not right (see the screenshot of the ranking values).
The values are not equal, so ties are no reason for skipping places.
What am I doing wrong?
Thank you for your help in advance.

How to write this logic in DAX

I'd like to know how to write the SQL below in DAX. I'm a beginner and I'm having a lot of problems trying to write this code in DAX to return the value I need.
(select
top 1 Payor.CompanyName
from Payor, PatientPayor
where PatientPayor.PatientSer = pat.PatientSer
and Payor.PayorSer = PatientPayor.PayorSer) as conv,
This is what I have tried so far:
Convenio =
TOPN ( 1, VALUES ( Payor[CompanyName] ), PatientPayor[PatientSer] )
= FILTER ( Patient, PatientPayor[PatientSer] = Patient[PatientSer] )
I did solve it with the code below:
formula =
VAR currpayorSer =
SELECTEDVALUE ( PatientPayor[PayorSer] )
VAR currPatientSer =
SELECTEDVALUE ( PatientPayor[PatientSer] )
RETURN
CALCULATE (
MIN ( Payor[CompanyName] ),
FILTER ( ALLSELECTED ( Payor ), Payor[PayorSer] = currpayorSer ),
FILTER ( ALLSELECTED ( Patient ), Patient[PatientSer] = currPatientSer )
)

A measure for comparing two values and counting the result in Power BI

How can I write a measure to count the number of userID values for which sum(x1) equals count(order_id), in Power BI?
For example, my data table is:
userID | x1 | order_id
141    | 1  | 719
172    | 0  | 616
172    | 0  | 189
172    | 0  | 2211
172    | 0  | 317
1103   | 1  | 98
1103   | 1  | 213
1103   | 1  | 15
2524   | 0  | 4902
2524   | 1  | 3620
and I use a table visual in Power BI for this, to explain what I mean:
userID | sum(x1) | count(order_id)
141    | 1       | 1
172    | 0       | 4
1103   | 3       | 3
2524   | 1       | 2
Note that the userID column is one of the columns in my data table; calculating sum(x1) and count(order_id) in this sample is done by Power BI's default aggregations.
The result for this sample should be 2. I need a measure that returns 2.
Measure1 =
VAR _base1 =
SUMMARIZE ( 'Table 1', 'Table 1'[userID] )
VAR _base2 =
ALLEXCEPT ( 'Table 1', 'Table 1'[userID] )
VAR _ct =
ADDCOLUMNS ( _base1, "X", CALCULATE ( COUNT ( 'Table 1'[order_id] ), _base2 ) )
VAR _sum =
ADDCOLUMNS ( _base1, "X", CALCULATE ( SUM ( 'Table 1'[x1] ), _base2 ) )
VAR _nt =
NATURALINNERJOIN ( _sum, _ct )
RETURN
COUNTROWS ( _nt )
or
Measure4 =
VAR _1 =
COUNTX (
VALUES ( 'Table 1'[userID] ),
VAR _base =
ALLEXCEPT ( 'Table 1', 'Table 1'[userID] )
VAR _1 =
CALCULATE ( SUM ( 'Table 1'[x1] ), _base )
VAR _2 =
CALCULATE ( COUNTROWS ( 'Table 1' ), _base )
VAR _3 =
IF ( _1 = _2, 1 )
RETURN
_3
)
RETURN
_1
This should work:
count_valid_rows =
VAR sum_x1_table =
SUMMARIZECOLUMNS ( 'table'[userID], 'table', "sumx1", SUM ( 'table'[x1] ) )
VAR count_orderId_table =
SUMMARIZECOLUMNS (
'table'[userID],
'table',
"countOfOrders", COUNT ( 'table'[x1] )
)
RETURN
COUNTROWS (
FILTER (
NATURALINNERJOIN ( sum_x1_table, count_orderId_table ),
[sumx1] = [countOfOrders]
)
)
Docs of the functions used: NATURALINNERJOIN, SUMMARIZECOLUMNS.
Another suggestion:
Count :=
SUMX (
SUMMARIZECOLUMNS (
'Table'[userID] ,
"Sum" , SUM ( 'Table'[x1] ),
"Count" , COUNT ( 'Table'[order_id] )
),
IF ( [Sum] = [Count] , 1 )
)
As you can see from the other answers, there are plenty of ways to calculate this. I suggest you look over all the suggestions to understand what is going on in each, and then settle on your preferred way of dealing with this type of problem.
Your new measure may look like this one:
calculate( countrows('YourTabel'), FILTER(ALL('YourTabel'), somestatementIfneeded && var __x1 = [x1] var __x2 = [x2] return __x1 = __x2))
The main part is to use variables as placeholders.

Trying to Calculate daily percentage based on Filter and ALLEXCEPT

I have the question below, which I asked earlier, but along with that I want to filter further by other columns: apart from month and year, I want to add Resource Name and RecordType.
How to calculate daily percentage over month on month volume?
Below I tried to add ALLEXCEPT, which is not working:
Total_Percentage =
VAR TotalPerMonth =
CALCULATE (
SUM ( data1[Actual] ),
FILTER ( data1, data1[Month].[Month] = EARLIER ( data1[Month].[Month] ) ),
FILTER ( data1, data1[Month].[Year] = EARLIER ( data1[Month].[Year] ) ),
ALLEXCEPT(data1,data1[RecordType],data1[Resource Name]),
FILTER ( data1, data1[Flag] = 1 )
)
RETURN
DIVIDE ( data1[actual], TotalPerMonth, 0 )
This might be a bit more optimized:
Total_Percentage =
VAR TotalPerMonth =
CALCULATE (
SUM ( data1[Actual] ),
FILTER (
ALLEXCEPT ( data1, data1[RecordType], data1[Resource Name] ),
data1[Month].[Month] = EARLIER ( data1[Month].[Month] ) &&
data1[Month].[Year] = EARLIER ( data1[Month].[Year] ) &&
data1[Flag] = 1
)
)
RETURN
DIVIDE ( data1[actual], TotalPerMonth, 0 )
I think this should work for me. If you have anything more optimized, please let me know:
Total_Percentage =
VAR TotalPerMonth =
CALCULATE (
SUM ( data1[Actual] ),
FILTER ( data1, data1[Month].[Month] = EARLIER ( data1[Month].[Month] ) ),
FILTER ( data1, data1[Month].[Year] = EARLIER ( data1[Month].[Year] ) ),
FILTER(ALL('data1'),[Resource Name]=EARLIER('data1'[Resource Name])),
FILTER(ALL('data1'),[RecordType]=EARLIER('data1'[RecordType])),
FILTER ( data1, data1[Flag] = 1 )
)
RETURN
DIVIDE ( data1[actual], TotalPerMonth, 0 )