UNION ALL in subquery while updating a table in BigQuery

I have the below query in BigQuery. I am trying to update a table using this query, invoking the SQL command from the CLI.
bq query --use_legacy_sql=false "UPDATE data_set.table_to_update A
SET D_A_AMOUNT = B.D_A_AMOUNT, A_FEE = B.A_FEE,
O_T_AMOUNT = B.O_T_AMOUNT, U_FEE = B.U_FEE,
UPDATED_DATETIME = B.UPDATED_DATETIME, UPDATED_BY = B.UPDATED_BY
FROM data_set.table_to_update C
INNER JOIN (
SELECT CAST(131.27 AS NUMERIC) AS D_A_AMOUNT, CAST(20.66 AS NUMERIC) AS A_FEE,
'12345' AS TRANSACTION_KEY, CAST(145871.0 AS NUMERIC) AS O_T_AMOUNT,
CAST(131.27 AS NUMERIC) AS U_FEE, '2022-09-28 15:30:47' AS UPDATED_DATETIME,
'APPLE_USER' AS UPDATED_BY, 'P_S_US' AS L_IDENTITY ) B
ON C.TRANSACTION_KEY = B.TRANSACTION_KEY WHERE C.L_IDENTITY = 'P_S_US';"
This query runs absolutely fine.
Now I am trying to add a UNION ALL statement like below.
bq query --use_legacy_sql=false "UPDATE data_set.table_to_update A
SET D_A_AMOUNT = B.D_A_AMOUNT, A_FEE = B.A_FEE,
O_T_AMOUNT = B.O_T_AMOUNT, U_FEE = B.U_FEE,
UPDATED_DATETIME = B.UPDATED_DATETIME, UPDATED_BY = B.UPDATED_BY
FROM data_set.table_to_update C
INNER JOIN (
SELECT CAST(131.27 AS NUMERIC) AS D_A_AMOUNT, CAST(20.66 AS NUMERIC) AS A_FEE,
'12345' AS TRANSACTION_KEY, CAST(145871.0 AS NUMERIC) AS O_T_AMOUNT,
CAST(131.27 AS NUMERIC) AS U_FEE, '2022-09-28 15:30:47' AS UPDATED_DATETIME,
'APPLE_USER' AS UPDATED_BY, 'P_S_US' AS L_IDENTITY
UNION ALL
SELECT CAST(134.19 AS NUMERIC) AS D_A_AMOUNT, CAST(21.31 AS NUMERIC) AS A_FEE,
'987654232' AS TRANSACTION_KEY, CAST(149112.0 AS NUMERIC) AS O_T_AMOUNT,
CAST(134.19 AS NUMERIC) AS U_FEE, '2022-09-28 15:30:47' AS UPDATED_DATETIME,
'APPLE_USER' AS UPDATED_BY, 'P_S_US' AS L_IDENTITY) B
ON C.TRANSACTION_KEY = B.TRANSACTION_KEY WHERE C.L_IDENTITY = 'P_S_US';"
I am getting the below error:
UPDATE/MERGE must match at most one source row for each target row
I am a newbie to BigQuery and I am unable to figure out what the issue is here.
How can I make my second query work?

All you need to do is create a temp table using the UNION ALL statement and then join to it like below:
bq query --use_legacy_sql=false "CREATE OR REPLACE TABLE data_set.update_union AS (
SELECT CAST(131.27 AS NUMERIC) AS D_A_AMOUNT, CAST(20.66 AS NUMERIC) AS A_FEE,
'12345' AS TRANSACTION_KEY, CAST(145871.0 AS NUMERIC) AS O_T_AMOUNT,
CAST(131.27 AS NUMERIC) AS U_FEE, '2022-09-28 15:30:47' AS UPDATED_DATETIME,
'APPLE_USER' AS UPDATED_BY, 'P_S_US' AS L_IDENTITY
UNION ALL
SELECT CAST(134.19 AS NUMERIC) AS D_A_AMOUNT, CAST(21.31 AS NUMERIC) AS A_FEE,
'987654232' AS TRANSACTION_KEY, CAST(149112.0 AS NUMERIC) AS O_T_AMOUNT,
CAST(134.19 AS NUMERIC) AS U_FEE, '2022-09-28 15:30:47' AS UPDATED_DATETIME,
'APPLE_USER' AS UPDATED_BY, 'P_S_US' AS L_IDENTITY);
UPDATE data_set.table_to_update A
SET D_A_AMOUNT = B.D_A_AMOUNT, A_FEE = B.A_FEE,
O_T_AMOUNT = B.O_T_AMOUNT, U_FEE = B.U_FEE,
UPDATED_DATETIME = B.UPDATED_DATETIME, UPDATED_BY = B.UPDATED_BY
FROM data_set.table_to_update C
INNER JOIN data_set.update_union B
ON C.TRANSACTION_KEY = B.TRANSACTION_KEY
WHERE C.L_IDENTITY = 'P_S_US' AND 1 > 0 and C.TRANSACTION_KEY = A.TRANSACTION_KEY;"
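The error appears because the UPDATE's WHERE clause in the failing query never references the target alias A, so as soon as the UNION ALL produces more than one source row, every target row matches several of them; the extra C.TRANSACTION_KEY = A.TRANSACTION_KEY condition above is what ties each target row back to a single source row (assuming TRANSACTION_KEY uniquely identifies a row). If you would rather keep the UNION ALL inline instead of materialising a temp table, a minimal, untested sketch reusing the names and literals from the question could look like this:
-- Sketch only: the inline UNION ALL is correlated directly with the target A,
-- so each target row can match at most one source row.
UPDATE data_set.table_to_update A
SET D_A_AMOUNT = B.D_A_AMOUNT, A_FEE = B.A_FEE,
O_T_AMOUNT = B.O_T_AMOUNT, U_FEE = B.U_FEE,
UPDATED_DATETIME = B.UPDATED_DATETIME, UPDATED_BY = B.UPDATED_BY
FROM (
SELECT '12345' AS TRANSACTION_KEY, CAST(131.27 AS NUMERIC) AS D_A_AMOUNT,
CAST(20.66 AS NUMERIC) AS A_FEE, CAST(145871.0 AS NUMERIC) AS O_T_AMOUNT,
CAST(131.27 AS NUMERIC) AS U_FEE, '2022-09-28 15:30:47' AS UPDATED_DATETIME,
'APPLE_USER' AS UPDATED_BY
UNION ALL
SELECT '987654232', CAST(134.19 AS NUMERIC),
CAST(21.31 AS NUMERIC), CAST(149112.0 AS NUMERIC),
CAST(134.19 AS NUMERIC), '2022-09-28 15:30:47',
'APPLE_USER'
) B
WHERE A.TRANSACTION_KEY = B.TRANSACTION_KEY AND A.L_IDENTITY = 'P_S_US';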

AWS Redshift Stored Procedure Abortions

I've got two stored procedures which run every 4 hours. I'm experiencing odd behaviour where both of these procedures get aborted exactly as many times as they run successfully (see table below). I'm using pg_catalog.svl_stored_proc_call to get the proc run status.
When I look at pg_catalog.stl_load_errors I can't see any errors for the runs.
What's the best way to investigate this behaviour?
Code for datawarehouse.p_add_missing_tbls()
DECLARE
row RECORD;
row2 RECORD;
BEGIN
FOR row IN select * from
(
select distinct
concat(concat(lower(regexp_replace(d.service,'-','_')), '_'),
lower(regexp_replace(regexp_replace(d.entity_name,'::',''),'(.)([A-Z]+)','$1_$2'))) as "tbl_name",
concat(concat(concat(concat(lower(regexp_replace(d.service,'-','_')),'_'),lower(regexp_replace(regexp_replace(d.entity_name,'::',''),'(.)([A-Z]+)','$1_$2'))),'_'), d."key") as "key" ,
d.value_type,
case d.value_type
when '0' then 'varchar(32768)'
when '1' then 'numeric(20,10)'
when '2' then 'int(2)'
when '3' then 'timestamp'
when '4' then 'varchar(32768)'
end as "data_type"
from
datawarehouse.definitions d
where
d."key" not in ('id')
)
loop
--List of keys
-- select into row2 '\'' || listagg(distinct "key",'\',\'') || '\'' as keys from datawarehouse.definitions where concat(concat(lower(service),'_'),lower(entity_name)) = row.tbl_name;
-- select into row2 '\'' || listagg(distinct "key",'\',\'') || '\'' as keys from datawarehouse.definitions where concat(concat(lower(definitions.service),'_'),lower(definitions.entity_name)) = row.tbl_name;
select into row2 '\'' || listagg(distinct (concat(concat(concat(concat(lower(regexp_replace(service,'-','_')),'_'),lower(regexp_replace(regexp_replace(entity_name,'::',''),'(.)([A-Z]+)','$1_$2'))),'_'), "key")),'\',\'') || '\'' as keys from datawarehouse.definitions where concat(concat(lower(regexp_replace(definitions.service,'-','_')),'_'),lower(regexp_replace(regexp_replace(definitions.entity_name,'::',''),'(.)([A-Z]+)','$1_$2'))) = row.tbl_name and definitions."key" not in ('id');
--Delete staging tbl
execute 'drop table if exists staging.staging_'||row.tbl_name||';';
--Create staging tbl
EXECUTE 'create table staging.staging_'||row.tbl_name||' AS
select *
from (select
attribute_values.entity_id as '||row.tbl_name||'_id,
-- definitions."key",
concat(concat(concat(concat(lower(regexp_replace(definitions.service,''-'',''_'')),''_''),lower(regexp_replace(regexp_replace(definitions.entity_name,''::'',''''),''(.)([A-Z]+)'',''$1_$2''))),''_''), definitions."key")::varchar as "key",
concat(concat(lower(regexp_replace(definitions.service,''-'',''_'')),''_''),lower(regexp_replace(regexp_replace(definitions.entity_name,''::'',''''),''(.)([A-Z]+)'',''$1_$2''))) as "tbl_name",
attribute_values.updated_at as "updated_at",
attribute_values.destroyed_upstream as "deleted_upstream",
case definitions.value_type
when ''0'' then attribute_values.string_value::varchar
when ''1'' then attribute_values.number_value::varchar
when ''2'' then attribute_values.boolean_value::varchar
when ''3'' then attribute_values.datetime_value::varchar
when ''4'' then attribute_values.array_value::varchar
end as "final_value"
from
datawarehouse.attribute_values
left join
datawarehouse.definitions
on
definitions.id = attribute_values.definition_id
where
attribute_values.updated_at>= coalesce(((select max(updated_at) from datawarehouse.'|| row.tbl_name || ' )), (select min(av.updated_at) from datawarehouse.attribute_values av ))
and
tbl_name='''||row.tbl_name||'''
and
definitions."key" not in (''id'')
order by
entity_id desc)
PIVOT (max(final_value) for "key" in ( ' || row2.keys || ' )
);';
-- Drop staging.staging_col_info_v
-- execute 'drop view staging.staging_col_info_v;';
-- Create view
execute ' create or replace view staging.staging_col_info_v AS
with staging_tbl_info as (
select
d.table_schema ,
d.table_name ,
d.column_name as "column_name"
from
pg_catalog.svv_columns d
where
d.table_schema = ''staging''
and
d.table_name like ''staging_%''
),
tbl_info as (
select
col_name,
data_type
from
datawarehouse.tbl_col_v
)
select
*
from
staging_tbl_info s
inner join
tbl_info h
on
h.col_name = s.column_name;';
END LOOP;
RETURN;

JOINING Two Tables together in SAS

I am working on this SAS code and need assistance with joining the two tables below. I am getting errors while trying to join them.
Requirement: i. Left Join Table B to Table A
Table A:
PROC SQL;
create table stand as select distinct
put(datepart(Max(a.REPORT_DATE)),Date9.) as M_Date
, a.BUSINESS_GROUP as PORTF_LEVEL1
, A.SPLIT as PORTF_LEv2
, Count(distinct a.Report_Date) as Number_of_Days
, (B.TOTAL_BREACH/Count(distinct a.Report_Date))*100 as FREQ
, A.MINIMUM_ACCEPTABLE_COUNT
, A.MAX_COUNT
, (case WHEN (B.TOTAL_BREACH/Count(distinct a.Report_Date)) * 100 LT MIN_COUNT
THEN 'TRUE' ELSE 'FALSE' END) as NUMBER__UNDER
, (case WHEN (B.TOTAL_BREACH/Count(distinct a.Report_Date)) * 100 GT MAX_COUNT THEN 'TRUE' ELSE 'FALSE' END) as NUMBER__OVER
from temp a
INNER join
( select BUSINESS_GROUP as PORTF_LEVEL1
,SPLIT AS PORTF_LEv2
,Count(distinct c.Report_Date) as Number_of_Days
from temp c
Inner join temp2 d
on c.Report_Date=d.Report_Date
WHERE &Alert and TENOR = '+'
and datepart(c.REPORT_DATE) ge '31-APR-21'd
and datepart(c.REPORT_DATE) le '31-APR-22'd
Group by BUSINESS_GROUP, SPLIT
)B
on a.BUSINESS_GROUP = b.PORTF_LEVEL1
AND a.SPLIT = b.PORTF_LEVEL2
INNER JOIN temp2 e
on a.REPORT_DATE = e.REPORT_DATE
where &Alert and TENOR = '+'
and datepart(a.REPORT_DATE) ge '31-APR-21'd
and datepart(a.REPORT_DATE) le '31-APR-22'd
Group by Business_GROUP, SPLIT
;
QUIT;
Table B:
In Table B, I am trying to find the median of the variable Data_M. The code seems to be okay; I only need assistance joining Table B to Table A above.
Proc sql outobs=1;
create table median_dt1 as select distinct
put(datepart(max(REPORT_DATE)), date9.) as M_Date
, median(Data_M) as median_data
from transp
WHERE datepart(REPORT_DATE) ge '01-APR-22'd
and datepart(REPORT_DATE) le '31-APR-22'd
group by BUSINESS_GROUP
order by Report_Date Desc;
quit;
Thank you in advance!
from temp a
INNER join
( select BUSINESS_GROUP as PORTF_LEVEL1
,SPLIT AS PORTF_LEv2
,Count(distinct c.Report_Date) as Number_of_Days
from temp c
Inner join temp2 d
on c.Report_Date=d.Report_Date
WHERE &Alert and TENOR = '+'
and datepart(c.REPORT_DATE) ge '31-APR-21'd
and datepart(c.REPORT_DATE) le '31-APR-22'd
Group by BUSINESS_GROUP, SPLIT
)B
on a.BUSINESS_GROUP = b.PORTF_LEVEL1
AND a.SPLIT = b.PORTF_LEVEL2
You're trying to join on b.PORTF_LEVEL2. However, that column doesn't exist in B. The column "PORTF_LEV2" exists, though. Try that?
If that doesn't resolve the issue, please paste the complete error message that you're receiving.
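In other words (a sketch only, reusing the aliases from the posted query), the ON clause would become:
on a.BUSINESS_GROUP = b.PORTF_LEVEL1
AND a.SPLIT = b.PORTF_LEv2
Alternatively, the alias inside the subquery could be renamed from PORTF_LEv2 to PORTF_LEVEL2 so that the outer reference can stay as written.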

Nested While Loop in Redshift

My Table_1 looks like this
Parent_Id Child_Id Product Prod_count
1000 1 A 1
1000 2 A+B 1
1000 3 A 1
1000 4 B+C 1
2000 1 A 1
2000 2 B+C 1
2000 3 C 1
2000 4 D 1
I am trying to do a nested loop in this procedure: the outer loop is based on Parent_Id, and since each parent has a different set of children, each child row also has to be read.
I have tried this
create or replace procedure sp_dummy(IN var1 int, IN var2 int, IN var3 int) as $$
Begin
create temp table find_id as(
select distinct parent_id,row_number() over(order by 1) as rw_num
from table_1
);
declare
tot_cnt int := (select count(distinct parent_id) from find_id );
init_loop int := 1;
in_init_loop int := 1;
in_tot_init_loop int;
v_parent_id int;
Begin
While init_loop <= tot_cnt
Loop
Raise info 'init_loop = %', Init_loop;
Execute 'Select parent_id into ' || v_parent_id || ' from find_id where rw_num = ' || Init_loop;
Raise info 'v_patient_id = %', v_patient_id;
Execute 'Select Count(*) into ' || in_tot_init_loop || ' from Table_1 where Parent_Id = ' || v_parent_id;
While in_init_loop <= in_tot_init_loop
Loop
Raise info 'in_init_loop = %', in_init_loop;
in_init_loop = in_init_loop + 1
End loop;
init_loop = init_loop + 1;
end loop;
End;
End;
$$ language plpgsql;
On trying this I am getting the error Cannot Execute a Null Query string.
I gave up on trying to understand this error! :(
This line seems problematic:
Execute 'Select parent_id into ' || v_parent_id || ' from find_id where rw_num = ' || Init_loop;
The v_parent_id is empty, so it would translate into:
Select parent_id into NULL from find_id where rw_num = 1;
I think you actually wanted to write:
SELECT INTO v_parent_id
parent_id
FROM find_id
WHERE rw_num = Init_loop;
Yes, you can actually put the SQL in-line, rather than having to pass it as a string to EXECUTE. Take a look at the examples in Structure of PL/pgSQL - Amazon Redshift.
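The same inline pattern applies to the other dynamic statement in the loop; a rough, untested sketch reusing the names from the question:
-- The variable receives the count directly, no string building or EXECUTE needed.
SELECT INTO in_tot_init_loop count(*) FROM table_1 WHERE parent_id = v_parent_id;
Separately, note that in the posted procedure in_init_loop is only initialised once and never reset inside the outer loop, and the line in_init_loop = in_init_loop + 1 is missing its semicolon, so those would also need attention once the EXECUTE issue is resolved.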

Convert query to doctrine DQL

I have a pretty big MySQL query; for performance optimization I'm adding subqueries inside a join statement. With raw SQL everything works fine. Here is the query:
SELECT
campaigns.id,
campaigns.name,
CONCAT(users.id, ' ', users.email) AS usersData,
CONCAT(campaigns.cpm, ' ', currencies.currency_code) AS cpm,
CONCAT(campaign_budgets.total_spend, ' ', currencies.currency_code) AS total_spend,
creatives.impressionsCount,
creatives.bidsCount,
creatives.winsAmount,
creatives.winsPercentage,
creatives.creativeIds
FROM campaigns
INNER JOIN users ON campaigns.user_id = users.id
INNER JOIN campaign_budgets ON campaigns.id = campaign_budgets.campaign_id
INNER JOIN currencies ON campaigns.currency_type_id = currencies.id
LEFT JOIN (
SELECT
GROUP_CONCAT(creatives.id) as creativeIds,
creatives.campaign_id,
creatives.user_id,
impressions.impressionsCount,
bids.bidsCount,
bids.winsAmount,
bids.winsPercentage
from creatives
LEFT JOIN (
SELECT
count(impressions.id) as impressionsCount,
impressions.user_id,
impressions.creative_id
from impressions
GROUP BY impressions.user_id
) as impressions ON creatives.user_id = impressions.user_id
LEFT JOIN (
SELECT
count(bids.id) as bidsCount,
SUM(CASE WHEN bids.status = 'won' THEN 1 ELSE 0 END) AS winsAmount,
SUM(CASE WHEN bids.status = 'won' THEN 1 ELSE 0 END) / COUNT(bids.id) * 100 AS winsPercentage,
bids.user_id,
bids.creative_id
from bids
GROUP BY bids.user_id
) as bids ON creatives.user_id = bids.user_id
GROUP BY creatives.campaign_id
) as creatives ON campaigns.id = creatives.campaign_id
GROUP BY campaigns.id
and I need to convert it to Doctrine DQL somehow, if that is possible. I've run into an issue when adding a subquery to a join statement. Here is my code:
$columns = [
'campaign.id',
'campaign.name',
'CONCAT(owner.id,\' \', owner.email) as ownerEmail',
'CONCAT(campaign.cpm,\' \', currency.currencyCode) as cpm',
'CONCAT(budget.totalSpend,\' \', currency.currencyCode) as totalSpend',
'COUNT(imp.id) as impressionsCount',
'COUNT(bid.id) as totalBidsCount',
'SUM(case when bid.status = \'won\' then 1 else 0 end) as winsAmount',
'SUM(case when bid.status = \'won\' then 1 else 0 end)/COUNT(bid.id)*100 as winsPercentage',
];
$bids = $this->_em->getRepository(Bid::class)
->createQueryBuilder('bids')
->select([
'count(bids.id) as bidsCount',
'SUM(CASE WHEN bids.status = \'won\' THEN 1 ELSE 0 END) AS winsAmount',
'SUM(CASE WHEN bids.status = \'won\' THEN 1 ELSE 0 END) / COUNT(bids.id) * 100 AS winsPercentage',
'bids.userId',
'bids.creativeId'
])->getDQL();
$impressions = $this->_em->getRepository(Impression::class)
->createQueryBuilder('imp')
->select([
'count(imp.id) as impressionsCount',
'imp.userId',
'imp.creativeId'
])->getDQL();
$creative = $this->_em->getRepository(Creative::class) ->createQueryBuilder('cr')->select('cr.id')
->select([
'GROUP_CONCAT(cr.id) as creativeIds',
'cr.campaignId',
'cr.userId',
'impressions.impressionsCount',
'bids.bidsCount',
'bids.winsAmount',
'bids.winsPercentage'
])
->leftJoin(Impression::class, sprintf('(%s) as imp', $impressions), Expr\Join::WITH, 'imp.id = cr.userId')
->leftJoin(Bid::class, sprintf('(%s) as bid', $bids), Expr\Join::WITH, 'bids.id = cr.userId')
->getDQL();
$query = $this->createQueryBuilder('campaign')
->select($columns);
$query
->join('campaign.user', 'owner')
->join('campaign.campaignBudget', 'budget')
->join('campaign.currencyType', 'currency')
->leftJoin(Creative::class, sprintf('(%s) as creative', $creative), Expr\Join::WITH, 'campaign.id = cr.campaignId');
$query->groupBy('campaign.id');
$query->setMaxResults($limit);
$query->setFirstResult($offset);
return $query->getQuery()->useQueryCache(true)->getResult();
I'm getting the error [Syntax Error] line 0, col 626: Error: Expected Doctrine\ORM\Query\Lexer::T_IDENTIFIER, got '('. The issue is in adding the subquery to the join statement.
I would appreciate any help!
Okay, I found a solution using DBAL instead of DQL, using this as a reference.

Informatica Repository Query to get Workflow, Session, Mapping and Source/Target of Mapping

For cleaning up unused IPC sources I need a repository query to get the workflow, session, mapping and source/target of each mapping. I started by joining REP_LOAD_SESSIONS and REP_TBL_MAPPING on mapping_id, but only a fraction of the mappings seem to be present in the joined output.
I can't find the right tables to join to get the job done.
Any help will be greatly appreciated!
I was struggling with the same issue. Here is my query; hope it helps.
SELECT SUBJECT_AREA,SESSIONNAME,MPGANDP MAPPINGNAME,SOURCENAMES,TARGET_NAMES,INSTANCE_NAME,LOOKUPTABLENAME,CASE WHEN OBJECTTYPE='Lookup ' THEN CONNECTION ELSE CNX_NAME END CONNECTIONNAME,USER_NAME
FROM
( SELECT * FROM
(SELECT SUBJECT_AREA,SESSION_ID,MPGANDP, MPNGID,OBJECTTYPE,INSTANCE_NAME,MAX(LOOKUPTABLE) LOOKUPTABLENAME, MAX(CONNECTION) CONNECTION
--,LISTAGG(SQLQUERY, '' ) WITHIN GROUP (ORDER BY SQLQUERY) SQLOVERRIRDE
FROM
(
SELECT CASE WHEN MAPPING_NAME=PARENT_MAPPING_NAME THEN MAPPING_NAME ELSE MAPPING_NAME||','||PARENT_MAPPING_NAME END MPGANDP, B.MAPPING_ID MPNGID,
SUBSTR(WIDGET_TYPE_NAME,1,INSTR(WIDGET_TYPE_NAME,' ')) OBJECTTYPE, INSTANCE_NAME, CASE WHEN UPPER(ATTR_NAME) ='CONNECTION INFORMATION' THEN ATTR_VALUE ELSE NULL END CONNECTION,
ATTR_NAME, ATTR_VALUE,SUBJECT_AREA, --A.*,B.*,C.*
--CASE WHEN ATTR_NAME='Sql Query' OR ATTR_NAME='Lookup Sql Override' THEN ATTR_VALUE END SQLQUERY,
CASE WHEN ATTR_NAME='Lookup table name' THEN ATTR_VALUE END LOOKUPTABLE,
CASE WHEN ATTR_NAME='Sql Query' OR ATTR_NAME='Lookup Sql Override' THEN SUBSTR(ATTR_VALUE,INSTR(UPPER(ATTR_VALUE),'FROM'),15) END SQLQUERYV
FROM REP_WIDGET_INST A
INNER JOIN REP_ALL_MAPPINGS B
ON A.MAPPING_ID = B.MAPPING_ID
INNER JOIN REP_WIDGET_ATTR C
ON A.WIDGET_ID = C.WIDGET_ID
WHERE A.WIDGET_TYPE IN (2, 11,3)
--AND MAPPING_NAME<>PARENT_MAPPING_NAME
--AND B.MAPPING_ID=515
--AND PARENT_SUBJECT_AREA='EDW'
AND ATTR_NAME IN ( 'Connection Information','Lookup Sql Override','Lookup table name','Sql Query')
) , OPB_SESSION
WHERE MPNGID=MAPPING_ID
GROUP BY SUBJECT_AREA,MPGANDP, MPNGID,OBJECTTYPE,INSTANCE_NAME,SESSION_ID
) T1
INNER JOIN
(SELECT OPB_TASK_INST.WORKFLOW_ID,OPB_TASK_INST.TASK_ID ,OPB_TASK_INST.INSTANCE_NAME SESSIONNAME
FROM OPB_TASK_INST
WHERE OPB_TASK_INST.TASK_TYPE IN (68) --,70)
START WITH WORKFLOW_ID IN (SELECT TASK_ID FROM OPB_TASK WHERE TASK_TYPE = 71 AND /* **************SPECIFY WORKFLOW NAME HERE*********/ TASK_NAME='wf_TEST')
CONNECT BY PRIOR OPB_TASK_INST.TASK_ID = OPB_TASK_INST.WORKFLOW_ID ) WFSESSCONN
ON TASK_ID=SESSION_ID
INNER JOIN
( SELECT MAPPING_ID MAPID,LISTAGG(SOURCE_NAME,',') WITHIN GROUP (ORDER BY SOURCE_NAME) SOURCENAMES
FROM REP_SRC_MAPPING E
GROUP BY SUBJECT_AREA,MAPPING_NAME,MAPPING_ID ) SOURCENAMES
ON MAPID=MPNGID
LEFT JOIN
(SELECT DISTINCT SUBJECT_AREA SA,TASK_NAME,INSTANCE_NAME INSNAME,CNX_NAME,SESSION_ID SSID
FROM
REP_ALL_TASKS A,
REP_SESS_WIDGET_CNXS B
WHERE
A.TASK_ID = B.SESSION_ID
) T2
ON SESSION_ID=SSID
AND INSNAME=INSTANCE_NAME
AND SUBJECT_AREA=SA
LEFT JOIN
( SELECT SUBJECT_AREA SAT, SESSION_NAME SESSNT, SESSION_ID SSIDT, LISTAGG(WIDGET_NAME,',') WITHIN GROUP (ORDER BY WIDGET_NAME) AS TARGET_NAMES
FROM (SELECT distinct SUBJECT_AREA,SESSION_NAME,SESSION_ID,WIDGET_NAME
FROM REP_SESS_TBL_LOG
WHERE TYPE_NAME='Target Definition' )
GROUP BY SUBJECT_AREA,SESSION_NAME,SESSION_ID
)
ON SESSION_ID=SSIDT
)
LEFT JOIN OPB_CNX
ON TRIM(OBJECT_NAME)=TRIM(CASE WHEN OBJECTTYPE='Lookup ' THEN CONNECTION ELSE CNX_NAME END)
ORDER BY SUBJECT_AREA,SESSIONNAME,MPGANDP,INSTANCE_NAME