Convert query to doctrine DQL - doctrine-orm

I have a pretty big MySQL query. For performance optimization I'm adding subqueries inside a JOIN statement. With raw SQL everything works fine. Here is the query:
SELECT
    campaigns.id,
    campaigns.name,
    CONCAT(users.id, ' ', users.email) AS usersData,
    CONCAT(campaigns.cpm, ' ', currencies.currency_code) AS cpm,
    CONCAT(campaign_budgets.total_spend, ' ', currencies.currency_code) AS total_spend,
    creatives.impressionsCount,
    creatives.bidsCount,
    creatives.winsAmount,
    creatives.winsPercentage,
    creatives.creativeIds
FROM campaigns
INNER JOIN users ON campaigns.user_id = users.id
INNER JOIN campaign_budgets ON campaigns.id = campaign_budgets.campaign_id
INNER JOIN currencies ON campaigns.currency_type_id = currencies.id
LEFT JOIN (
    SELECT
        GROUP_CONCAT(creatives.id) AS creativeIds,
        creatives.campaign_id,
        creatives.user_id,
        impressions.impressionsCount,
        bids.bidsCount,
        bids.winsAmount,
        bids.winsPercentage
    FROM creatives
    LEFT JOIN (
        SELECT
            COUNT(impressions.id) AS impressionsCount,
            impressions.user_id,
            impressions.creative_id
        FROM impressions
        GROUP BY impressions.user_id
    ) AS impressions ON creatives.user_id = impressions.user_id
    LEFT JOIN (
        SELECT
            COUNT(bids.id) AS bidsCount,
            SUM(CASE WHEN bids.status = 'won' THEN 1 ELSE 0 END) AS winsAmount,
            SUM(CASE WHEN bids.status = 'won' THEN 1 ELSE 0 END) / COUNT(bids.id) * 100 AS winsPercentage,
            bids.user_id,
            bids.creative_id
        FROM bids
        GROUP BY bids.user_id
    ) AS bids ON creatives.user_id = bids.user_id
    GROUP BY creatives.campaign_id
) AS creatives ON campaigns.id = creatives.campaign_id
GROUP BY campaigns.id
and I need to convert it to Doctrine DQL somehow, if that is possible. I've run into an issue when adding a subquery to a join statement. Here is my code:
$columns = [
    'campaign.id',
    'campaign.name',
    'CONCAT(owner.id,\' \', owner.email) as ownerEmail',
    'CONCAT(campaign.cpm,\' \', currency.currencyCode) as cpm',
    'CONCAT(budget.totalSpend,\' \', currency.currencyCode) as totalSpend',
    'COUNT(imp.id) as impressionsCount',
    'COUNT(bid.id) as totalBidsCount',
    'SUM(case when bid.status = \'won\' then 1 else 0 end) as winsAmount',
    'SUM(case when bid.status = \'won\' then 1 else 0 end)/COUNT(bid.id)*100 as winsPercentage',
];

$bids = $this->_em->getRepository(Bid::class)
    ->createQueryBuilder('bids')
    ->select([
        'count(bids.id) as bidsCount',
        'SUM(CASE WHEN bids.status = \'won\' THEN 1 ELSE 0 END) AS winsAmount',
        'SUM(CASE WHEN bids.status = \'won\' THEN 1 ELSE 0 END) / COUNT(bids.id) * 100 AS winsPercentage',
        'bids.userId',
        'bids.creativeId'
    ])->getDQL();

$impressions = $this->_em->getRepository(Impression::class)
    ->createQueryBuilder('imp')
    ->select([
        'count(imp.id) as impressionsCount',
        'imp.userId',
        'imp.creativeId'
    ])->getDQL();

$creative = $this->_em->getRepository(Creative::class)
    ->createQueryBuilder('cr')->select('cr.id')
    ->select([
        'GROUP_CONCAT(cr.id) as creativeIds',
        'cr.campaignId',
        'cr.userId',
        'impressions.impressionsCount',
        'bids.bidsCount',
        'bids.winsAmount',
        'bids.winsPercentage'
    ])
    ->leftJoin(Impression::class, sprintf('(%s) as imp', $impressions), Expr\Join::WITH, 'imp.id = cr.userId')
    ->leftJoin(Bid::class, sprintf('(%s) as bid', $bids), Expr\Join::WITH, 'bids.id = cr.userId')
    ->getDQL();

$query = $this->createQueryBuilder('campaign')
    ->select($columns);
$query
    ->join('campaign.user', 'owner')
    ->join('campaign.campaignBudget', 'budget')
    ->join('campaign.currencyType', 'currency')
    ->leftJoin(Creative::class, sprintf('(%s) as creative', $creative), Expr\Join::WITH, 'campaign.id = cr.campaignId');
$query->groupBy('campaign.id');
$query->setMaxResults($limit);
$query->setFirstResult($offset);

return $query->getQuery()->useQueryCache(true)->getResult();
I'm getting the error [Syntax Error] line 0, col 626: Error: Expected Doctrine\ORM\Query\Lexer::T_IDENTIFIER, got '('. The issue is the subquery added to the join statement.
I would appreciate any help!!

Okay, I found a solution using DBAL instead of DQL, using this as a reference. The underlying problem is that DQL does not allow a subselect as the target of a JOIN (joins can only reference mapped entities or associations), so the query has to be executed as native SQL through the DBAL connection.
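For anyone else hitting the same wall, here is a minimal sketch of what the DBAL route can look like, assuming the DBAL 3.x API and that $sql holds the raw query from the top of the question (the LIMIT/OFFSET binding is my addition to mirror the pagination from the DQL attempt, not part of the original solution):

// Minimal sketch: run the raw SQL through the DBAL connection instead of DQL.
// $sql is assumed to hold the full query shown at the top of the question;
// $limit and $offset mirror setMaxResults()/setFirstResult() from the DQL attempt.
$conn = $this->_em->getConnection();

$sql .= ' LIMIT :limit OFFSET :offset';

$rows = $conn->executeQuery(
    $sql,
    ['limit' => $limit, 'offset' => $offset],
    ['limit' => \Doctrine\DBAL\ParameterType::INTEGER, 'offset' => \Doctrine\DBAL\ParameterType::INTEGER]
)->fetchAllAssociative(); // each row is a plain associative array, no entity hydration

return $rows;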

Related

AWS Redshift Stored Procedure Abortions

I've got two stored procedures which run every 4 hours. I'm experiencing odd behaviour where both of these procedures get aborted exactly as many times as they run successfully (see table below). I'm using pg_catalog.svl_stored_proc_call to get the proc run status.
When I look at pg_catalog.stl_load_errors I can't see any errors for the runs.
What's the best way to investigate this behaviour?
Code for datawarehouse.p_add_missing_tbls()
DECLARE
row RECORD;
row2 RECORD;
BEGIN
FOR row IN select * from
(
select distinct
concat(concat(lower(regexp_replace(d.service,'-','_')), '_'),
lower(regexp_replace(regexp_replace(d.entity_name,'::',''),'(.)([A-Z]+)','$1_$2'))) as "tbl_name",
concat(concat(concat(concat(lower(regexp_replace(d.service,'-','_')),'_'),lower(regexp_replace(regexp_replace(d.entity_name,'::',''),'(.)([A-Z]+)','$1_$2'))),'_'), d."key") as "key" ,
d.value_type,
case d.value_type
when '0' then 'varchar(32768)'
when '1' then 'numeric(20,10)'
when '2' then 'int(2)'
when '3' then 'timestamp'
when '4' then 'varchar(32768)'
end as "data_type"
from
datawarehouse.definitions d
where
d."key" not in ('id')
)
loop
--List of keys
-- select into row2 '\'' || listagg(distinct "key",'\',\'') || '\'' as keys from datawarehouse.definitions where concat(concat(lower(service),'_'),lower(entity_name)) = row.tbl_name;
-- select into row2 '\'' || listagg(distinct "key",'\',\'') || '\'' as keys from datawarehouse.definitions where concat(concat(lower(definitions.service),'_'),lower(definitions.entity_name)) = row.tbl_name;
select into row2 '\'' || listagg(distinct (concat(concat(concat(concat(lower(regexp_replace(service,'-','_')),'_'),lower(regexp_replace(regexp_replace(entity_name,'::',''),'(.)([A-Z]+)','$1_$2'))),'_'), "key")),'\',\'') || '\'' as keys from datawarehouse.definitions where concat(concat(lower(regexp_replace(definitions.service,'-','_')),'_'),lower(regexp_replace(regexp_replace(definitions.entity_name,'::',''),'(.)([A-Z]+)','$1_$2'))) = row.tbl_name and definitions."key" not in ('id');
--Delete staging tbl
execute 'drop table if exists staging.staging_'||row.tbl_name||';';
--Create staging tbl
EXECUTE 'create table staging.staging_'||row.tbl_name||' AS
select *
from (select
attribute_values.entity_id as '||row.tbl_name||'_id,
-- definitions."key",
concat(concat(concat(concat(lower(regexp_replace(definitions.service,''-'',''_'')),''_''),lower(regexp_replace(regexp_replace(definitions.entity_name,''::'',''''),''(.)([A-Z]+)'',''$1_$2''))),''_''), definitions."key")::varchar as "key",
concat(concat(lower(regexp_replace(definitions.service,''-'',''_'')),''_''),lower(regexp_replace(regexp_replace(definitions.entity_name,''::'',''''),''(.)([A-Z]+)'',''$1_$2''))) as "tbl_name",
attribute_values.updated_at as "updated_at",
attribute_values.destroyed_upstream as "deleted_upstream",
case definitions.value_type
when ''0'' then attribute_values.string_value::varchar
when ''1'' then attribute_values.number_value::varchar
when ''2'' then attribute_values.boolean_value::varchar
when ''3'' then attribute_values.datetime_value::varchar
when ''4'' then attribute_values.array_value::varchar
end as "final_value"
from
datawarehouse.attribute_values
left join
datawarehouse.definitions
on
definitions.id = attribute_values.definition_id
where
attribute_values.updated_at>= coalesce(((select max(updated_at) from datawarehouse.'|| row.tbl_name || ' )), (select min(av.updated_at) from datawarehouse.attribute_values av ))
and
tbl_name='''||row.tbl_name||'''
and
definitions."key" not in (''id'')
order by
entity_id desc)
PIVOT (max(final_value) for "key" in ( ' || row2.keys || ' )
);';
-- Drop staging.staging_col_info_v
-- execute 'drop view staging.staging_col_info_v;';
-- Create view
execute ' create or replace view staging.staging_col_info_v AS
with staging_tbl_info as (
select
d.table_schema ,
d.table_name ,
d.column_name as "column_name"
from
pg_catalog.svv_columns d
where
d.table_schema = ''staging''
and
d.table_name like ''staging_%''
),
tbl_info as (
select
col_name,
data_type
from
datawarehouse.tbl_col_v
)
select
*
from
staging_tbl_info s
inner join
tbl_info h
on
h.col_name = s.column_name;';
END LOOP;
RETURN;

Nested While Loop in Redshift

My Table_1 looks like this
Parent_Id  Child_Id  Product  Prod_count
1000       1         A        1
1000       2         A+B      1
1000       3         A        1
1000       4         B+C      1
2000       1         A        1
2000       2         B+C      1
2000       3         C        1
2000       4         D        1
I am trying to do a nested loop in this procedure: the outer loop runs over Parent_Id, and since each parent has different children, each child row has to be read as well.
I have tried this:
create or replace procedure sp_dummy(IN var1 int, IN var2 int, IN var3 int) as $$
Begin
create temp table find_id as(
select distinct parent_id,row_number() over(order by 1) as rw_num
from table_1
);
declare
tot_cnt int := (select count(distinct parent_id) from find_id );
init_loop int := 1;
in_init_loop int := 1;
in_tot_init_loop int;
v_parent_id int;
Begin
While init_loop <= tot_cnt
Loop
Raise info 'init_loop = %', Init_loop;
Execute 'Select parent_id into ' || v_parent_id || ' from find_id where rw_num = ' || Init_loop;
Raise info 'v_patient_id = %', v_patient_id;
Execute 'Select Count(*) into ' || in_tot_init_loop || ' from Table_1 where Parent_Id = ' || v_parent_id;
While in_init_loop <= in_tot_init_loop
Loop
Raise info 'in_init_loop = %', in_init_loop;
in_init_loop = in_init_loop + 1
End loop;
init_loop = init_loop + 1;
end loop;
End;
End;
$$ language plpgsql;
On trying this I am getting the error Cannot Execute a Null Query string.
I gave up on trying to understand this error!! :(
This line seems problematic:
Execute 'Select parent_id into ' || v_parent_id || ' from find_id where rw_num = ' || Init_loop;
The v_parent_id is empty, so it would translate into:
Select parent_id into NULL from find_id where rw_num = 1;
I think you actually wanted to write:
SELECT INTO v_parent_id
parent_id
FROM find_id
WHERE rw_num = Init_loop;
Yes, you can actually put the SQL in-line, rather than having to pass it as a string to EXECUTE. Take a look at the examples in Structure of PL/pgSQL - Amazon Redshift.

Doctrine2 DQL Syntax error when ordering by count

This is my Doctrine repository function:
public function mostReadArticleByUser($userId){
$total = $this->createQueryBuilder('ar')
->select('ar.articleId', 'COUNT(ar)')
->where('ar.authorId = :userId')
->groupBy('ar.articleId')
->orderBy('COUNT(ar)', 'DESC')
->setMaxResults(1)
->setParameter('userId', $userId)
->getQuery()
->getResult();
return $total;
}
which should be equivalent to this query
SELECT article_id, count(id)
FROM profile_article_reads
WHERE author_id = 2
GROUP BY article_id
Order by count(id) DESC
LIMIT 1;
When I execute this code I get the error
Error: Expected end of string, got '('
QueryException: SELECT ar.articleId, COUNT(ar) FROM
SciProfileBundle\Entity\ProfileArticleReads ar WHERE ar.authorId =
:userId GROUP BY ar.articleId ORDER BY COUNT(ar) DESC
The COUNT function accepts a field, so try
COUNT(ar.id)
instead of:
COUNT(ar)
For sorting it is probably better to use an alias, for example:
public function mostReadArticleByUser($userId){
$total = $this->createQueryBuilder('ar')
->select('ar.articleId', 'COUNT(ar.id) as total')
->where('ar.authorId = :userId')
->groupBy('ar.articleId')
->orderBy('total', 'DESC')
->setMaxResults(1)
->setParameter('userId', $userId)
->getQuery()
->getResult();
return $total;
}
Hope this helps.
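As a side note (not part of the original answer): because the SELECT contains only scalars, getResult() returns each row as an associative array keyed by the selected aliases, so a hypothetical caller would read the result roughly like this:

// Hypothetical usage of the repository method above ($articleReadsRepository is assumed).
$rows = $articleReadsRepository->mostReadArticleByUser($userId);

if ($rows !== []) {
    $mostReadArticleId = $rows[0]['articleId'];   // key from ->select('ar.articleId', ...)
    $readCount         = (int) $rows[0]['total']; // key from 'COUNT(ar.id) as total'
}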

Doctrine 2 how to increment a column for multiple rows at once?

I'm looking for a way to increment a value for multiple rows at once without looping. Is it possible in Doctrine?
Here is the query in plain SQL:
$sql = "UPDATE table set compteur = compteur + 1 where id in ('1','2','3') ";
Using Doctrine to update many rows (not incrementing), I have this:
$qb = $this->getEntityManager()->createQueryBuilder();
$query = $qb->update('Application\Entity\Table', 'l')
->set('l.compteur', $qb->expr()->literal('8'))
->where("l.id in ('$ids')")
->getQuery();
$retour = $query->execute();
Thanks for any ideas!!
Use DQL for this:
$this->getEntityManager()->createQuery('
UPDATE Application\Entity\Table t
SET t.compteur = t.compteur + 1
')
->execute();
http://doctrine-orm.readthedocs.org/projects/doctrine-orm/en/latest/reference/batch-processing.html#dql-update
You can also do it with the querybuilder like so:
$qb->set('l.compteur', $qb->expr()->sum('l.compteur', 1));
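Putting that together with the id filter from the question, a minimal sketch could look like the following (the bound IN parameter is my assumption, not part of the original answer):

// Increment compteur for a set of ids in a single UPDATE statement.
$qb = $this->getEntityManager()->createQueryBuilder();

$qb->update('Application\Entity\Table', 'l')
    ->set('l.compteur', $qb->expr()->sum('l.compteur', 1))
    ->where($qb->expr()->in('l.id', ':ids'))
    ->setParameter('ids', $ids); // e.g. ['1', '2', '3']

$updatedRows = $qb->getQuery()->execute(); // returns the number of affected rows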

Informatica Repository Query to get Workflow, Session, Mapping and Source/Target of Mapping

For cleaning up unused IPC sources I need a repository query that returns the workflow, session, mapping and the source/target of each mapping. I started by joining REP_LOAD_SESSIONS and REP_TBL_MAPPING on mapping_id, but only a fraction of the mappings seem to be present in the joined output.
I can't find the right tables to join to get the job done.
Any help will be greatly appreciated!
I was struggling with the same issue. Here is my query, hope it helps:
SELECT SUBJECT_AREA,SESSIONNAME,MPGANDP MAPPINGNAME,SOURCENAMES,TARGET_NAMES,INSTANCE_NAME,LOOKUPTABLENAME,CASE WHEN OBJECTTYPE='Lookup ' THEN CONNECTION ELSE CNX_NAME END CONNECTIONNAME,USER_NAME
FROM
( SELECT * FROM
(SELECT SUBJECT_AREA,SESSION_ID,MPGANDP, MPNGID,OBJECTTYPE,INSTANCE_NAME,MAX(LOOKUPTABLE) LOOKUPTABLENAME, MAX(CONNECTION) CONNECTION
--,LISTAGG(SQLQUERY, '' ) WITHIN GROUP (ORDER BY SQLQUERY) SQLOVERRIRDE
FROM
(
SELECT CASE WHEN MAPPING_NAME=PARENT_MAPPING_NAME THEN MAPPING_NAME ELSE MAPPING_NAME||','||PARENT_MAPPING_NAME END MPGANDP, B.MAPPING_ID MPNGID,
SUBSTR(WIDGET_TYPE_NAME,1,INSTR(WIDGET_TYPE_NAME,' ')) OBJECTTYPE, INSTANCE_NAME, CASE WHEN UPPER(ATTR_NAME) ='CONNECTION INFORMATION' THEN ATTR_VALUE ELSE NULL END CONNECTION,
ATTR_NAME, ATTR_VALUE,SUBJECT_AREA, --A.*,B.*,C.*
--CASE WHEN ATTR_NAME='Sql Query' OR ATTR_NAME='Lookup Sql Override' THEN ATTR_VALUE END SQLQUERY,
CASE WHEN ATTR_NAME='Lookup table name' THEN ATTR_VALUE END LOOKUPTABLE,
CASE WHEN ATTR_NAME='Sql Query' OR ATTR_NAME='Lookup Sql Override' THEN SUBSTR(ATTR_VALUE,INSTR(UPPER(ATTR_VALUE),'FROM'),15) END SQLQUERYV
FROM REP_WIDGET_INST A
INNER JOIN REP_ALL_MAPPINGS B
ON A.MAPPING_ID = B.MAPPING_ID
INNER JOIN REP_WIDGET_ATTR C
ON A.WIDGET_ID = C.WIDGET_ID
WHERE A.WIDGET_TYPE IN (2, 11,3)
--AND MAPPING_NAME<>PARENT_MAPPING_NAME
--AND B.MAPPING_ID=515
--AND PARENT_SUBJECT_AREA='EDW'
AND ATTR_NAME IN ( 'Connection Information','Lookup Sql Override','Lookup table name','Sql Query')
) , OPB_SESSION
WHERE MPNGID=MAPPING_ID
GROUP BY SUBJECT_AREA,MPGANDP, MPNGID,OBJECTTYPE,INSTANCE_NAME,SESSION_ID
) T1
INNER JOIN
(SELECT OPB_TASK_INST.WORKFLOW_ID,OPB_TASK_INST.TASK_ID ,OPB_TASK_INST.INSTANCE_NAME SESSIONNAME
FROM OPB_TASK_INST
WHERE OPB_TASK_INST.TASK_TYPE IN (68) --,70)
START WITH WORKFLOW_ID IN (SELECT TASK_ID FROM OPB_TASK WHERE TASK_TYPE = 71 AND /* **************SPECIFY WORKFLOW NAME HERE*********/ TASK_NAME='wf_TEST')
CONNECT BY PRIOR OPB_TASK_INST.TASK_ID = OPB_TASK_INST.WORKFLOW_ID ) WFSESSCONN
ON TASK_ID=SESSION_ID
INNER JOIN
( SELECT MAPPING_ID MAPID,LISTAGG(SOURCE_NAME,',') WITHIN GROUP (ORDER BY SOURCE_NAME) SOURCENAMES
FROM REP_SRC_MAPPING E
GROUP BY SUBJECT_AREA,MAPPING_NAME,MAPPING_ID ) SOURCENAMES
ON MAPID=MPNGID
LEFT JOIN
(SELECT DISTINCT SUBJECT_AREA SA,TASK_NAME,INSTANCE_NAME INSNAME,CNX_NAME,SESSION_ID SSID
FROM
REP_ALL_TASKS A,
REP_SESS_WIDGET_CNXS B
WHERE
A.TASK_ID = B.SESSION_ID
) T2
ON SESSION_ID=SSID
AND INSNAME=INSTANCE_NAME
AND SUBJECT_AREA=SA
LEFT JOIN
( SELECT SUBJECT_AREA SAT, SESSION_NAME SESSNT, SESSION_ID SSIDT, LISTAGG(WIDGET_NAME,',') WITHIN GROUP (ORDER BY WIDGET_NAME) AS TARGET_NAMES
FROM (SELECT distinct SUBJECT_AREA,SESSION_NAME,SESSION_ID,WIDGET_NAME
FROM REP_SESS_TBL_LOG
WHERE TYPE_NAME='Target Definition' )
GROUP BY SUBJECT_AREA,SESSION_NAME,SESSION_ID
)
ON SESSION_ID=SSIDT
)
LEFT JOIN OPB_CNX
ON TRIM(OBJECT_NAME)=TRIM(CASE WHEN OBJECTTYPE='Lookup ' THEN CONNECTION ELSE CNX_NAME END)
ORDER BY SUBJECT_AREA,SESSIONNAME,MPGANDP,INSTANCE_NAME