Solved

Making my query result faster

Posted on 2006-07-07
396 Views
Last Modified: 2008-02-01
Hello, I wrote a stored procedure to gather info from many different tables. The amount of data is immense, but I want my stored procedure to run faster. Could someone take a look at my code and see where I could improve my efficiency? Right now it takes about an hour to get all the info that I want.

--  exec dbo.mw_aragedreceivables_sp 'MWHydrocarbon', '06/14/2006', NULL, NULL, ''

-- drop procedure mw_aragedreceivables_sp

CREATE procedure dbo.mw_aragedreceivables_sp
      @company            varchar(40),
      @dateto                  varchar(32),
      @fromcustomercode      varchar(8),
      @tocustomercode            varchar(8),
--      @sort_column              varchar(3),
      @territory_code        varchar(8),
      @days_aged            int = 0,
      @availableC            int = 0,
      @balance            int = 0

as
declare
      @dateto_jul            int,
      @year                 int,
      @month                int,
      @date                 int,
      @jul_date            int,
      @dateto_date            datetime


-- temporary table creation
create table #artrxage_tmp
(
      trx_type            smallint,
      trx_ctrl_num            varchar(16),
      doc_ctrl_num            varchar(16),
      apply_to_num            varchar(16),
      apply_trx_type            smallint,
      sub_apply_num            varchar(16),
      sub_apply_type            smallint,
      salesperson_code      varchar(8),
      territory_code            varchar(8),
      price_code            varchar(8),
      date_doc            int,
      date_due            int,
      date_aging            int,
      date_applied            int,
      amount                  float,
      cust_po_num            varchar(20),
      order_ctrl_num            varchar(16),
      customer_code            varchar(8),
      payer_cust_code            varchar(8),
      trx_type_code            varchar(8),      
      ref_id                  int,
      nat_cur_code            varchar(8),
      rate_oper             float,
      divide_flag             int
)

create table #finalvalues
(
      company_id      int,
      expiration_date datetime,
      e_code          int,
      customer_code   varchar(50),
      customer_name   varchar(50),
      doc_ctrl_num    varchar(50),
      apply_to_num    varchar(50),
      apply_trx_type  smallint,
      date_doc        int,
      date_due        int,
      date_aging      int,
      date_applied    int,
      amount          float,
      salesperson_code varchar(50),
      territory_code  varchar(50),
      price_code      varchar(50),
      cust_po_num     varchar(50),
      order_ctrl_num  varchar(50),
      trx_type        smallint,
      trx_ctrl_num    varchar(50),
      trx_type_code   varchar(50),
      ref_id          int,
        nat_cur_code    varchar(50),
        rate_oper       float,
        oper_currency   varchar(50),
        divide_flag     int,
        company_name    varchar(50),
        dateto_jul      int,
        credit_limit    float,
      additional_credit float,
      age30_60      float,
      age60_90      float,
      ageover90      float
)



create table #non_zero_records
(
      doc_ctrl_num varchar(16),
      trx_type smallint,
      customer_code varchar(8),
      total float
)  
create clustered index iv_npif_ind1 on #non_zero_records ( doc_ctrl_num, trx_type, customer_code )


create table #temp1
(
      code varchar(20)
)

create table #temp2
(
      c_id varchar(20),      
      e_code varchar(20),
      code varchar(20)
)  


-- initialise parameters
if (@dateto = '~null~' or @dateto = ''  or @dateto is null) select @dateto_date = getdate()
else select @dateto_date = convert(datetime,@dateto)

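-- convert the report date to the julian-style integer day format used by the date columns in artrxage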
select  @dateto_jul = datediff(dd,'1900/01/01', @dateto_date ) + 693596

if (@fromcustomercode = '~null~' or @fromcustomercode = '' or @fromcustomercode is null) select @fromcustomercode = ''
if (@tocustomercode = '~null~'  or @tocustomercode = '' or @tocustomercode is null) select @tocustomercode = 'zzzzzzzz'

if (@territory_code = '~null~' or @territory_code = '' ) select @territory_code = null


-- finds non zero records
insert
  #non_zero_records
select
  a.apply_to_num ,
      a.apply_trx_type,
      a.customer_code,
      sum(amount)
from
  artrxage a
  inner join arcust c on a.customer_code = c.customer_code
where
  a.date_applied <= @dateto_jul and
  a.customer_code between @fromcustomercode and @tocustomercode and
  ( @territory_code is null or c.territory_code = @territory_code )
group by
  a.apply_to_num,
      a.apply_trx_type,
      a.customer_code  having abs(sum(amount)) > 0.0000001

insert into #temp1
select replace(replace(left(customer_code , 15 ), 'AR', ''), 'CUS', '')
from artrxage

insert into #temp2
select company_id, epicor_code, code from core..company_epicor_xref b
inner join #temp1 u on u.code = b.epicor_code
order by epicor_code


-- inserts data for non zero records
insert
  #artrxage_tmp
select
  a.trx_type,
      a.trx_ctrl_num,      
      a.doc_ctrl_num,      
      a.apply_to_num,      
      a.apply_trx_type,      
      a.sub_apply_num,      
      a.sub_apply_type,      
      a.salesperson_code,      
      a.territory_code,      
      a.price_code,      
      a.date_doc,      
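      -- when ref_id is positive keep date_due / date_aging, otherwise fall back to date_doc (branchless sign() arithmetic)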
      (1+ sign(sign(a.ref_id) - 1))*a.date_due + abs(sign(sign(a.ref_id)-1))*a.date_doc,
      (1+ sign(sign(a.ref_id) - 1))*a.date_aging + abs(sign(sign(a.ref_id)-1))*a.date_doc,
      a.date_applied,      
  a.amount,
      a.cust_po_num,      
      a.order_ctrl_num,      
      a.customer_code,      
      a.payer_cust_code,      
      '',
      a.ref_id,
  a.nat_cur_code,
  a.rate_oper,
  1
from
  artrxage a,
  #non_zero_records i
where
  a.apply_to_num = i.doc_ctrl_num and
  a.apply_trx_type = i.trx_type and
  a.customer_code = i.customer_code and
  a.date_applied <= @dateto_jul


-- updates transaction type description
update   #artrxage_tmp
set      trx_type_code = b.trx_type_code
from     #artrxage_tmp a
join     artrxtyp b on a.trx_type = b.trx_type
where    a.trx_type = b.trx_type

--updates divide_flag for multi-currency conversion
update      #artrxage_tmp
set  divide_flag = m.divide_flag
from #artrxage_tmp a
inner join pltmaster..mccurate m on a.nat_cur_code = m.from_currency
inner join glco g on g.oper_currency = m.to_currency


--report selection
insert into #finalvalues
select distinct
      t.company_id,
      t.expiration_date,
      f.e_code,
        a.customer_code,
      c.customer_name,
      a.doc_ctrl_num,
      a.apply_to_num,
      a.apply_trx_type,
      a.date_doc,
      a.date_due,
      a.date_aging,
      a.date_applied,
      a.amount,
      a.salesperson_code,
      a.territory_code,
      a.price_code,
      a.cust_po_num,
      a.order_ctrl_num,
      a.trx_type,
      a.trx_ctrl_num,
      a.trx_type_code,
      a.ref_id,
        a.nat_cur_code,
        a.rate_oper,
       g.oper_currency,
        a.divide_flag,
       g.company_name,
        @dateto_jul as dateto_jul,
        c.credit_limit,
      isNull((select sum(amount) from core..v_company_additional_credit where epicor_db_name = 'MWHydrocarbon' and status = 'A' and @dateto between effective_date and expiration_date and epicor_code + 'AR' = a.customer_code), 0) as additional_credit,
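      -- additional credit entries still active for this customer as of the report date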
      case when (@dateto_jul - a.date_aging) between 30 and 60 then a.amount else 0 end as Age30,
      case when (@dateto_jul - a.date_aging) between 60 and 90 then a.amount else 0 end as Age60,
      case when (@dateto_jul - a.date_aging) > 90 then a.amount else 0 end as Age90
      
from
  glco g,
  #temp2 f
  cross join #artrxage_tmp a
  cross join core..company_additional_credit t
  inner join arcust c on a.customer_code = c.customer_code
  inner join #temp2 x on x.c_id = t.company_id
  order by a.customer_code
 
   
-- select * from #finalvalues

select customer_code, credit_limit, additional_credit, max(expiration_date) as LastestDate,
      (credit_limit + additional_credit) - sum(amount) as balance,    
       sum(Age30_60)  as   Age30_60 ,    
       sum(Age60_90)  as   Age60_90,        
       sum(ageover90) as   Ageover90
        
from #finalvalues
group by customer_name, customer_code,credit_limit, additional_credit, expiration_date

/* EXEC xp_sendmail @recipients = 'grichardson',

    @query = 'select customer_code, credit_limit, additional_credit, expiration_date, balance from #finalvalues',
    @subject = 'Report',
    @message = 'The contents of INFORMATION_SCHEMA.TABLES:',
    @attach_results = 'TRUE', @width = 250 */


drop table #artrxage_tmp
drop table #finalvalues
drop table #non_zero_records
drop table #temp1
drop table #temp2

GO
Question by:DancingFighterG
12 Comments
 
LVL 75

Expert Comment

by:Aneesh Retnakaran
ID: 17060564

1. Replace the temp tables with table variables.
2. Do you really need the cross join there?
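
A table variable is declared with DECLARE rather than CREATE TABLE and can reduce logging and recompilation overhead for small result sets. For example, #temp2 from the question could be rewritten roughly like this (a sketch using the same columns and the same load query):

declare @temp2 table
(
      c_id   varchar(20),
      e_code varchar(20),
      code   varchar(20)
)

insert into @temp2 (c_id, e_code, code)
select company_id, epicor_code, code
from core..company_epicor_xref b
inner join #temp1 u on u.code = b.epicor_code

Note that a table variable cannot be indexed after it is filled; its only indexes come from PRIMARY KEY or UNIQUE constraints in the declaration, so for the larger intermediate sets an indexed temp table may still be the better choice.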
 

Author Comment

by:DancingFighterG
ID: 17061379
The cross join is what is slowing the process down, but I need it in order to get the info from the other tables. What do you mean by replacing the temp tables with table variables? Example, please!
 

Author Comment

by:DancingFighterG
ID: 17061897
When I run my query I get this error:

Server: Msg 1105, Level 17, State 2, Procedure mw_aragedreceivables_sp, Line 209
Could not allocate space for object '(SYSTEM table id: -624305149)' in database 'TEMPDB' because the 'DEFAULT' filegroup is full.
Server: Msg 9002, Level 17, State 1, Procedure mw_aragedreceivables_sp, Line 209
The log file for database 'tempdb' is full. Back up the transaction log for the database to free up some log space.
 
LVL 75

Expert Comment

by:Aneesh Retnakaran
ID: 17064893
 
LVL 8

Expert Comment

by:raj_
ID: 17067640
The cross joins and the temp tables are not too bad - (the temp-table vs. table-variable argument is ongoing).. and cross joins can be very effective if used right..

The real information would be the size of the tables involved, and whether it is possible to have the data in pre-processed tables (if it is not an OLTP-type situation).

thanx
-r
 
LVL 2

Expert Comment

by:shakir77
ID: 17068576
1. Create the index on the temp table after loading the data (but before the table is used in the sproc).
2. Truncate the tables before dropping them.

Both these points would buy you some time - again, it depends on the size of the data you are holding in the temp tables.
Anyway, you can try this and see.

Regards
Shakir

Author Comment

by:DancingFighterG
ID: 17073088
Ok, so I should create an index for the temp table after loading the data, and then truncate. Ok, I will try this and see what it does.
 

Author Comment

by:DancingFighterG
ID: 17076949
Just to clarify, Shakir77, I'm assuming you're talking about the #temp2 table that should be indexed, right?

create table #temp2
(
      c_id varchar(20),      
      e_code varchar(20),
      code varchar(20)
                add index(?)  // Don't know what to put here yet
)  

 
LVL 2

Expert Comment

by:shakir77
ID: 17076994

I see that you are using #temp2 below, joined on column c_id.
So, after the "insert into #temp2" command, put a create index:

create index c_id_idnx on #temp2( c_id )

and similarly move your "create clustered index iv_npif_ind1" after "insert #non_zero_records".
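
Applied to the procedure, that reordering for #non_zero_records would look roughly like this (the definitions are unchanged from the question; only the index creation moves below the load):

create table #non_zero_records
(
      doc_ctrl_num varchar(16),
      trx_type smallint,
      customer_code varchar(8),
      total float
)

insert #non_zero_records
select a.apply_to_num, a.apply_trx_type, a.customer_code, sum(amount)
from artrxage a
inner join arcust c on a.customer_code = c.customer_code
where a.date_applied <= @dateto_jul
  and a.customer_code between @fromcustomercode and @tocustomercode
  and ( @territory_code is null or c.territory_code = @territory_code )
group by a.apply_to_num, a.apply_trx_type, a.customer_code
having abs(sum(amount)) > 0.0000001

-- build the index only after the rows are in, so the insert is not slowed by index maintenance
create clustered index iv_npif_ind1 on #non_zero_records ( doc_ctrl_num, trx_type, customer_code )

This way the insert runs against a heap and the index is built once, instead of being maintained row by row during the load.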
 

Author Comment

by:DancingFighterG
ID: 17093474
Shakir77, are you saying that I should truncate the finalvalues table?
 
LVL 2

Accepted Solution

by:
shakir77 earned 500 total points
ID: 17093544
You are dropping #finalvalues at the end.
What I suggested is to truncate it before dropping it:

truncate table #finalvalues
drop table #finalvalues

If you are holding a large set of data (let's say 20K rows or more) it would help,
but for a smaller table this doesn't make much difference.

If possible, use an optimizer tool, which would also help you pinpoint the rough parts of the query.

You can check the query plan as well.
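
If no separate tuning tool is available, SQL Server's built-in statistics output is one way to see where the time and I/O go; a sketch using the exec call from the top of the question:

set statistics io on
set statistics time on

exec dbo.mw_aragedreceivables_sp 'MWHydrocarbon', '06/14/2006', NULL, NULL, ''

set statistics time off
set statistics io off

The messages pane then reports logical reads and elapsed time per statement, which points at the slowest steps inside the procedure.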

Regards
Shakir
 

Author Comment

by:DancingFighterG
ID: 17130656
I'm getting the following error about a column prefix:

The column prefix 'a' does not match with a table name or alias name used in the query.

The error is coming from here:

--updates divide_flag for multi-currency conversion
update      #artrxage_tmp
set  divide_flag = m.divide_flag
from #artrxage_tmp a
inner join pltmaster..mccurate m on a.nat_cur_code = m.from_currency
inner join glco g on g.oper_currency = m.to_currency

I've tried using cross joins to fix this problem, which does work, but it increases the execution time of the script dramatically, so I'm trying to stick with inner joins.
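
In T-SQL, when the table being updated is given an alias in the FROM clause, referencing that alias as the UPDATE target removes the ambiguity; a sketch keeping the same joins as above:

update a
set    divide_flag = m.divide_flag
from   #artrxage_tmp a
inner join pltmaster..mccurate m on a.nat_cur_code = m.from_currency
inner join glco g on g.oper_currency = m.to_currency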
