Archive log generation count and size per day
alter session set nls_date_format='DD-MON-YYYY HH24:MI:SS';
select THREAD#, trunc(first_time) as "DATE"
, count(1) num
, sum(blocks*block_size)/1024 as KB
, sum(blocks*block_size)/1024/1024 as MB
, sum(blocks*block_size)/1024/1024/1024 as GB
from v$archived_log
where first_time > trunc(sysdate-10)
group by thread#, trunc(first_time)
order by 2,1;
Find the total size of the archived logs
select decode(grouping(trunc(COMPLETION_TIME)),1,'TOTAL',trunc(COMPLETION_TIME)) TIME,
       sum(BLOCKS * BLOCK_SIZE)/1024/1024/1024 SIZE_GB
from V$ARCHIVED_LOG
group by cube (trunc(COMPLETION_TIME))
order by 1;
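If the database archives into the fast recovery area, a quick related check (a sketch, assuming the FRA is configured) is the space usage there by file type:

SELECT file_type,
       percent_space_used,
       percent_space_reclaimable,
       number_of_files
FROM v$flash_recovery_area_usage;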
Here is the script I generally use to find out which SQL statements are generating the most redo (adjust the object-name filter and the snapshot time window to suit):
SELECT to_char(begin_interval_time,'YYYY_MM_DD HH24') WHEN,
dbms_lob.substr(sql_text,4000,1) SQL,
dhss.instance_number INST_ID,
dhss.sql_id,
executions_delta exec_delta,
rows_processed_delta rows_proc_delta
FROM dba_hist_sqlstat dhss,
dba_hist_snapshot dhs,
dba_hist_sqltext dhst
WHERE upper(dhst.sql_text) LIKE '%USR_RACUNI_MV%'
AND ltrim(upper(dhst.sql_text)) NOT LIKE 'SELECT%'
AND dhss.snap_id=dhs.snap_id
AND dhss.instance_number=dhs.instance_number
AND dhss.sql_id=dhst.sql_id
AND begin_interval_time BETWEEN to_date('13-08-28 13:00','YY-MM-DD HH24:MI')
AND to_date('13-11-05 14:00','YY-MM-DD HH24:MI');
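AWR shows the history; to see which connected sessions are producing the most redo right now, a rough sketch against v$sesstat (the 'redo size' statistic) is:

SELECT s.sid,
       s.username,
       s.program,
       st.value AS redo_size_bytes
FROM v$session s,
     v$sesstat st,
     v$statname sn
WHERE st.sid = s.sid
  AND sn.statistic# = st.statistic#
  AND sn.name = 'redo size'
  AND s.username IS NOT NULL
ORDER BY st.value DESC;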
The following query will help you find the number of archive logs generated per hour for each day:
SELECT TO_CHAR(FIRST_TIME,'DD-MON-YYYY HH24'),
COUNT(*)
FROM V$LOGHIST
WHERE FIRST_TIME > TO_DATE('01-NOV-2013','DD-MON-YYYY')
GROUP BY TO_CHAR(FIRST_TIME,'DD-MON-YYYY HH24')
ORDER BY TO_CHAR(FIRST_TIME,'DD-MON-YYYY HH24') ASC;
Find the archive log count, size, and sequence information per day
SELECT A.*,
Round(A.Count#*B.AVG#/1024/1024) Daily_Avg_Mb
FROM
(
SELECT
To_Char(First_Time,'YYYY-MM-DD') DAY,
Count(1) Count#,
Min(RECID) Min#,
Max(RECID) Max#
FROM
v$log_history
GROUP
BY To_Char(First_Time,'YYYY-MM-DD')
ORDER
BY 1 DESC
) A,
(
SELECT
Avg(BYTES) AVG#,
Count(1) Count#,
Max(BYTES) Max_Bytes,
Min(BYTES) Min_Bytes
FROM
v$log
) B;
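Note that Daily_Avg_Mb above assumes every switched log was full (it multiplies the switch count by the average online log size). A more direct alternative (a sketch) is to sum the blocks actually written per day from v$archived_log:

SELECT TRUNC(completion_time) AS day,
       COUNT(*) AS logs,
       ROUND(SUM(blocks * block_size)/1024/1024) AS mb
FROM v$archived_log
GROUP BY TRUNC(completion_time)
ORDER BY 1 DESC;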
Archive log / redo log generation per day, broken down by hour
set linesize 300
set pagesize 5000
set trimspool on
column 00 format 99 heading "12am"
column 01 format 99 heading "1am"
column 02 format 99 heading "2am"
column 03 format 99 heading "3am"
column 04 format 99 heading "4am"
column 05 format 99 heading "5am"
column 06 format 99 heading "6am"
column 07 format 99 heading "7am"
column 08 format 99 heading "8am"
column 09 format 99 heading "9am"
column 10 format 99 heading "10am"
column 11 format 99 heading "11am"
column 12 format 99 heading "12pm"
column 13 format 99 heading "1pm"
column 14 format 99 heading "2pm"
column 15 format 99 heading "3pm"
column 16 format 99 heading "4pm"
column 17 format 99 heading "5pm"
column 18 format 99 heading "6pm"
column 19 format 99 heading "7pm"
column 20 format 99 heading "8pm"
column 21 format 99 heading "9pm"
column 22 format 99 heading "10pm"
column 23 format 99 heading "11pm"
column "Day" format a3
prompt
prompt Redo Log Switches
prompt
SELECT trunc (first_time) "Date",
to_char (trunc (first_time),'Dy') "Day",
sum (decode (to_number (to_char (FIRST_TIME, 'HH24')), 0, 1)) "00",
sum (decode (to_number (to_char (FIRST_TIME, 'HH24')), 1, 1)) "01",
sum (decode (to_number (to_char (FIRST_TIME, 'HH24')), 2, 1)) "02",
sum (decode (to_number (to_char (FIRST_TIME, 'HH24')), 3, 1)) "03",
sum (decode (to_number (to_char (FIRST_TIME, 'HH24')), 4, 1)) "04",
sum (decode (to_number (to_char (FIRST_TIME, 'HH24')), 5, 1)) "05",
sum (decode (to_number (to_char (FIRST_TIME, 'HH24')), 6, 1)) "06",
sum (decode (to_number (to_char (FIRST_TIME, 'HH24')), 7, 1)) "07",
sum (decode (to_number (to_char (FIRST_TIME, 'HH24')), 8, 1)) "08",
sum (decode (to_number (to_char (FIRST_TIME, 'HH24')), 9, 1)) "09",
sum (decode (to_number (to_char (FIRST_TIME, 'HH24')), 10, 1)) "10",
sum (decode (to_number (to_char (FIRST_TIME, 'HH24')), 11, 1)) "11",
sum (decode (to_number (to_char (FIRST_TIME, 'HH24')), 12, 1)) "12",
sum (decode (to_number (to_char (FIRST_TIME, 'HH24')), 13, 1)) "13",
sum (decode (to_number (to_char (FIRST_TIME, 'HH24')), 14, 1)) "14",
sum (decode (to_number (to_char (FIRST_TIME, 'HH24')), 15, 1)) "15",
sum (decode (to_number (to_char (FIRST_TIME, 'HH24')), 16, 1)) "16",
sum (decode (to_number (to_char (FIRST_TIME, 'HH24')), 17, 1)) "17",
sum (decode (to_number (to_char (FIRST_TIME, 'HH24')), 18, 1)) "18",
sum (decode (to_number (to_char (FIRST_TIME, 'HH24')), 19, 1)) "19",
sum (decode (to_number (to_char (FIRST_TIME, 'HH24')), 20, 1)) "20",
sum (decode (to_number (to_char (FIRST_TIME, 'HH24')), 21, 1)) "21",
sum (decode (to_number (to_char (FIRST_TIME, 'HH24')), 22, 1)) "22",
sum (decode (to_number (to_char (FIRST_TIME, 'HH24')), 23, 1)) "23"
from v$log_history
where trunc (first_time) >= (trunc(sysdate) - &a) -- last X days. 0 = today only. 1 = today and yesterday
group by trunc (first_time)
order by trunc (first_time) DESC
/
Average log switches per hour / per day
select Hour, round(avg(LOG_SWITCHES)) LOG_SWITCHES
from (select to_char(trunc(first_time, 'HH'),'HH24') Hour,
             trunc(first_time, 'DDD'),
             count(*) LOG_SWITCHES
      from v$log_history
      group by trunc(first_time, 'DDD'), trunc(first_time, 'HH')
      order by 1)
group by Hour
order by Hour;
---------------------------------------------------------------------
select round(avg(LOG_SWITCHES)) LOG_SWITCHES, DAY
from (
select TRUNC(FIRST_TIME, 'DDD') DAY, count(*) LOG_SWITCHES
from v$log_history
where first_time between sysdate -30 and sysdate
group by TRUNC(FIRST_TIME, 'DDD'), to_char(trunc(first_time), 'Day')
order by 1
)
group by day
order by 2;
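A variation on the same idea (a sketch): average switches by day of week over the last 30 days, which makes weekly patterns easier to spot.

SELECT TO_CHAR(day, 'Dy') AS day_name,
       ROUND(AVG(cnt)) AS avg_log_switches
FROM (SELECT TRUNC(first_time) AS day,
             COUNT(*) AS cnt
      FROM v$log_history
      WHERE first_time > SYSDATE - 30
      GROUP BY TRUNC(first_time))
GROUP BY TO_CHAR(day, 'Dy')
ORDER BY 2 DESC;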
--------------------------------------------------------------
Calculate the space used by a single object
SELECT s.owner,
s.segment_name,
s.bytes/1024/1024 "Size(MB)"
FROM dba_segments s,
dba_lobs l
WHERE s.owner = '&schema_name'
AND s.owner=l.owner(+)
AND s.segment_name =
CASE WHEN s.segment_type = 'LOBSEGMENT'
THEN L.SEGMENT_NAME(+)
WHEN s.segment_type = 'LOBINDEX'
THEN L.INDEX_NAME(+)
END
AND s.SEGMENT_NAME='&segment_name';
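The query above reports only the one segment you name. To get the full footprint of a table including its LOB segments and LOB indexes, a sketch like this sums them from dba_segments (&table_name here is just an example placeholder, not part of the script above):

SELECT SUM(s.bytes)/1024/1024 AS total_mb
FROM dba_segments s
WHERE s.owner = '&schema_name'
  AND (s.segment_name = '&table_name'
       OR s.segment_name IN (SELECT l.segment_name
                             FROM dba_lobs l
                             WHERE l.owner = '&schema_name'
                               AND l.table_name = '&table_name')
       OR s.segment_name IN (SELECT l.index_name
                             FROM dba_lobs l
                             WHERE l.owner = '&schema_name'
                               AND l.table_name = '&table_name'));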
Top 5 wait events in active session history (last hour)
select * from (
select
WAIT_CLASS ,
EVENT,
count(sample_time) as EST_SECS_IN_WAIT
from v$active_session_history
where sample_time between sysdate - interval '1' hour and sysdate
group by WAIT_CLASS,EVENT
order by count(sample_time) desc
)
where rownum < 6;
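The same ASH sample can be sliced by session instead of event; a quick sketch to find the most active sessions over the last hour:

select * from (
select
SESSION_ID,
SESSION_SERIAL#,
count(*) as EST_SECS_ACTIVE
from v$active_session_history
where sample_time between sysdate - interval '1' hour and sysdate
group by SESSION_ID, SESSION_SERIAL#
order by count(*) desc
)
where rownum < 6;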
Lists the volume of archived redo by hour for the specified day
Usage: @redo_by_hour <days_back>  (0 = today, 1 = yesterday, etc.; this is the &1 in the script)
SET VERIFY OFF PAGESIZE 30

WITH hours AS (
  SELECT TRUNC(SYSDATE) - &1 + ((level-1)/24) AS hours
  FROM dual
  CONNECT BY level <= 24
)
SELECT h.hours AS date_hour,
       ROUND(SUM(blocks * block_size)/1024/1024/1024, 2) size_gb
FROM hours h
LEFT OUTER JOIN v$archived_log al ON h.hours = TRUNC(al.first_time, 'HH24')
GROUP BY h.hours
ORDER BY h.hours;