Check Archive Log Production

A database in ARCHIVELOG mode copies filled online redo log files to archive log files. The number and size of the archive logs generated over time is a good indicator of the database's transaction load.
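
You can first confirm that the database is actually running in ARCHIVELOG mode:

SELECT log_mode FROM v$database;

Use the following query to monitor the archive log production per hour for the last 30 days: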

SELECT TRUNC (first_time) "Date", TO_CHAR (first_time, 'Dy') "Day",
COUNT (1) "Total",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '00', 1, 0)) "h0",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '01', 1, 0)) "h1",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '02', 1, 0)) "h2",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '03', 1, 0)) "h3",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '04', 1, 0)) "h4",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '05', 1, 0)) "h5",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '06', 1, 0)) "h6",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '07', 1, 0)) "h7",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '08', 1, 0)) "h8",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '09', 1, 0)) "h9",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '10', 1, 0)) "h10",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '11', 1, 0)) "h11",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '12', 1, 0)) "h12",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '13', 1, 0)) "h13",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '14', 1, 0)) "h14",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '15', 1, 0)) "h15",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '16', 1, 0)) "h16",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '17', 1, 0)) "h17",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '18', 1, 0)) "h18",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '19', 1, 0)) "h19",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '20', 1, 0)) "h20",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '21', 1, 0)) "h21",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '22', 1, 0)) "h22",
SUM (DECODE (TO_CHAR (first_time, 'hh24'), '23', 1, 0)) "h23"
FROM v$log_history
WHERE first_time >= TRUNC (SYSDATE) - 30 -- limit the report to the last 30 days
GROUP BY TRUNC (first_time), TO_CHAR (first_time, 'Dy')
ORDER BY 1;
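
If you run RAC, each instance generates its own redo thread, so the counts above mix all instances together. A minimal sketch of the same idea broken down per instance, using the standard THREAD# column of v$log_history (daily totals only, to keep it short):

SELECT THREAD#,
       TRUNC (first_time) "Date",
       COUNT (*) "Total"
FROM v$log_history
WHERE first_time >= TRUNC (SYSDATE) - 30
GROUP BY THREAD#, TRUNC (first_time)
ORDER BY 2, 1;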

Or use the following query to measure the volume generated per hour (in MB) instead of the number of logs:

SELECT TO_CHAR(FIRST_TIME,'MM-DD') DAY,
ROUND(SUM(DECODE(TO_CHAR(FIRST_TIME,'HH24'),'00',MB,0))) AS "00",
ROUND(SUM(DECODE(TO_CHAR(FIRST_TIME,'HH24'),'01',MB,0))) AS "01",
ROUND(SUM(DECODE(TO_CHAR(FIRST_TIME,'HH24'),'02',MB,0))) AS "02",
ROUND(SUM(DECODE(TO_CHAR(FIRST_TIME,'HH24'),'03',MB,0))) AS "03",
ROUND(SUM(DECODE(TO_CHAR(FIRST_TIME,'HH24'),'04',MB,0))) AS "04",
ROUND(SUM(DECODE(TO_CHAR(FIRST_TIME,'HH24'),'05',MB,0))) AS "05",
ROUND(SUM(DECODE(TO_CHAR(FIRST_TIME,'HH24'),'06',MB,0))) AS "06",
ROUND(SUM(DECODE(TO_CHAR(FIRST_TIME,'HH24'),'07',MB,0))) AS "07",
ROUND(SUM(DECODE(TO_CHAR(FIRST_TIME,'HH24'),'08',MB,0))) AS "08",
ROUND(SUM(DECODE(TO_CHAR(FIRST_TIME,'HH24'),'09',MB,0))) AS "09",
ROUND(SUM(DECODE(TO_CHAR(FIRST_TIME,'HH24'),'10',MB,0))) AS "10",
ROUND(SUM(DECODE(TO_CHAR(FIRST_TIME,'HH24'),'11',MB,0))) AS "11",
ROUND(SUM(DECODE(TO_CHAR(FIRST_TIME,'HH24'),'12',MB,0))) AS "12",
ROUND(SUM(DECODE(TO_CHAR(FIRST_TIME,'HH24'),'13',MB,0))) AS "13",
ROUND(SUM(DECODE(TO_CHAR(FIRST_TIME,'HH24'),'14',MB,0))) AS "14",
ROUND(SUM(DECODE(TO_CHAR(FIRST_TIME,'HH24'),'15',MB,0))) AS "15",
ROUND(SUM(DECODE(TO_CHAR(FIRST_TIME,'HH24'),'16',MB,0))) AS "16",
ROUND(SUM(DECODE(TO_CHAR(FIRST_TIME,'HH24'),'17',MB,0))) AS "17",
ROUND(SUM(DECODE(TO_CHAR(FIRST_TIME,'HH24'),'18',MB,0))) AS "18",
ROUND(SUM(DECODE(TO_CHAR(FIRST_TIME,'HH24'),'19',MB,0))) AS "19",
ROUND(SUM(DECODE(TO_CHAR(FIRST_TIME,'HH24'),'20',MB,0))) AS "20",
ROUND(SUM(DECODE(TO_CHAR(FIRST_TIME,'HH24'),'21',MB,0))) AS "21",
ROUND(SUM(DECODE(TO_CHAR(FIRST_TIME,'HH24'),'22',MB,0))) AS "22",
ROUND(SUM(DECODE(TO_CHAR(FIRST_TIME,'HH24'),'23',MB,0))) AS "23",
ROUND(SUM(MB)) as "Tot"
FROM (SELECT DISTINCT SEQUENCE#, (BLOCKS*BLOCK_SIZE)/1048576 MB, FIRST_TIME
      FROM V$ARCHIVED_LOG) -- DISTINCT avoids double counting logs archived to multiple destinations
GROUP BY TO_CHAR(FIRST_TIME,'MM-DD')
ORDER BY 1;
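
To turn the daily totals into a single sizing figure (for example, when dimensioning the fast recovery area or the archive destination), you can aggregate one level further. A minimal sketch built on the same inline view:

SELECT ROUND (AVG (day_mb)) "Avg MB/day",
       ROUND (MAX (day_mb)) "Max MB/day"
FROM (SELECT SUM (MB) day_mb
      FROM (SELECT DISTINCT SEQUENCE#, (BLOCKS*BLOCK_SIZE)/1048576 MB, FIRST_TIME
            FROM V$ARCHIVED_LOG)
      GROUP BY TRUNC (FIRST_TIME));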

If you need to check archive log production for a specific time window, the query can be rewritten as:

SELECT SUM (BLOCKS*BLOCK_SIZE)/1048576 MB,
       TRUNC (FIRST_TIME)
FROM V$ARCHIVED_LOG
WHERE FIRST_TIME BETWEEN TO_DATE ('20/12/2021 16:00', 'DD/MM/YYYY HH24:MI')
                     AND TO_DATE ('20/12/2021 19:00', 'DD/MM/YYYY HH24:MI')
GROUP BY TRUNC (FIRST_TIME);
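
If you run this check often, SQL*Plus substitution variables save retyping the date literals; the same query, with &start_time and &end_time prompted at run time:

SELECT SUM (BLOCKS*BLOCK_SIZE)/1048576 MB,
       TRUNC (FIRST_TIME)
FROM V$ARCHIVED_LOG
WHERE FIRST_TIME BETWEEN TO_DATE ('&start_time', 'DD/MM/YYYY HH24:MI')
                     AND TO_DATE ('&end_time', 'DD/MM/YYYY HH24:MI')
GROUP BY TRUNC (FIRST_TIME);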
