
Recently a survey was sent to MVIS customers asking them to provide the following usage information:

API request volume:
- Average number of API requests per day:
- Peak number of API requests per day:
- Total number of API requests in the past 30 days:
 
Data volume: 
- Average size of data per request (in KB or MB):
- Total data transferred via API in the past 30 days (in GB):

Usage patterns:
- Typical hours of peak usage (e.g., 9 a.m. – 5 p.m. UTC):

One customer logged a support case asking how they could extract the information from the MVIS logs.

I believe the following information can be extracted from the "Perfstats_PER_REQ.log" file, which is created in all versions of MVIS when performance monitoring is enabled:

API request volume:
- Average number of API requests per day:
- Peak number of API requests per day:
- Total number of API requests in the past 30 days:

Usage patterns:
- Typical hours of peak usage (e.g., 9 a.m. – 5 p.m. UTC):

The BASIC program "MVIS.REQUEST.TOTALS" (see below) should return the request volume data.
Simply create a pointer, MVIS.LOG, to the directory containing the Perfstats_PER_REQ.log file and run the program.
For UniData users, the program should be compiled with BASICTYPE 'P'.
I tested the program with a limited data set, so let me know if you find any unexpected features (bugs).

I expanded the BASIC program to gather additional information and then used AI to generate a Python program that creates a spreadsheet for each MVIS account, which should provide some insight into usage patterns.
Each spreadsheet contains worksheets for request counts, wait times, and response times, with the data broken down into hourly totals and averages for each day.

Simply place the Python program "mvisPerfPerReqAnalysis.py" (see below) in the same directory as the Perfstats_PER_REQ.log file and run it:
- python mvisPerfPerReqAnalysis.py
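It should create one workbook per MVIS account in the same directory, named PerfStats_PER_REQ_<account>.xlsx.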

The Python program was tested on Python 3.7.8 and 3.9.2.
You may need to install xlsxwriter:
- python -m pip install xlsxwriter

The data volume figures would have to be obtained from outside of MVIS 1.3.x, but MVIS 2.x offers additional tracing and metrics via OpenTelemetry (OTEL), which should also be able to provide the requested information.
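
As a purely hypothetical illustration of that second route: if the MVIS 2.x OTEL metrics are exported to a Prometheus-compatible backend, the 30-day request total could be pulled with a small query like the one below. The endpoint and the metric name are placeholders I made up, not documented MVIS names; substitute whatever your collector actually exposes.

import json
import urllib.parse
import urllib.request

# Hypothetical sketch only: the endpoint and metric name depend entirely on
# your OTEL collector / backend configuration and are not documented MVIS names.
PROM_URL = 'http://localhost:9090/api/v1/query'    # Prometheus-style query API
QUERY = 'sum(increase(mvis_requests_total[30d]))'  # placeholder metric name

url = PROM_URL + '?' + urllib.parse.urlencode({'query': QUERY})
with urllib.request.urlopen(url) as resp:
    result = json.load(resp)

for series in result.get('data', {}).get('result', []):
    print('Total requests (last 30 days):', series['value'][1])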

Let me know if you have any comments or if you found this useful.

Thanks
Stuart

MVIS.REQUEST.TOTALS

************************
* Initialization
************************
   ERR       = ''
   DATES     = ''
   DATE.REQ  = ''
   * Open File and Read Perfstats_PER_REQ.log
   OPEN 'MVIS.LOG' TO F.MVIS.LOG THEN
      READ PERF.LOG FROM F.MVIS.LOG,'Perfstats_PER_REQ.log' ELSE
         ERR = 'Unable to Read Perfstats_PER_REQ.log from MVIS.LOG'
      END
   END ELSE
      ERR = 'Unable to open DIR file MVIS.LOG - Directory containing Perfstats_PER_REQ.log'
   END
   IF ERR THEN CRT ERR ; STOP
*******************
* Compile data
*******************
   TOTAL.LINES = DCOUNT(PERF.LOG,@AM)
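   * The first line of the log is a header, so start at line 2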
   FOR X = 2 TO TOTAL.LINES
      THIS.LINE = CHANGE(TRIM(PERF.LOG<X>),' ',@AM)
      LOCATE THIS.LINE<1> IN DATES SETTING DATE.POS ELSE
         DATES<-1> = THIS.LINE<1>
         DATE.POS = DCOUNT(DATES,@AM)
      END
      DATE.REQ<DATE.POS>  += 1
   NEXT X
****************
* Report Data
****************
   TOTAL.DAYS = DCOUNT(DATES,@AM)
   IF TOTAL.DAYS < 30 THEN 
      START.POS = 1
      DAYS = TOTAL.DAYS
   END ELSE
      START.POS = TOTAL.DAYS - 29  ;* last 30 days, inclusive
      DAYS = 30
   END
   *
   PEAK.REQ = 0
   TOT.REQ  = 0
   FOR X = START.POS TO TOTAL.DAYS
      IF DATE.REQ<X> > PEAK.REQ THEN PEAK.REQ = DATE.REQ<X>
      TOT.REQ += DATE.REQ<X>
   NEXT X
   *
   CRT @(-1)
   CRT 'Request Data for Last ':DAYS:' Days'
   CRT ''
   CRT 'Avg Requests Per Day   ':TOT.REQ/DAYS
   CRT 'Peak Requests Per Day  ':PEAK.REQ
   CRT 'Total Requests         ':TOT.REQ
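
For anyone who would rather do the same counting outside of the database, the snippet below is a minimal Python equivalent of MVIS.REQUEST.TOTALS. The field layout is an assumption (a header line, then space-delimited lines with the date in the first field, which is what the programs in this post expect), so adjust it if your log differs.

# Minimal sketch: per-day request counts from Perfstats_PER_REQ.log.
# Assumes a header line followed by space-delimited lines with the date first.
log_file_path = 'Perfstats_PER_REQ.log'
per_day = {}

with open(log_file_path, 'r') as f:
    next(f)  # skip the header line
    for line in f:
        fields = line.split()
        if fields:
            date = fields[0]
            per_day[date] = per_day.get(date, 0) + 1

# Report on the last 30 dates seen in the log (or fewer if the log covers less)
dates = list(per_day)[-30:]
if dates:
    counts = [per_day[d] for d in dates]
    print('Request Data for Last', len(dates), 'Days')
    print('Avg Requests Per Day  ', sum(counts) / len(counts))
    print('Peak Requests Per Day ', max(counts))
    print('Total Requests        ', sum(counts))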

mvisPerfPerReqAnalysis.py

import csv
import xlsxwriter
from collections import defaultdict
from datetime import datetime
# Initialization
log_file_path = 'Perfstats_PER_REQ.log'
errors = []
accounts = set()
acct_data = defaultdict(lambda: {
    'requests': defaultdict(lambda: [0]*24),
    'waits': defaultdict(lambda: [0.0]*24),
    'responses': defaultdict(lambda: [0.0]*24),
    'totals': defaultdict(int)
})
Columns = ['A' ,'B' ,'C' ,'D' ,'E' ,'F' ,'G' ,'H' ,'I' ,'J' ,'K' ,'L' ,'M' ,'N' ,'O' ,'P' ,'Q' ,'R' ,'S' ,'T' ,'U' ,'V' ,'W' ,'X' ,'Y' ,'Z',
           'AA','AB','AC','AD','AE','AF','AG','AH','AI','AJ','AK','AL','AM','AN','AO','AP','AQ','AR','AS','AT','AU','AV','AW','AX','AY','AZ',
           'BA','BB','BC','BD','BE','BF','BG','BH','BI','BJ','BK','BL','BM','BN','BO','BP','BQ','BR','BS','BT','BU','BV','BW','BX','BY','BZ'
          ]
# Read and parse log file
try:
    with open(log_file_path, 'r') as f:
        reader = csv.reader(f, delimiter=' ')
        next(reader)  # Skip header
        for row in reader:
            row = [r for r in row if r]  # Remove empty strings
            try:
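                # Assumed space-delimited field layout:
                # [0]=date, [1]=time, [2]=account, [7]=wait time, [8]=response time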
                date_str = row[0]
                hour = int(row[1].split(':')[0])
                acct = row[2]
                wait = float(row[7])
                resp = float(row[8])
            except (IndexError, ValueError):
                errors.append(f"Malformed line: {' '.join(row)}")
                continue

            accounts.add(acct)
            # hour is 0-23 and maps directly to list index 0-23 (columns H-1..H-24)
            acct_data[acct]['requests'][date_str][hour] += 1
            acct_data[acct]['waits'][date_str][hour] += wait
            acct_data[acct]['responses'][date_str][hour] += resp
            acct_data[acct]['totals'][date_str] += 1
except FileNotFoundError:
    errors.append(f"Unable to open file: {log_file_path}")
# Report
for acct in sorted(accounts):
    workbook = xlsxwriter.Workbook('PerfStats_PER_REQ_' + acct + '.xlsx') ; # Spreadsheet name to create
    bold       = workbook.add_format({'bold': True})
    center     = workbook.add_format({'align': 'center'})
    BoldGreen  = workbook.add_format({
        'bold': True,
        'align': 'center',
        'color': 'green'
    })
    BoldCenter = workbook.add_format({
        'bold': True,
        'align': 'center',
        'color': 'blue'
    })
    BoldBlue = workbook.add_format({
        'bold': True,
        'color': 'blue'
    })
    #---------------------------
    # Request Counts
    #---------------------------
    worksheet = workbook.add_worksheet('Requests')
    worksheet.write(0, 0, 'Date', BoldBlue)
    worksheet.write(0, 1, 'Requests', BoldCenter)
    for i in range(1, 25):
       worksheet.write(0, 1+i, 'H-'+str(i), BoldCenter)
    for i,date in enumerate(sorted(acct_data[acct]['requests'])):
        counts = acct_data[acct]['requests'][date]
        total = acct_data[acct]['totals'][date]
        worksheet.write(i+1, 0, date)
        worksheet.write(i+1, 1, total, BoldGreen)
        for c,cnt in enumerate(counts):
           worksheet.write(i+1, c+2, cnt)
    # Worksheet Formatting
    worksheet.hide_zero()
    worksheet.autofit()
    worksheet.set_column('B:Z', None, center)
    #---------------------------
    # Request Wait Times
    #---------------------------
    worksheet = workbook.add_worksheet('Wait Times')
    # Col Headers
    worksheet.write(0, 0, 'Date', BoldBlue)
    worksheet.write(0, 1, 'Requests', BoldCenter)
    for i in range(1, 25):
       worksheet.write(0, 1+i, 'H-'+str(i), BoldCenter)
    worksheet.write(0, 26, 'Total', BoldCenter)
    worksheet.write(0, 28, 'Average', BoldCenter)

    for i in range(1, 25):
       worksheet.write(0, 28+i, 'H-'+str(i), BoldCenter)
    for i,date in enumerate(sorted(acct_data[acct]['waits'])):
        waits = acct_data[acct]['waits'][date]
        total = acct_data[acct]['totals'][date]
        worksheet.write(i+1, 0, date)
        worksheet.write(i+1, 1, total, BoldGreen)
        for c,cnt in enumerate(waits):
           worksheet.write(i+1, c+2, cnt)
        # Average Formulas 
        worksheet.write_formula('AA'+str(i+2), '=IFERROR(SUM(C'+str(i+2)+':Z'+str(i+2)+'),0)', BoldGreen) ; # Total Wait Time per Day
        worksheet.write_formula('AC'+str(i+2), '=IFERROR(AA'+str(i+2)+'/B'+str(i+2)+',0)', BoldGreen) ; # Average Wait Time per Day
        for y in range(29, 53):
           worksheet.write_formula(Columns[y]+str(i+2), '=IFERROR('+Columns[y-27]+str(i+2)+'/Requests!'+Columns[y-27]+str(i+2)+',0)') ; # Average Wait Time per Hour (columns AD-BA cover H-1 to H-24)
    # Worksheet Formatting
    worksheet.hide_zero()
    worksheet.autofit()
    worksheet.set_column('B:Z', None, center)
    #---------------------------
    # Request Response Times
    #---------------------------
    worksheet = workbook.add_worksheet('Response Times')
    # Col Headers
    worksheet.write(0, 0, 'Date', BoldBlue)
    worksheet.write(0, 1, 'Requests', BoldCenter)
    for i in range(1, 25):
       worksheet.write(0, 1+i, 'H-'+str(i), BoldCenter)
    worksheet.write(0, 26, 'Total', BoldCenter)
    worksheet.write(0, 28, 'Average', BoldCenter)
    for i in range(1, 25):
       worksheet.write(0, 28+i, 'H-'+str(i), BoldCenter)
    for i,date in enumerate(sorted(acct_data[acct]['responses'])):
        resps = acct_data[acct]['responses'][date]
        total = acct_data[acct]['totals'][date]
        worksheet.write(i+1, 0, date)
        worksheet.write(i+1, 1, total, BoldGreen)
        for c,cnt in enumerate(resps):
           worksheet.write(i+1, c+2, cnt)
        # Average Formulas 
        worksheet.write_formula('AA'+str(i+2), '=IFERROR(SUM(C'+str(i+2)+':Z'+str(i+2)+'),0)', BoldGreen) ; # Total Response Time per Day
        worksheet.write_formula('AC'+str(i+2), '=IFERROR(AA'+str(i+2)+'/B'+str(i+2)+',0)', BoldGreen) ; # Average Response Time per Day
        for y in range(29, 53):
           worksheet.write_formula(Columns[y]+str(i+2), '=IFERROR('+Columns[y-27]+str(i+2)+'/Requests!'+Columns[y-27]+str(i+2)+',0)') ; # Average Response Time per Hour (columns AD-BA cover H-1 to H-24)
    # Worksheet Formatting
    worksheet.hide_zero()
    worksheet.autofit()
    worksheet.set_column('B:Z', None, center)
    workbook.close()
    print('Excel spreadsheet "PerfStats_PER_REQ_' + acct + '.xlsx" created')
# Error Reporting
if errors:
    print("\\nErrors:")
    for err in errors:
        print(err)

------------------------------
Stuart MacKenzie
Rocket Software Inc
CO US
------------------------------