Call download tool for the Zoom (Eleveo) call recording system. Useful for mass call decryption. The script is intended for call data migration or for integration with third-party systems (it does not support streaming yet).
#!/bin/bash
####################################################################
#
# Developed by ashd97 ([email protected]). This code is under the MIT licence.
# 30.05.2018
#
# The script gets call IDs (COUPLES.id) for each external call.
# Then it downloads each call through the native Zoom (Eleveo) API,
# which decrypts it; this is the only supported way of decryption.
# It also, of course, dumps the most useful call metadata from PostgreSQL.
# The script is supposed to run on the Zoom Replay web server.
# Speed should be around 500 calls/min in a good environment.
#
# Tested on 6xx versions
#
# Configuration:
# Verify the user and password first; both the name and the password should be "call_download_tool".
# Verify that the web server can be opened by IP by logging in manually.
#
# Then run the script as root:
# sh call_download_tool.sh 2
#
# Extracted info will be in /opt/callrec/data/calls/decrypted/
#
# The process can be monitored with: tail -f /opt/callrec/tmp/call_decryption_tool.log
#
# How to check the results after completion:
# find /opt/callrec/data/calls/decrypted -name "*.mp3" | wc -l
# wc -l /opt/callrec/data/calls/decrypted/couple_mixed_data.csv
#
####################################################################
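####################################################################
# A possible way to schedule a nightly run covering the last two days,
# matching the example invocation above (a sketch only; the script path
# below is an assumption, adjust it to wherever this file is stored):
#   in root's crontab:  30 1 * * * sh /opt/callrec/tools/call_download_tool.sh 2
####################################################################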
last_days_to_extract="$1" # first argument: number of most recent days to extract
number_of_threads=$(grep -c ^processor /proc/cpuinfo) # Min value is 2.
postgres_user='postgres'
web_server='127.0.0.1'
mainlog='/opt/callrec/tmp/call_decryption_tool.log'
couples_data='/opt/callrec/tmp/couples_extdata.csv'
couple_mixed_data='/opt/callrec/tmp/couple_mixed_data.csv'
####################################################################
# Dumping metadata day by day, joining COUPLES + cfiles
# Outputs:
# Writes couple_mixed_data.csv with:
# id,path,cfsize,ckvalue,cktype,callerid,callingnr,originalcallednr,sid,direction,start_ts,stop_ts,length
####################################################################
function dump_calls_metadata () {
couple_mixed_data_day='/opt/callrec/tmp/couple_mixed_data_day.csv'
touch $couple_mixed_data_day
chown callrec:callrec $couple_mixed_data_day
chmod 777 $couple_mixed_data_day
# Be aware of possible 'out of shared memory' messages from Postgres
for ((i=$last_days_to_extract; i>=1; i--))
do
day_to_export=$(date -d "-$(($i-1)) days" +"%Y-%m-%d")
truncate -s 0 $couple_mixed_data_day
echo "
SELECT CONCAT (TO_CHAR(NOW(),'Mon DD hh24:mi:ss')
, ' INFO [CDT] [COPY] For $day_to_export Dump of couple_mixed_data started');
Copy (
WITH ids_of_internal_calls AS (
SELECT
distinct id
FROM COUPLES
WHERE
date(START_TS) = '$day_to_export'
AND (
( LENGTH(COUPLES.CALLINGNR)
+ LENGTH(COUPLES.ORIGINALCALLEDNR) ) < 14 -- calls to RP
OR direction = 'INTERNAL'
OR ( LENGTH(COUPLES.CALLINGNR) = 4
AND COUPLES.ORIGINALCALLEDNR LIKE 'Conference%' )
OR ( LENGTH(COUPLES.ORIGINALCALLEDNR) = 4
AND COUPLES.CALLINGNR LIKE 'Conference%' )
)
)
select
COUPLEs.id -- Zoom id by which attached data can be found
, REPLACE( cfiles.cfpath,'/opt/callrec/data/calls','') as path
, cfiles.cfsize
, cfiles.ckvalue
, cfiles.cktype
--
, CONCAT (COUPLEs.callingpartyname, COUPLEs.calledpartyname ) as CallerID
, COUPLEs.callingnr
, COUPLEs.originalcallednr
, COUPLEs.sid -- unique sync id in Zoom
, COUPLEs.direction
, COUPLEs.start_ts
, COUPLEs.stop_ts
, COUPLEs.length -- duration of a call in Zoom
from COUPLEs
JOIN cfiles ON cfiles.cplid = COUPLEs.id
where
COUPLEs.id not in ( select id from ids_of_internal_calls )
AND COUPLEs.problemstatus = 'NO_PROBLEM'
AND cfiles.cftype = 'AUDIO'
AND date(COUPLEs.START_TS) = '$day_to_export'
ORDER BY COUPLEs.id, COUPLEs.start_ts ASC
) To '$couple_mixed_data_day' With CSV DELIMITER ',' HEADER;
SELECT CONCAT (TO_CHAR(NOW(),'Mon DD hh24:mi:ss')
, ' INFO [CDT] [COPY] For $day_to_export Dump of couple_mixed_data finished');
" |\
psql -qtAx -d callrec -U $postgres_user |\
sed 's/concat|//g' >> $mainlog
cat $couple_mixed_data_day >> $couple_mixed_data
done
sed -i '1d' $couple_mixed_data
printf '%s INFO [CDT] [COPY] couple_mixed_data received = %s , records = %s \n' \
"$(date '+%b %d %H:%M:%S')" \
"$(du -h $couple_mixed_data)" \
"$(wc -l $couple_mixed_data)" >> $mainlog
}
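####################################################################
# Quick sanity checks of the mixed dump (a sketch; it assumes the column
# layout documented above, with COUPLES.id in the first CSV field):
#   head -n 3 /opt/callrec/tmp/couple_mixed_data.csv
#   awk -F',' '$1 ~ /^[0-9]+$/ {print $1}' /opt/callrec/tmp/couple_mixed_data.csv | sort -u | wc -l
####################################################################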
####################################################################
# Dumping metadata day by day in background, EXTDATA
# Outputs:
# Writes couples_data.csv with:
# cplid,key,value,created_ts
####################################################################
function dump_calls_extmetadata () {
couples_data_day='/opt/callrec/tmp/couples_data_day.csv'
touch $couples_data_day
chown callrec:callrec $couples_data_day
chmod 777 $couples_data_day
# Be aware of possible 'out of shared memory' messages from Postgres
for ((i=$last_days_to_extract; i>=1; i--))
do
day_to_export=$(date -d "-$(($i-1)) days" +"%Y-%m-%d")
truncate -s 0 $couples_data_day
echo "
SELECT CONCAT (TO_CHAR(NOW(),'Mon DD hh24:mi:ss')
, ' INFO [CDT] [COPY] For $day_to_export Dump of COUPLE_EXTDATA started in background');
Copy (
WITH ids_of_internal_calls AS (
SELECT
distinct id
FROM COUPLES
WHERE
date(START_TS) = '$day_to_export'
AND (
( LENGTH(COUPLES.CALLINGNR)
+ LENGTH(COUPLES.ORIGINALCALLEDNR) ) < 14 -- calls to RP
OR direction = 'INTERNAL'
OR ( LENGTH(COUPLES.CALLINGNR) = 4
AND COUPLES.ORIGINALCALLEDNR LIKE 'Conference%')
OR ( LENGTH(COUPLES.ORIGINALCALLEDNR) = 4
AND COUPLES.CALLINGNR LIKE 'Conference%')
)
)
, ids_of_needed_external_calls AS (
SELECT
distinct id
FROM COUPLES
WHERE
COUPLEs.id not in ( select id from ids_of_internal_calls )
AND COUPLEs.problemstatus = 'NO_PROBLEM'
AND date(COUPLEs.START_TS) = '$day_to_export'
)
select *
--
from COUPLE_EXTDATA
where
COUPLE_EXTDATA.cplid IN (select id from ids_of_needed_external_calls)
AND date(COUPLE_EXTDATA.created_ts) = '$day_to_export'
-- COUPLE_EXTDATA.cplid IN ( select CPLID from ids_of_calls_with_CustomerNumber )
ORDER BY COUPLE_EXTDATA.cplid, COUPLE_EXTDATA.created_ts ASC
) To '$couples_data_day' With CSV DELIMITER ',' HEADER;
SELECT CONCAT (TO_CHAR(NOW(),'Mon DD hh24:mi:ss')
, ' INFO [CDT] [COPY] For $day_to_export Dump of COUPLE_EXTDATA finished');
" |\
psql -qtAx -d callrec -U $postgres_user |\
sed 's/concat|//g' >> $mainlog
cat $couples_data_day >> $couples_data
done
printf '%s INFO [CDT] [COPY] COUPLE_EXTDATA received = %s , records = %s \n' \
"$(date '+%b %d %H:%M:%S')" \
"$(du -h $couples_data)" \
"$(wc -l $couples_data)" >> $mainlog
mv $couples_data /opt/callrec/data/calls/decrypted/couples_extdata.csv
}
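####################################################################
# Sketch of how a third-party system could later join the two dumps on the
# call id (illustration only, assuming the column layouts documented above;
# both files must be sorted on their first column before joining):
#   sort -t',' -k1,1 /opt/callrec/data/calls/decrypted/couple_mixed_data.csv > /tmp/mixed.sorted
#   sort -t',' -k1,1 /opt/callrec/data/calls/decrypted/couples_extdata.csv > /tmp/extdata.sorted
#   join -t',' -1 1 -2 1 /tmp/mixed.sorted /tmp/extdata.sorted
####################################################################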
####################################################################
# Getting session id
# Arguments:
# None
# Writes:
# INFO message in mainlog
####################################################################
function get_sessionid () {
curl --insecure -v -b cookies.txt -c cookies.txt -X POST \
-d "loginname=call_download_tool" \
-d "password=call_download_tool" \
-H "Content-Type: application/x-www-form-urlencoded" \
-H "Accept: text/plain" \
https://$web_server/callrec/loginservlet 2>&1 |\
grep 'Access' |\
sed -r 's/[<>/="]+/ /g' |\
grep 'sessionid' |\
gawk '{ print strftime("%b %d %H:%M:%S"), "INFO [CDT] [exception]" $0 }' >> $mainlog
}
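# Note: cookies.txt used above is resolved relative to the working directory
# of the shell that runs this script; the same cookie jar is shared by every
# curl call in this file, so the session it stores is reused by the token and
# download requests below.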
####################################################################
# We need to get a one-time token for each call in order to download it
# Arguments:
# id of a call from COUPLES
# Returns:
# Token from Zoom web server
####################################################################
function get_token_for_a_call () {
id_calls="id_calls=${1}"
curl --insecure --silent -v -b cookies.txt -c cookies.txt -X POST \
-d "$id_calls" \
-d "type=1" \
-d "action=download" \
-H "Content-Type: application/x-www-form-urlencoded" \
-H "Accept: text/xml" \
https://$web_server/callrec/downloadtoken 2>&1 |\
grep 'reply' |\
sed -r 's/[<>/]+/ /g' |\
awk '{print $2}'
}
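####################################################################
# Sketch of the parsing above (the exact reply markup is an assumption,
# inferred from the grep/sed/awk chain): a response line such as
#   <reply>0123456789abcdef</reply>
# becomes " reply 0123456789abcdef reply " after the sed, so awk '{print $2}'
# prints the token value itself.
####################################################################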
####################################################################
# Getting file by using one-time token
# Arguments:
# token from get_token_for_a_call function
# file path given by cfiles.cfpath from couple_mixed_data
# Returns:
# HTTP status code
# Writes:
# Decrypted call in /opt/callrec/data/calls/decrypted
#
# Getting file by token:
# HTTP/1.1 200 OK - normal call download status
# < Content-Disposition: attachment; filename=date_numbers.mp3
# HTTP/1.1 302 - the session id seems expired, just call get_sessionid
# HTTP/1.1 403 Forbidden - the call was already downloaded with this token
# HTTP/1.1 404 Not Found
####################################################################
function get_file_of_a_call () {
token="token=${1}"
curl --insecure -v -b cookies.txt -c cookies.txt -X POST \
-d "$token" \
-H "Content-Type: application/x-www-form-urlencoded" \
-H "Accept:application/octet-stream" \
-o "$2" https://$web_server/callrec/sendcallfile.mp3 2>&1 |\
grep 'HTTP/1.1 ' |\
awk '{print $3}'
}
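####################################################################
# Usage sketch (assumes a valid one-time token; the target path below is
# purely illustrative):
#   status=$(get_file_of_a_call "$token_for_a_call" "/opt/callrec/data/calls/decrypted/20180530/example.mp3")
#   [ "$status" = 200 ] || echo "download failed with HTTP code $status"
####################################################################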
####################################################################
# Worker: downloads (and thereby decrypts) every call listed in its part
# of couple_mixed_data.csv
# Arguments:
# Path to a part (split piece) of couple_mixed_data.csv
# Thread number
# Writes:
# Debug messages in mainlog
####################################################################
function call_decryption_tool () {
while IFS="" read -r p || [ -n "$p" ]
do
couples_id=$(echo "$p" | awk -F'[,]' '{print $1}')
# Check if couple id is a number
if [[ $couples_id =~ ^[0-9]+$ ]];then
couples_start_ts=$(echo "$p" | awk -F'[,]' '{print $11}')
token_for_a_call=$(get_token_for_a_call "$couples_id")
full_path_of_file=$(echo "$p" |\
awk -F'[,]' '{print "/opt/callrec/data/calls/decrypted" $2}' )
# If the folder name contains a date then create it with mkdir
# call_date=$(echo "$line" | awk -F "/" '/1/ {print $2}')
# if [ "${#call_date}" -eq "8" ]; then
full_path_to_date_folder=$( echo "$p" |\
sed -r 's/[/]+/ /g' |\
awk '{print "/opt/callrec/data/calls/decrypted/" $2}')
mkdir -p $full_path_to_date_folder
# get_file_of_a_call
http_status_code=$(echo $(get_file_of_a_call $token_for_a_call $full_path_of_file))
# Trying to handle Zoom Web server issues:
# 302 or even 403 can occur when the token was not received;
# for 404 the response is usually an HTML document (ASCII text)
if [ "$http_status_code" != 200 ]; then
file_description=$(file -b $full_path_of_file)
if echo "$file_description" | grep -q "HTML"; then
[ -f $full_path_of_file ] && mv $full_path_of_file /opt/callrec/tmp
fi
printf '%s DEBUG [CDT] [thread-%s] [exception-info] token=%s http_code=%s couple.id=%s start_ts=%s \n' \
"$(date '+%b %d %H:%M:%S')" \
"$2" \
"$token_for_a_call" \
"$http_status_code" \
"$couples_id" "$couples_start_ts" >> $mainlog
# second attempt
get_sessionid
token_for_a_call=$(get_token_for_a_call "$couples_id")
http_status_code=$(echo $(get_file_of_a_call $token_for_a_call $full_path_of_file))
file_description=$(file -b $full_path_of_file)
if echo "$file_description" | grep -q "HTML"; then
[ -f $full_path_of_file ] && mv $full_path_of_file /opt/callrec/tmp
printf '%s DEBUG [CDT] [thread-%s] [exception-fail] token=%s http_code=%s couple.id=%s start_ts=%s \n' \
"$(date '+%b %d %H:%M:%S')" \
"$2" \
"$token_for_a_call" \
"$http_status_code" \
"$couples_id" "$couples_start_ts" >> $mainlog
fi
printf '%s DEBUG [CDT] [thread-%s] [exception-handle] token=%s http_code=%s couple.id=%s start_ts=%s [file_check] %s\n' \
"$(date '+%b %d %H:%M:%S')" \
"$2" \
"$token_for_a_call" \
"$http_status_code" \
"$couples_id" "$couples_start_ts" \
"$file_description" >> $mainlog
else
file_description=$(file -b $full_path_of_file)
printf '%s DEBUG [CDT] [thread-%s] token=%s http_code=%s couple.id=%s start_ts=%s [file_check] %s\n' \
"$(date '+%b %d %H:%M:%S')" \
"$2" \
"$token_for_a_call" \
"$http_status_code" \
"$couples_id" "$couples_start_ts" \
"$file_description" >> $mainlog
fi
else
printf '%s DEBUG [CDT] [thread-%s] [exception-incorrect-id] incorrect couple id in couple_mixed_data = %s\n' \
"$(date '+%b %d %H:%M:%S')" \
"$2" \
"$couples_id" >> $mainlog
fi
done < $1
number_of_mp3s=$(find /opt/callrec/data/calls/decrypted -name "*.mp3" | wc -l)
number_of_db_records=$(wc -l /opt/callrec/data/calls/decrypted/couple_mixed_data.csv)
printf '%s DEBUG [CDT] [thread-%s] finished mp3s=%s dbrecords=%s \n' \
"$(date '+%b %d %H:%M:%S')" \
"$2" \
"$number_of_mp3s" \
"$number_of_db_records" >> $mainlog
truncate -s 0 $1
}
main() {
if ! [ $(id -u) = 0 ]; then
echo "This script must be run as root. Exiting..."
exit 1
fi
if ! [ "$last_days_to_extract" -eq "$last_days_to_extract" ] 2> /dev/null
then
echo "WARN [CDT] Provided value for number of last days to extract is not positive integer. ($last_days_to_extract) Exiting..."
exit 1
fi
touch $mainlog
chown callrec:callrec $mainlog
chmod 777 $mainlog
truncate -s 0 $mainlog
printf '\n%s INFO [CDT] script started with last_days_to_extract=%s\n' \
"$(date '+%b %d %H:%M:%S')" \
"$last_days_to_extract" >> $mainlog
mkdir -p /opt/callrec/data/calls/decrypted
# For the dump, we need to prepare empty files with the correct rights
touch $couples_data
touch $couple_mixed_data
truncate -s 0 $couples_data
truncate -s 0 $couple_mixed_data
chown callrec:callrec $couples_data
chown callrec:callrec $couple_mixed_data
chmod 777 $couples_data
chmod 777 $couple_mixed_data
# First we generate this mixed dump so that downloaded call files
# can be renamed back to their original names during downloading
dump_calls_metadata $couple_mixed_data $last_days_to_extract
# EXTDATA can be dumped in the background; there is no need to wait for it.
dump_calls_extmetadata $couples_data $last_days_to_extract &
# We need to split the metadata into the same number of parts as the number of scheduled threads
split -n $number_of_threads --numeric-suffixes=1 $couple_mixed_data \
/opt/callrec/tmp/cdt_calls_part_; \
rename 's/_0{1,}([0-9]+)/_$1/' /opt/callrec/tmp/cdt_calls_part_*
mv $couple_mixed_data /opt/callrec/data/calls/decrypted/couple_mixed_data.csv
# Here we open a session for the first time; later we call this again
# whenever a call download fails, because the Zoom session expiration
# mechanism is not very clear.
get_sessionid
# Then we just run the download threads in the background, one per metadata part.
for i in $(seq 1 $number_of_threads); do
# the rename above stripped the leading zeros from the split suffixes
full_path="/opt/callrec/tmp/cdt_calls_part_$i"
call_decryption_tool $full_path $i &
done
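# Optionally block here until every download thread has finished (an assumption
# about the desired behaviour; as written, main returns while the threads keep
# running in the background):
# wait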
}
main "$@"