performance improvements; fixes

This commit is contained in:
olli 2024-12-20 13:05:04 +01:00
parent 099a42d214
commit af1497c1c2
2 changed files with 26 additions and 14 deletions

View File

@ -32,6 +32,7 @@ function get_marketdata_yahoo {
[ -z "$f_timeframe" ] && f_timeframe="1d" [ -z "$f_timeframe" ] && f_timeframe="1d"
local f_targetcsv="asset-histories/${f_name}.history-yahoo.${f_timeframe}.csv" local f_targetcsv="asset-histories/${f_name}.history-yahoo.${f_timeframe}.csv"
local f_targetbotcsv="asset-histories/${f_name}.history.${f_timeframe}.csv"
[ "$f_timeframe" = "1w" ] && f_timeframe="1wk" [ "$f_timeframe" = "1w" ] && f_timeframe="1wk"
f_histfile_yahoo="$f_targetcsv" f_histfile_yahoo="$f_targetcsv"
@ -139,6 +140,7 @@ function get_marketdata_yahoo {
fi fi
# put the csvs together # put the csvs together
# history-yahoo file
if [ -s "${f_targetcsv}" ] && [ -s "${f_targetcsvtmp}" ] if [ -s "${f_targetcsv}" ] && [ -s "${f_targetcsvtmp}" ]
then then
egrep -h "^[1-9][0-9][0-9][0-9]-[0-1][0-9]-[0-9][0-9].*,[0-9]" "${f_targetcsv}" "${f_targetcsvtmp}" | sort -k1,2 -t, -u | sort -k1,1 -t, -u >"${f_targetcsv}.tmp" egrep -h "^[1-9][0-9][0-9][0-9]-[0-1][0-9]-[0-9][0-9].*,[0-9]" "${f_targetcsv}" "${f_targetcsvtmp}" | sort -k1,2 -t, -u | sort -k1,1 -t, -u >"${f_targetcsv}.tmp"
@ -147,4 +149,14 @@ function get_marketdata_yahoo {
egrep -h "^[1-9][0-9][0-9][0-9]-[0-1][0-9]-[0-9][0-9].*,[0-9]" "${f_targetcsvtmp}" | sort -k1,2 -t, -u >"$f_targetcsv" egrep -h "^[1-9][0-9][0-9][0-9]-[0-1][0-9]-[0-9][0-9].*,[0-9]" "${f_targetcsvtmp}" | sort -k1,2 -t, -u >"$f_targetcsv"
fi fi
# bots history file
if [ -s "${f_targetbotcsv}" ] && [ -s "${f_targetcsv}" ]
then
egrep -h "^[1-9][0-9][0-9][0-9]-[0-1][0-9]-[0-9][0-9].*,[0-9]" "${f_targetbotcsv}" "${f_targetcsv}" | sort -k1,2 -t, -u | sort -k1,1 -t, -u >"${f_targetbotcsv}.tmp"
mv "${f_targetbotcsv}.tmp" "${f_targetbotcsv}"
else
egrep -h "^[1-9][0-9][0-9][0-9]-[0-1][0-9]-[0-9][0-9].*,[0-9]" "${f_targetcsv}" | sort -k1,2 -t, -u >"$f_targetbotcsv"
fi
} }

View File

@ -41,17 +41,9 @@ function get_ohlcv-candles {
f_1h_histfile="asset-histories/ECONOMY-${f_eco_asset}.history.1h.csv" f_1h_histfile="asset-histories/ECONOMY-${f_eco_asset}.history.1h.csv"
[ -s "$f_1h_histfile" ] && convert_ohlcv_1h_to_4h "$f_1h_histfile" "$f_histfile" [ -s "$f_1h_histfile" ] && convert_ohlcv_1h_to_4h "$f_1h_histfile" "$f_histfile"
f_add_missing_ohlcv_intervals "$f_histfile" 4h f_add_missing_ohlcv_intervals "$f_histfile" 4h
# elif [ "$f_timeframe" = "1d" ]
# then
# f_1h_histfile="asset-histories/ECONOMY-${f_eco_asset}.history.1h.csv"
# [ -s "$f_1h_histfile" ] && convert_ohlcv_1h_to_1d "$f_1h_histfile" "$f_histfile"
# f_add_missing_ohlcv_intervals "$f_histfile" 1d
# elif [ "$f_timeframe" = "1w" ]
# then
# f_1d_histfile="asset-histories/ECONOMY-${f_eco_asset}.history.1d.csv"
# [ -s "$f_1d_histfile" ] && convert_ohlcv_1d_to_1w "$f_1d_histfile" "$f_histfile"
else else
get_ohlcv-candle "${f_eco_asset}" ${f_timeframe} "${f_histfile}" "ECONOMY-${f_eco_asset}" #get_ohlcv-candle "${f_eco_asset}" ${f_timeframe} "${f_histfile}" "ECONOMY-${f_eco_asset}"
get_marketdata_yahoo DXY ECONOMY-DXY ${f_timeframe}
fi fi
# refresh latest indicators # refresh latest indicators
[ -s "${f_histfile}" ] && get_indicators "${f_histfile}" 51 [ -s "${f_histfile}" ] && get_indicators "${f_histfile}" 51
@ -156,6 +148,7 @@ function get_ohlcv-candle {
g_array $f_data f_data_ref + g_array $f_data f_data_ref +
else else
# from coinmarketcap/yahoo # from coinmarketcap/yahoo
g_array "$f_histfile_extdata" f_data_ref g_array "$f_histfile_extdata" f_data_ref
fi fi
@ -165,10 +158,11 @@ function get_ohlcv-candle {
g_array "${f_data_array[-1]}" f_last_data_unit_ref , g_array "${f_data_array[-1]}" f_last_data_unit_ref ,
[ -z "$f_extdata" ] && printf -v f_last_unit_date '%(%Y-%m-%d %H:%M:%S)T' ${f_last_data_unit_ref[0]::-3} [ -z "$f_extdata" ] && printf -v f_last_unit_date '%(%Y-%m-%d %H:%M:%S)T' ${f_last_data_unit_ref[0]::-3}
[ -n "$f_extdata" ] && f_last_unit_date="${f_last_data_unit_ref[0]}" [ -n "$f_extdata" ] && f_last_unit_date="${f_last_data_unit_ref[0]}"
# exit if we already have the newest date
#echo "grep -q ^\"$f_last_unit_date\" \"$f_histfile\""
[ -s "$f_histfile" ] && grep -q ^"${f_last_unit_date}," "$f_histfile" && break [ -s "$f_histfile" ] && grep -q ^"${f_last_unit_date}," "$f_histfile" && break
# go through data and write to history file if new units available # go through data and write to history file if new units available
for f_data_unit in "${f_data_array[@]}" for f_data_unit in "${f_data_array[@]}"
do do
@ -294,7 +288,7 @@ function convert_ohlcv_1h_to_4h {
fi fi
# Read the input file line by line # Read the input file line by line
while IFS=',' read -r f_date f_1h_open f_1h_high f_1h_low f_1h_close f_1h_volume f_rest grep -h "$f_latest_date" -A99999 "$f_input_file" | while IFS=',' read -r f_date f_1h_open f_1h_high f_1h_low f_1h_close f_1h_volume f_rest
do do
# check for already converted lines # check for already converted lines
@ -305,6 +299,8 @@ function convert_ohlcv_1h_to_4h {
fi fi
[ -z "$f_go_on" ] && continue [ -z "$f_go_on" ] && continue
echo "$f_date" 1>&2
f_currentdate="${f_date:0:13}" f_currentdate="${f_date:0:13}"
# define intervals by considering local/servers TZ with summer and winter season # define intervals by considering local/servers TZ with summer and winter season
f_hour=${f_date:11:2} f_hour=${f_date:11:2}
@ -351,7 +347,11 @@ function convert_ohlcv_1h_to_4h {
g_calc "$f_volume + $f_1h_volume" g_calc "$f_volume + $f_1h_volume"
f_volume=$g_calc_result f_volume=$g_calc_result
done < "$f_input_file" >>"$f_output_file" done >>"$f_output_file.4htmp"
egrep -h "^[1-9][0-9][0-9][0-9]-[0-1][0-9]-[0-9][0-9].*,[0-9]" "$f_output_file" "$f_output_file.4htmp" | sort -k1,2 -t, -u | sort -k1,1 -t, -u >"$f_output_file.tmp"
mv "$f_output_file.tmp" "$f_output_file"
rm -f "$f_output_file.4htmp"
} }