Yahoo disables free history API - using CoinMarketCap API now

olli 2024-09-18 21:23:40 +02:00
parent 71ff0b28a8
commit 244667d11a
9 changed files with 410 additions and 65 deletions

View File

@ -24,8 +24,9 @@ dabo is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY
You should have received a copy of the GNU General Public License along with dabo (see COPYING file). If not, see <http://www.gnu.org/licenses/>.
## Data sources
Various data sources such as finance.yahoo.com and crypto exchanges available via ccxt are used. please check whether this is legal in your region before use.
Various data sources such as finance.yahoo.com and crypto exchanges available via ccxt are used. Please check whether this is legal in your region before use.
- query1.finance.yahoo.com (economic data,...)
- api.coinmarketcap.com (crypto data)
- 30rates.com (forecast)
- fapi.binance.com (OpenInterest,...)
- api.alternative.me (Fear and Greed)
@ -88,7 +89,8 @@ Each part runs parallel to the others in its own docker-container.
OHLCV = Open, High, Low, Close and Volume of a time unit
- time units 1w, 1d, 4h, 1h, 15m and 5m
- 4h, 1h, 15m and 5m from the respective stock exchange
- 1d and 1w data from yahoo finance to have longer terms
- 1d and 1w data from CoinMarketCap to have longer terms
- economic data from Yahoo Finance
### Dabo Indicators
- data per time unit
- time units 1w, 1d, 4h, 1h, 15m and 5m
@ -98,6 +100,7 @@ OHLCV = Open, High, Low, Close and Volume of a time unit
- self-calculated significant levels (support/resist)
### Dabo Market Data
- Yahoo Finance
- CoinMarketCap
### Dabo Orders
### Dabo Transaction History
- Support of additional Exchanges/Brokers: JustTrade (CSV-Import) and Bitpanda (API+CSV-Import)
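For reference, the api.coinmarketcap.com endpoint listed above can be checked by hand. A minimal sketch, assuming curl and jq are available; id=1 is only an illustrative CoinMarketCap ID (real IDs come from the COINMARKETCAPIDS mapping built by the new script below), and the jq path mirrors the one used in get_marketdata_coinmarketcap:
# print the last three daily closes for CoinMarketCap ID 1
curl -s "https://api.coinmarketcap.com/data-api/v3.1/cryptocurrency/historical?id=1&interval=1d" \
| jq -r '.data.quotes[] | .quote.timestamp[0:10] + "," + (.quote.close|tostring)' \
| tail -n3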

dabo/fetch-coinmarketcapids.sh Executable file
View File

@ -0,0 +1,34 @@
#!/bin/bash
# Copyright (c) 2022-2024 olli
#
# This file is part of dabo (crypto bot).
#
# dabo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# dabo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with dabo. If not, see <http://www.gnu.org/licenses/>.
. /dabo/dabo-prep.sh
while true
do
g_echo_note "Next loop"
# fetch the ID list right away if it does not exist yet or is empty
[ -s COINMARKETCAPIDS ] || get_marketdata_coinmarketcap_ids
# sleep until shortly after next Monday 00:00 UTC, then refresh the ID list
sleeptime=$(($(TZ=UTC date +%s -d "next monday 0:00") - $(date +%s) +2 ))
g_echo_note "Waiting $sleeptime seconds until next run"
sleep $sleeptime
get_marketdata_coinmarketcap_ids
done
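The loop above only maintains the COINMARKETCAPIDS file; a rough sketch of its format and of the lookup it enables (format and grep/cut taken from the functions below, the example lines are illustrative):
# COINMARKETCAPIDS is a plain "SYMBOL,ID" csv, one asset per line, e.g.:
# BTC,1
# ETH,1027
# look up an ID the same way get_marketdata_coinmarketcap does:
f_id=$(egrep "^BTC," COINMARKETCAPIDS | head -n1 | cut -d, -f2)
echo "BTC has CoinMarketCap ID $f_id"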

View File

@ -22,7 +22,7 @@
. /dabo/dabo-prep.sh
interval=$1
[ -z "$interval" ] && return 1
[ -z "$interval" ] && exit 1
seconds=$2
while true

View File

@ -91,15 +91,15 @@ function currency_converter {
fi
# try direct pair
get_marketdata_yahoo "${f_currency_target}-${f_currency}" "${f_currency_target}${f_currency}" || get_marketdata_yahoo "${f_currency}-${f_currency_target}" "${f_currency}${f_currency_target}"
[ "${f_currency}" = "USD" ] && get_marketdata_coinmarketcap "${f_currency_target}-${f_currency}" "${f_currency_target}${f_currency}"
[ "${f_currency_target}" = "USD" ] && get_marketdata_coinmarketcap "${f_currency}-${f_currency_target}" "${f_currency}${f_currency_target}"
local f_histfile_default="${f_asset_histories}${f_currency_target}${f_currency}.history"
local f_histfile_yahoo="${f_asset_histories}${f_currency_target}${f_currency}.history"
local f_histfile_coinmarketcap="${f_asset_histories}${f_currency_target}${f_currency}.history"
# reverse as backup
local f_histfile_default_reverse="${f_asset_histories}${f_currency}${f_currency_target}.history"
local f_histfile_yahoo_reverse="${f_asset_histories}${f_currency}${f_currency_target}.history"
local f_histfile_coinmarketcap_reverse="${f_asset_histories}${f_currency}${f_currency_target}.history"
# search for rate by date
for f_histfile in "$f_histfile_default" "$f_histfile_default_reverse" "$f_histfile_yahoo" "$f_histfile_yahoo_reverse"
for f_histfile in "$f_histfile_default" "$f_histfile_default_reverse" "$f_histfile_coinmarketcap" "$f_histfile_coinmarketcap_reverse"
do
# histfile has to exist
#if [ -s "${f_histfile}*.csv" ]
@ -136,7 +136,7 @@ function currency_converter {
return $?
fi
fi
g_echo_note "didn't find rate for ${f_currency}-${f_currency_target} - '${FUNCNAME} $@'"
g_echo_error "didn't find rate for ${f_currency}-${f_currency_target} - '${FUNCNAME} $@'"
return 1
fi

View File

@ -38,7 +38,7 @@ function get_indicators_all {
printf '%(%Y-%m-%d %H:%M:%S)T' >"${f_histfile}.indicators-calculating"
get_indicators "${f_histfile}" ${f_last_intervals} && printf '%(%Y-%m-%d %H:%M:%S)T' >>"$f_histfile.indicators-calculated"
# add missing intervals (for example weekends of non-24h assets like economic data) - interval taken from the filename
f_add_missing_ohlcv_intervals "${f_histfile}"
#f_add_missing_ohlcv_intervals "${f_histfile}"
rm -f "${f_histfile}.indicators-calculating"
done

View File

@ -0,0 +1,147 @@
#!/bin/bash
# Copyright (c) 2022-2024 olli
#
# This file is part of dabo (crypto bot).
#
# dabo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# dabo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with dabo. If not, see <http://www.gnu.org/licenses/>.
function get_marketdata_coinmarketcap {
g_echo_note "RUNNING FUNCTION ${FUNCNAME} $@"
local f_item="$1"
local f_name="$2"
local f_timeframe="$3"
local f_targetcsvtmp="${g_tmp}/${f_name}.history-coinmarketcap.csv"
local f_targetjsontmp="${g_tmp}/${f_name}.history-coinmarketcap.json"
rm -f "$f_targetcsvtmp" "$f_targetjsontmp"
[ -z "$f_timeframe" ] && f_timeframe="1d"
local f_targetcsv="asset-histories/${f_name}.history-coinmarketcap.${f_timeframe}.csv"
[ "$f_timeframe" = "1w" ] && f_timeframe="7d"
f_histfile_coinmarketcap="$f_targetcsv"
# use EURC stable coin for EUR
f_item=${f_item//EUR-/EURC-}
# convert - to / (CCXT-style pair)
f_item=${f_item//-//}
# USDT to USD
f_item=${f_item//USDT/USD}
# BUSD to USD
f_item=${f_item//BUSD/USD}
if ! [[ $f_item =~ /USD ]]
then
g_echo_error "${FUNCNAME} $@: Only USD supported"
return 1
fi
# reduce CCXT symbol to its base asset for the CoinMarketCap ID lookup
if [[ $f_item =~ / ]]
then
# remove /*
f_item=${f_item///*/}
## remove :* (for example :USDT in contract markets)
#f_item=${f_item//:*}
# remove spaces
#f_item=${f_item/ /}
fi
local f_id
f_id=$(egrep "^${f_item}," COINMARKETCAPIDS | head -n1 | cut -d, -f2)
if [ -z "$f_id" ]
then
g_echo_error "${FUNCNAME} $@: No CoinMarketCap ID for $f_item"
return 1
fi
# give up if a download already failed within the last 5 minutes
if [ -f "FAILED_COINMARKETCAP/${f_name}_HISTORIC_DOWNLOAD" ]
then
find "FAILED_COINMARKETCAP/${f_name}_HISTORIC_DOWNLOAD" -mmin +5 -delete
if [ -f "FAILED_COINMARKETCAP/${f_name}_HISTORIC_DOWNLOAD" ]
then
return 1
fi
fi
# skip if the target csv already exists and was modified within the last 2 minutes
if [ -s "${f_targetcsv}" ] && find "${f_targetcsv}" -mmin -2 | grep -q "${f_targetcsv}"
then
return 0
fi
# cleanup
rm -f "$f_targetcsvtmp" "${f_targetcsvtmp}".err ${f_targetjsontmp} "${f_targetjsontmp}".err
if [ "$f_timeframe" = "1d" ] || [ "$f_timeframe" = "7d" ]
then
# Download data from coinmarketcap
g_wget -O "${f_targetjsontmp}" "https://api.coinmarketcap.com/data-api/v3.1/cryptocurrency/historical?id=${f_id}&interval=${f_timeframe}" 2>"${f_targetjsontmp}".err
jq -r '.data.quotes[] | .quote.timestamp[0:10] + "," + (.quote.open|tostring) + "," + (.quote.high|tostring) + "," + (.quote.low|tostring) + "," + (.quote.close|tostring) + "," + (.quote.volume|tostring)' "${f_targetjsontmp}" | egrep -v ',0$|,$' >"${f_targetcsvtmp}" 2>"${f_targetjsontmp}".err
else
g_echo_error "${FUNCNAME} $@: Timeframe $f_timeframe in CoinMarketCap not supported."
return 1
fi
# error if no csvfile available
if ! [ -s "${f_targetcsvtmp}" ]
then
mkdir -p FAILED_COINMARKETCAP
cat "${f_targetcsvtmp}.err" "${f_targetjsontmp}.err" > "FAILED_COINMARKETCAP/${f_name}_HISTORIC_DOWNLOAD" 2>&1
f_get_marketdata_coinmarketcap_error=$(cat "${f_targetcsvtmp}.err" "${f_targetjsontmp}.err" 2>/dev/null)
return 1
fi
# put the csvs together
if [ -s "${f_targetcsv}" ] && [ -s "${f_targetcsvtmp}" ]
then
egrep -h "^[1-9][0-9][0-9][0-9]-[0-1][0-9]-[0-9][0-9].*,[0-9]" "${f_targetcsv}" "${f_targetcsvtmp}" | sort -k1,2 -t, -u | sort -k1,1 -t, -u >"${f_targetcsv}.tmp"
mv "${f_targetcsv}.tmp" "${f_targetcsv}"
else
egrep -h "^[1-9][0-9][0-9][0-9]-[0-1][0-9]-[0-9][0-9].*,[0-9]" "${f_targetcsvtmp}" | sort -k1,2 -t, -u >"$f_targetcsv"
fi
}
function get_marketdata_coinmarketcap_ids {
# get symbol ids from coinmarketcap
local f_target=COINMARKETCAPIDS
local f_target_tmp="${f_target}.tmp"
local f_target_loop=$f_target_tmp
# write directly to the target if it does not exist yet or is empty
[ -s "$f_target" ] || f_target_loop=$f_target
for f_id in $(seq 1 50000)
do
curl -s --request GET --url "https://api.coinmarketcap.com/data-api/v3.1/cryptocurrency/historical?id=${f_id}&interval=1d" | jq -r '.[] | .symbol + "," + (.id|tostring)' | head -n1
sleep 0.3
done | egrep '^.+,[0-9]*$' >"$f_target_loop"
if [ -s "$f_target_tmp" ]
then
cp -p "$f_target" "${f_target}.old"
sort -u "$f_target_tmp" "${f_target}.old" >"$f_target"
rm "$f_target_tmp"
fi
}
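A hedged usage example for the new function: the call signature matches how currency_converter invokes it, and the output path and column layout follow from f_targetcsv and the jq line above; the concrete pair is only an example.
# fetch/refresh the daily BTC-USD history from CoinMarketCap
if get_marketdata_coinmarketcap "BTC-USD" "BTCUSD" 1d
then
# each line: YYYY-MM-DD,open,high,low,close,volume
tail -n1 "asset-histories/BTCUSD.history-coinmarketcap.1d.csv"
fi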

View File

@ -65,27 +65,6 @@ function get_marketdata_yahoo {
[[ $f_item = "USD-EUR" ]] && f_item="USDEUR=X"
[[ $f_item = "EUR-USD" ]] && f_item="EURUSD=X"
# special names of some coins/currencies of yahoo finance
[[ $f_item = "ARB-USD" ]] && f_item="ARB11841-USD"
[[ $f_item = "DUEL-USD" ]] && f_item="DUEL28868-USD"
[[ $f_item = "GMX-USD" ]] && f_item="GMX11857-USD"
[[ $f_item = "MEW-USD" ]] && f_item="MEW30126-USD"
[[ $f_item = "TAO-USD" ]] && f_item="TAO22974-USD"
[[ $f_item = "UNI-USD" ]] && f_item="UNI7083-USD"
[[ $f_item = "SUI-USD" ]] && f_item="SUI20947-USD"
[[ $f_item = "BLAZE-USD" ]] && f_item="BLAZE30179-USD"
[[ $f_item = "BEER-USD" ]] && f_item="BEER31337-USD"
[[ $f_item = "TAI-USD" ]] && f_item="TAI20605-USD"
[[ $f_item = "DEFI-USD" ]] && f_item="DEFI29200-USD"
[[ $f_item = "TON-USD" ]] && f_item="TON11419-USD"
[[ $f_item = "BRETT-USD" ]] && f_item="BRETT29743-USD"
#[[ $f_item = "ADS-USD" ]] && f_item="%24ADS-USD"
[[ $f_item = "AIT-USD" ]] && f_item="AIT28882-USD"
[[ $f_item = "PT-USD" ]] && f_item="PT28582-USD"
[[ $f_item = "BLAST-USD" ]] && f_item="BLAST28480-USD"
[[ $f_item = "GRT-USD" ]] && f_item="GRT6719-USD"
[[ $f_item = "ARTY-USD" ]] && f_item="ARTY23751-USD"
# end if already failed the last 5 minutes
if [ -f "FAILED_YAHOO/${f_name}_HISTORIC_DOWNLOAD" ]
then
@ -109,12 +88,15 @@ function get_marketdata_yahoo {
if [ "$f_timeframe" = "1d" ] || [ "$f_timeframe" = "1wk" ] || [ "$f_timeframe" = "1mo" ]
then
# restrict to finished candles
[ "$f_timeframe" = "1d" ] && f_sec=$(TZ=US/NY date -d 'last day 0:00' '+%s')
[ "$f_timeframe" = "1wk" ] && f_sec=$(TZ=US/NY date -d 'last monday 0:00' '+%s')
[ "$f_timeframe" = "1mo" ] && f_sec=$(TZ=US/NY date -d "$(date -d "@$(date -d "last month" +%s)" +'%Y-%m-01')" +%s)
# Download historical data from yahoo
g_wget -O "${f_targetcsvtmp}" "https://query1.finance.yahoo.com/v7/finance/download/${f_item}?period1=0&period2=${f_sec}&interval=${f_timeframe}&events=history" 2>"${f_targetcsvtmp}".err
### DEPRECATED - Yahoo Finance deactivated public API 2024-09 !!! ###
g_echo_note "DEPRECATED - Yahoo Finance deactivated public API 2024-09 !!!"
return 1
## restrict to finished candles
#[ "$f_timeframe" = "1d" ] && f_sec=$(TZ=US/NY date -d 'last day 0:00' '+%s')
#[ "$f_timeframe" = "1wk" ] && f_sec=$(TZ=US/NY date -d 'last monday 0:00' '+%s')
#[ "$f_timeframe" = "1mo" ] && f_sec=$(TZ=US/NY date -d "$(date -d "@$(date -d "last month" +%s)" +'%Y-%m-01')" +%s)
## Download historical data from yahoo
#g_wget -O "${f_targetcsvtmp}" "https://query1.finance.yahoo.com/v7/finance/download/${f_item}?period1=0&period2=${f_sec}&interval=${f_timeframe}&events=history" 2>"${f_targetcsvtmp}".err
else
# Download data from yahoo
g_wget -O "${f_targetjsontmp}" "https://query1.finance.yahoo.com/v7/finance/chart/${f_item}?interval=${f_timeframe}&period2=${f_sec}" 2>"${f_targetjsontmp}".err

View File

@ -35,11 +35,17 @@ function get_ohlcv-candles {
g_echo_note "Fetching/Refreshing $f_eco_asset $f_timeframe"
f_histfile="asset-histories/ECONOMY-${f_eco_asset}.history.${f_timeframe}.csv"
# 4h timeframe does not exist on yahoo finance so calc from 1h
# 4h timeframe does not exist on CoinMarketCap so calc from 1h
if [ "$f_timeframe" = "4h" ]
then
f_1h_histfile="asset-histories/ECONOMY-${f_eco_asset}.history.1h.csv"
[ -s "$f_1h_histfile" ] && convert_ohlcv_1h_to_4h "$f_1h_histfile" "$f_histfile"
#f_add_missing_ohlcv_intervals "$f_histfile" 4h
elif [ "$f_timeframe" = "1d" ]
then
f_1h_histfile="asset-histories/ECONOMY-${f_eco_asset}.history.1h.csv"
[ -s "$f_1h_histfile" ] && convert_ohlcv_1h_to_1d "$f_1h_histfile" "$f_histfile"
#f_add_missing_ohlcv_intervals "$f_histfile" 1d
else
get_ohlcv-candle "${f_eco_asset}" ${f_timeframe} "${f_histfile}" "ECONOMY-${f_eco_asset}"
fi
@ -86,36 +92,53 @@ function get_ohlcv-candle {
g_echo_note "RUNNING FUNCTION ${FUNCNAME} $@"
local f_yahoo f_date f_unit_date f_data f_data_array f_data_unit f_open f_high f_low f_close f_volume f_last_unit_date f_last_unit_close
local f_extdata f_date f_unit_date f_data f_data_array f_data_unit f_open f_high f_low f_close f_volume f_last_unit_date f_last_unit_close
local f_symbol="$1"
local f_timeframe=$2
local f_histfile="$3"
local f_asset=$4
unset f_histfile_yahoo
#[ -n "$f_asset" ] && f_yahoo=1a
unset f_histfile_yahoo f_histfile_coinmarketcap
#[ -n "$f_asset" ] && f_extdata=1
#local f_histfile_week="$4"
# fetch >=1d from yahoo finance
# fetch >=1d from coinmarketcap
if [ "$f_timeframe" = "1d" ] || [ "$f_timeframe" = "1w" ] || [ "$f_timeframe" = "1mo" ] || [ -n "$f_asset" ]
then
f_yahoo=1
f_extdata=1
if [ -z "$f_asset" ]
then
f_asset=${f_symbol///}
f_asset=${f_asset//:*}
fi
if [[ $f_asset =~ ^ECONOMY- ]]
then
# economy from yahoo finance
if [ "$f_timeframe" = "1h" ] || [ "$f_timeframe" = "15m" ] || [ "$f_timeframe" = "15m" ]
then
if ! get_marketdata_yahoo "$f_symbol" "$f_asset" $f_timeframe
then
g_echo_error "$f_get_marketdata_yahoo_error"
g_echo_error "$f_get_marketdata_coinmarketcap_error"
return 1
fi
fi
f_histfile_extdata=$f_histfile_yahoo
else
# crypto from coinmarketcap
if ! get_marketdata_coinmarketcap "$f_symbol" "$f_asset" $f_timeframe
then
g_echo_error "$f_get_marketdata_coinmarketcap_error"
return 1
fi
f_histfile_extdata=$f_histfile_coinmarketcap
fi
fi
# fetch OHLCV data (loop because of multiple chunks on exchanges)
while true
do
# fetch data
if [ -z "$f_yahoo" ]
if [ -z "$f_extdata" ]
then
# find the latest time which has not been fetched yet and create f_since
get_ohlcv-candle-latest "$f_symbol" "$f_histfile"
@ -131,16 +154,16 @@ function get_ohlcv-candle {
f_data=${f_data//],/+}
g_array $f_data f_data_ref +
else
# from yahoo finance
g_array "$f_histfile_yahoo" f_data_ref
# from coinmarketcap
g_array "$f_histfile_extdata" f_data_ref
fi
f_data_array=("${f_data_ref[@]}")
# check if last data already in history file and end if already present
g_array "${f_data_array[-1]}" f_last_data_unit_ref ,
[ -z "$f_yahoo" ] && printf -v f_last_unit_date '%(%Y-%m-%d %H:%M:%S)T' ${f_last_data_unit_ref[0]::-3}
[ -n "$f_yahoo" ] && f_last_unit_date="${f_last_data_unit_ref[0]}"
[ -z "$f_extdata" ] && printf -v f_last_unit_date '%(%Y-%m-%d %H:%M:%S)T' ${f_last_data_unit_ref[0]::-3}
[ -n "$f_extdata" ] && f_last_unit_date="${f_last_data_unit_ref[0]}"
#echo "grep -q ^\"$f_last_unit_date\" \"$f_histfile\""
[ -s "$f_histfile" ] && grep -q ^"${f_last_unit_date}," "$f_histfile" && break
@ -151,8 +174,8 @@ function get_ohlcv-candle {
# use array for each unit and assigned values to vars
g_array "$f_data_unit" f_data_unit_ref ,
[ -z "$f_yahoo" ] && printf -v f_unit_date '%(%Y-%m-%d %H:%M:%S)T' ${f_data_unit_ref[0]::-3}
[ -n "$f_yahoo" ] && f_unit_date="${f_last_data_unit_ref[0]}"
[ -z "$f_extdata" ] && printf -v f_unit_date '%(%Y-%m-%d %H:%M:%S)T' ${f_data_unit_ref[0]::-3}
[ -n "$f_extdata" ] && f_unit_date="${f_last_data_unit_ref[0]}"
# check if date is already in history file
[ -s "$f_histfile" ] && grep -q ^"$f_unit_date" "$f_histfile" && continue
@ -172,7 +195,7 @@ function get_ohlcv-candle {
g_num_exponential2normal "$f_close" && f_close=$g_num_exponential2normal_result
f_last_unit_close=$f_close
f_volume=${f_data_unit_ref[5]}
# yahoo historic volume col 6
# coinmarketcap historic volume col 6
[ -n "${f_data_unit_ref[6]}" ] && f_volume=${f_data_unit_ref[6]}
g_num_exponential2normal "$f_volume" && f_volume=$g_num_exponential2normal_result
@ -197,8 +220,8 @@ function get_ohlcv-candle {
done
# end if yahoo (complete file and not time chunks)
[ -n "$f_yahoo" ] && break
# end if coinmarketcap (complete file and not time chunks)
[ -n "$f_extdata" ] && break
# end if the latest refresh is from this day
printf -v f_date '%(%Y-%m-%d)T\n'
@ -245,6 +268,9 @@ function get_ohlcv-candle-latest {
function convert_ohlcv_1h_to_4h {
g_echo_note "RUNNING FUNCTION ${FUNCNAME} $@"
local f_input_file="$1"
local f_output_file="$2"
@ -328,6 +354,104 @@ function convert_ohlcv_1h_to_4h {
}
function convert_ohlcv_1h_to_1d {
g_echo_note "RUNNING FUNCTION ${FUNCNAME} $@"
local f_input_file="$1"
local f_output_file="$2"
local f_latestdate f_nextdate f_mytimezone f_line f_date f_open f_high f_low f_close f_volume f_inday
if ! [ -s "$f_input_file" ]
then
g_echo_error "$f_input_file"
return 0
fi
# crypto timezone UTC
local f_target_timezone=UTC
# US economy timezone America/New_York
[[ $f_input_file =~ ECONOMY ]] && f_target_timezone="America/New_York"
[ -s "$f_output_file" ] && f_latestdate=$(tail -n1 "$f_output_file" | cut -d, -f1)
[ -z "$f_latestdate" ] && f_latestdate=$(date -d "$(head -n1 "$f_input_file" | cut -d, -f1)" +%Y-%m-%d)
f_latestdate=$(TZ="$f_target_timezone" date -d "$f_latestdate $f_mytimezone" "+%Y-%m-%d")
f_nextdate=$(date -d "$f_latestdate +1day" "+%Y-%m-%d")
echo $f_latestdate
echo $f_nextdate
# mytimezone, respecting summer/winter time
f_mytimezone=$(date -d "$_latestdate" +%Z)
local f_today=$(TZ="$f_target_timezone" date "+%Y-%m-%d")
grep -A9999 -B24 "^$f_latestdate" "$f_input_file" | grep ':00:00,' | cut -d, -f1,2,3,4,5,6 | while read f_line
do
g_array "$f_line" g_line_array ,
# calculate day in target timezone
g_line_array[0]=$(TZ="$f_target_timezone" date -d "${g_line_array[0]} $f_mytimezone" "+%Y-%m-%d")
[[ ${g_line_array[0]} = $f_today ]] && break
echo "${g_line_array[0]},${g_line_array[1]},${g_line_array[2]},${g_line_array[3]},${g_line_array[4]},${g_line_array[5]}"
done >"${f_output_file}.tmp"
cat "${f_output_file}.tmp" | while read f_line
do
g_array "$f_line" g_line_array ,
[[ ${g_line_array[0]} = $f_today ]] && break
# wait until the next day in the target file is reached
if [[ ${g_line_array[0]} = $f_nextdate ]]
then
f_end_reached=1
else
[ -z $f_end_reached ] && continue
fi
# if dayend
if [ -n "$f_inday" ] && [[ $f_latestdate != ${g_line_array[0]} ]]
then
#echo "day end $f_date" 1>&2
# day end
f_close=${g_line_array[4]}
echo "$f_date,$f_open,$f_high,$f_low,$f_close,$f_volume"
f_inday=""
fi
# calc values if inday
if [ -n "$f_inday" ]
then
#echo "in day $f_date" 1>&2
# in day
# add volume
g_calc "$f_volume+${g_line_array[5]}"
f_volume=$g_calc_result
# look for higher high
g_num_is_higher ${g_line_array[2]} $f_high && f_high=${g_line_array[2]}
# look for lower low
g_num_is_lower ${g_line_array[3]} $f_low && f_low=${g_line_array[3]}
fi
# if newday
if [ -z "$f_inday" ]
then
#echo "day begin ${g_line_array[0]}" 1>&2
# day begin
f_inday=1
f_date=${g_line_array[0]}
f_latestdate=$f_date
f_open=${g_line_array[1]}
f_high=${g_line_array[2]}
f_low=${g_line_array[3]}
f_volume=${g_line_array[5]}
fi
done >>"$f_output_file"
}
function f_add_missing_ohlcv_intervals {
g_echo_note "RUNNING FUNCTION ${FUNCNAME} $@"
@ -354,12 +478,16 @@ function f_add_missing_ohlcv_intervals {
[[ $f_interval = 1w ]] && return 0
[[ $f_histfile =~ \.1w\. ]] && return 0
local f_prev_date f_prev_vals f_curr_date f_curr_vals f_missing_date f_open f_high f_low f_close f_volume f_percent
local f_prev_date f_prev_vals f_curr_date f_curr_vals f_missing_date f_open f_high f_low f_close f_volume f_percent f_counter
# go through csv per line
while IFS=',' read -r f_curr_date f_open f_high f_low f_close f_volume f_percent f_curr_vals
do
# If the previous date is not empty
echo "$f_curr_date" 1>&2
# on the first line there is no previous date yet - remember it and continue
if [ -z "$f_prev_date" ]
then
f_prev_date=$f_curr_date
@ -367,24 +495,38 @@ function f_add_missing_ohlcv_intervals {
continue
fi
while true
echo "$f_curr_date x" 1>&2
# only 10 iterations to prevent an endless loop
f_counter=0
while [ $f_counter -lt 10 ]
#while true
do
((f_counter++))
echo "$f_curr_date xx $f_counter" 1>&2
# get timestamps in seconds
f_prev_date_in_seconds=$(date -d"$f_prev_date" +%s)
f_curr_date_in_seconds=$(date -d"$f_curr_date" +%s)
# calculate/check the next timestamp from previous
# and check for summer/winter time
# echo [ "$f_prev_date_in_seconds" -gt "$f_curr_date_in_seconds" ] # && break
# calculate/check the next timestamp from previous
# and check for summer/winter time in 4h or greater interval
if [ $f_interval -gt 3600 ]
then
# reduce an hour because of possible summer/winter time change
#g_calc "$f_curr_date_in_seconds - ($f_counter * $f_prev_date_in_seconds - 3600)"
g_calc "$f_curr_date_in_seconds - $f_prev_date_in_seconds - 3600"
else
#g_calc "$f_curr_date_in_seconds - $f_counter * $f_prev_date_in_seconds"
g_calc "$f_curr_date_in_seconds - $f_prev_date_in_seconds"
fi
if [ $g_calc_result -gt $f_interval ]
then
# calc missing timestamp in seconds
#f_curr_date_in_seconds=$(( f_prev_date_in_seconds + f_interval * f_counter ))
f_curr_date_in_seconds=$(( f_prev_date_in_seconds + f_interval ))
# and calculate next timestamp
g_calc "$f_curr_date_in_seconds - $f_prev_date_in_seconds"
@ -397,14 +539,27 @@ function f_add_missing_ohlcv_intervals {
f_missing_date=$(date -d"@$f_curr_date_in_seconds" +"%F")
fi
# prevent endless loop if something goes wrong (strange errors in 1d ohlcv!)
f_missing_date_in_seconds=$(date -d"$f_missing_date" +%s)
if [ $f_missing_date_in_seconds -lt $f_curr_date_in_seconds ]
then
[ -z "$f_curr_vals" ] && echo "$f_curr_date,$f_open,$f_high,$f_low,$f_close,$f_volume,$f_percent"
[ -n "$f_curr_vals" ] && echo "$f_curr_date,$f_open,$f_high,$f_low,$f_close,$f_volume,$f_percent,$f_curr_vals"
f_prev_date=$f_curr_date
break
fi
# write missing line
echo "$f_missing_date,$f_close,$f_close,$f_close,$f_close,0,0.00,$f_curr_vals"
[ -z "$f_curr_vals" ] && echo "$f_missing_date,$f_open,$f_open,$f_open,$f_open,0,0.00"
[ -n "$f_curr_vals" ] && echo "$f_missing_date,$f_open,$f_open,$f_open,$f_open,0,0.00,$f_curr_vals"
f_prev_date=$f_missing_date
else
f_prev_date=$f_curr_date
echo "$f_curr_date,$f_open,$f_high,$f_low,$f_close,$f_volume,$f_percent,$f_curr_vals"
[ -z "$f_curr_vals" ] && echo "$f_curr_date,$f_open,$f_high,$f_low,$f_close,$f_volume,$f_percent"
[ -n "$f_curr_vals" ] && echo "$f_curr_date,$f_open,$f_high,$f_low,$f_close,$f_volume,$f_percent,$f_curr_vals"
break
fi
done
done < "$f_histfile" > $g_tmp/f_add_missing_ohlcv_intervals_result

View File

@ -235,6 +235,30 @@ services:
cpus: '0.5'
memory: 512M
dabo-fetch-coinmarketcapids:
build:
context: .
dockerfile: Dockerfile
restart: unless-stopped
user: 10000:10000
volumes:
- ./dabo:/dabo:ro
- ./strategies:/dabo/strategies:ro
- ./dabo-bot.conf:/dabo/dabo-bot.override.conf
# - ./watch-assets.csv:/dabo/watch-assets.csv
- ./data:/dabo/htdocs:rw
- ./home:/dabo/home:rw
- ./notify.local.conf:/usr/local/etc/notify.conf:ro
- /etc/localtime:/etc/localtime:ro
entrypoint: /dabo/fetch-coinmarketcapids.sh
cpu_shares: 128
deploy:
resources:
limits:
cpus: '0.5'
memory: 512M
# dabo-test:
# build:
# context: .
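The new dabo-fetch-coinmarketcapids service can be started on its own like the other containers; a minimal sketch, assuming a standard docker compose setup:
# build and start only the new ID fetcher, then follow its log
docker compose up -d --build dabo-fetch-coinmarketcapids
docker compose logs -f dabo-fetch-coinmarketcapids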