Compare commits

...

3 Commits

Author SHA1 Message Date
4f45f1c98a bugfixes, charts, economy-data 2024-07-27 17:44:29 +02:00
e06b575e46 niver code 2024-07-15 17:10:48 +02:00
2bdc7d3ab2 initial charts 2024-07-15 16:34:01 +02:00
21 changed files with 1028 additions and 213 deletions

1
.gitignore vendored
View File

@ -17,3 +17,4 @@
/genpw.sh
/watch-assets.csv*
/data/watch.html
/data/lightweight-charts.standalone.production.js

View File

@ -2,11 +2,12 @@
. /dabo/dabo-prep.sh
rm asset-histories/*.history.*.csv.indicators-calculating
rm -f asset-histories/*.history.*.csv.indicators-calculating
while true
do
sleep 300
sleep 900
g_echo_note "Next loop"
# Reload Config
. ../../dabo-bot.conf
. ../../dabo-bot.override.conf

View File

@ -7,6 +7,7 @@ sleep 12
while true
do
g_echo_note "Next loop"
# Reload Config
. ../../dabo-bot.conf
. ../../dabo-bot.override.conf

View File

@ -8,12 +8,13 @@ seconds=$2
while true
do
g_echo_note "Next loop"
# Reload Config
. ../../dabo-bot.conf
. ../../dabo-bot.override.conf
# Timestamp
export f_timestamp=$(g_date_print)
# get orders
# get candles and indicators
get_ohlcv-candles $interval
[ -n "$seconds" ] && sleeptime=$(( ( ($seconds - $(TZ=UTC printf "%(%s)T") % $seconds) % $seconds + 2 )))
#[[ $interval = 4h ]] &&

View File

@ -4,6 +4,7 @@
while true
do
g_echo_note "Next loop"
# Reload Config
. ../../dabo-bot.conf
. ../../dabo-bot.override.conf

View File

@ -4,6 +4,7 @@
while true
do
g_echo_note "Next loop"
# Reload Config
. ../../dabo-bot.conf
. ../../dabo-bot.override.conf
@ -11,6 +12,11 @@ do
export f_timestamp=$(g_date_print)
# get assets
get_symbols_ticker refetchonly || g_echo_warn "Error while refetching tickers from ${STOCK_EXCHANGE}"
#for sym_file in CCXT_SYMBOLS-$STOCK_EXCHANGE CCXT_SYMBOLS-$STOCK_EXCHANGE-by-volume CCXT_SYMBOLS-$STOCK_EXCHANGE-by-volume-trade
#do
# g_echo_note "$sym_file"
# cat "$sym_file"
#done
sleep 3
done

View File

@ -5,6 +5,7 @@
#sleep 1800
while true
do
g_echo_note "Next loop"
transactions_overview
sleep 3600
done

8
dabo/functions/charts.sh Normal file
View File

@ -0,0 +1,8 @@
# Ensure a local copy of the TradingView lightweight-charts JS bundle exists
# and is at most one day old (find -mtime -1 matches files modified <24h ago).
# Globals: g_wget_opts (extra wget options), g_echo_note (project logger).
# Returns: nothing meaningful; best-effort refresh.
function charts {
  if ! find ../lightweight-charts.standalone.production.js -mtime -1 2>/dev/null | grep -q "lightweight-charts.standalone.production.js"
  then
    g_echo_note "Refreshing lightweight-charts.standalone.production.js from https://unpkg.com/lightweight-charts/dist/lightweight-charts.standalone.production.js"
    # Download to a temp file first: a failed "wget -O target" truncates the
    # target and the unconditional touch would then mark the broken/empty file
    # as fresh for a whole day. Only replace and touch on success.
    if wget ${g_wget_opts} -q https://unpkg.com/lightweight-charts/dist/lightweight-charts.standalone.production.js -O ../lightweight-charts.standalone.production.js.tmp
    then
      mv ../lightweight-charts.standalone.production.js.tmp ../lightweight-charts.standalone.production.js
      touch ../lightweight-charts.standalone.production.js
    else
      rm -f ../lightweight-charts.standalone.production.js.tmp
      g_echo_note "Download of lightweight-charts.standalone.production.js failed - keeping existing copy"
    fi
  fi
}

View File

@ -64,24 +64,24 @@ function currency_converter {
fi
# try direct pair
get_marketdata_yahoo_historic "${f_currency_target}-${f_currency}" "${f_currency_target}${f_currency}" || get_marketdata_yahoo_historic "${f_currency}-${f_currency_target}" "${f_currency}${f_currency_target}"
local f_histfile_default="${f_asset_histories}${f_currency_target}${f_currency}.history-raw.csv"
local f_histfile_yahoo="${f_asset_histories}${f_currency_target}${f_currency}.history-yahoo.csv"
get_marketdata_yahoo "${f_currency_target}-${f_currency}" "${f_currency_target}${f_currency}" || get_marketdata_yahoo "${f_currency}-${f_currency_target}" "${f_currency}${f_currency_target}"
local f_histfile_default="${f_asset_histories}${f_currency_target}${f_currency}.history"
local f_histfile_yahoo="${f_asset_histories}${f_currency_target}${f_currency}.history"
# reverse as backup
local f_histfile_default_reverse="${f_asset_histories}${f_currency}${f_currency_target}.history-raw.csv"
local f_histfile_yahoo_reverse="${f_asset_histories}${f_currency}${f_currency_target}.history-yahoo.csv"
local f_histfile_default_reverse="${f_asset_histories}${f_currency}${f_currency_target}.history"
local f_histfile_yahoo_reverse="${f_asset_histories}${f_currency}${f_currency_target}.history"
# search for rate by date
for f_histfile in "$f_histfile_default" "$f_histfile_default_reverse" "$f_histfile_yahoo" "$f_histfile_yahoo_reverse"
do
# histfile has to exist
if [ -s "$f_histfile" ]
then
#if [ -s "${f_histfile}*.csv" ]
#then
# search for most precise date
f_line=$(egrep "^$f_currency_date_minute" "$f_histfile" | tail -n1)
[ -z "$f_line" ] && f_line=$(egrep "^$f_currency_date_hour" "$f_histfile" | tail -n1)
[ -z "$f_line" ] && f_line=$(egrep "^$f_currency_date_day" "$f_histfile" | tail -n1)
[ -z "$f_line" ] && f_line=$(egrep "^$f_currency_date_month" "$f_histfile" | tail -n1)
f_line=$(egrep "^$f_currency_date_minute" "$f_histfile".*m.csv 2>/dev/null | tail -n1)
[ -z "$f_line" ] && f_line=$(egrep "^$f_currency_date_hour" "$f_histfile".*h.csv 2>/dev/null | tail -n1)
[ -z "$f_line" ] && f_line=$(egrep "^$f_currency_date_day" "$f_histfile".*d.csv 2>/dev/null | tail -n1)
[ -z "$f_line" ] && f_line=$(egrep "^$f_currency_date_month" "$f_histfile"*.csv 2>/dev/null | tail -n1)
[ -n "$f_line" ] && f_rate=$(echo "$f_line" | cut -d, -f2)
f_reverse=false
if [ -n "$f_rate" ]
@ -92,7 +92,7 @@ function currency_converter {
[ $f_currency_target = "EUR" ] && [ $f_currency = "USD" ] && f_reverse=true
[[ $f_line =~ ^$f_currency_date_hour ]] && break
fi
fi
#fi
done
# end if no rate found

View File

@ -6,9 +6,24 @@ function get_indicators_all {
local f_histfile f_symbol
shopt -s nullglob
find asset-histories -maxdepth 1 -name "ECONOMY-*.history.[0-5][5dhwm]*.csv" | sort | while read f_histfile
do
if [ -s "${f_histfile}.fetching" ] || [ -s "${f_histfile}.indicators-calculating" ]
then
g_echo_note "Fetching/Indicators-calculating already active on ${f_histfile}"
continue
fi
# find all hitory files
# do the job
printf '%(%Y-%m-%d %H:%M:%S)T' >"${f_histfile}.indicators-calculating"
get_indicators "${f_histfile}" ${f_last_intervals} && printf '%(%Y-%m-%d %H:%M:%S)T' >>"$f_histfile.indicators-calculated"
# add missing intervals for example from weekends from non-24h-assets like economic data - interval from filename
f_add_missing_ohlcv_intervals "${f_histfile}"
rm -f "${f_histfile}.indicators-calculating"
done
shopt -s nullglob
# find all history files of traded symbols
get_symbols_ticker
for f_symbol in "${f_symbols_array_trade[@]}"
do
@ -20,17 +35,13 @@ function get_indicators_all {
if [ -s "$f_histfile" ]
then
# check for already running jobs
if [ -s "${f_histfile}.fetching" ]
if [ -s "${f_histfile}.fetching" ] || [ -s "${f_histfile}.indicators-calculating" ]
then
g_echo_note "Fetching active on ${f_histfile}"
continue
fi
if [ -s "${f_histfile}.indicators-calculating" ]
then
g_echo_note "Indicators-Calculating already active on ${f_histfile}"
g_echo_note "Fetching/Indicators-calculating active on ${f_histfile}"
continue
fi
# do the job
printf '%(%Y-%m-%d %H:%M:%S)T' >"${f_histfile}.indicators-calculating"
get_indicators "${f_histfile}" ${f_last_intervals} && printf '%(%Y-%m-%d %H:%M:%S)T' >>"$f_histfile.indicators-calculated"
rm -f "${f_histfile}.indicators-calculating"
@ -38,6 +49,7 @@ function get_indicators_all {
fi
done
done
shopt +s nullglob
}
@ -47,6 +59,7 @@ function get_indicators {
g_echo_note "RUNNING FUNCTION ${FUNCNAME} $@"
local f_histfile="$1"
local f_last_intervals="$2"
local f_fill_missing_ohlcv_intervals=$3
local f_line
# history
@ -75,12 +88,13 @@ function get_indicators {
# check for missing percentage change
if [ -z "${v_csv_array_associative[change_${i}]}" ]
then
if ! [ $p -lt 0 ]
then
#if ! [ $p -lt 0 ]
#then
#echo "g_percentage-diff ${v_csv_array_associative[close_${p}]} ${v_csv_array_associative[close_${i}]}"
g_percentage-diff ${v_csv_array_associative[close_${p}]} ${v_csv_array_associative[close_${i}]} && f_change=1
#g_percentage-diff ${v_csv_array_associative[close_${p}]} ${v_csv_array_associative[close_${i}]} && f_change=1
g_percentage-diff ${v_csv_array_associative[open_${i}]} ${v_csv_array_associative[close_${i}]} && f_change=1
v_csv_array_associative[change_${i}]=${g_percentage_diff_result}
fi
#fi
fi
# ath (all-time-high) of present data

View File

@ -27,7 +27,8 @@ function get_levels_all {
f_price=${f_tickers_array[$f_symbol_in_array]}
# get relevant data max 30000 (+-15000) prices from current price
mapfile -t f_prices < <((cut -d, -f3,4,5 "asset-histories/${f_symbol}.history.1d.csv" "asset-histories/${f_symbol}.history.4h.csv" "asset-histories/${f_symbol}.history.15m.csv" ; echo $f_price) | sed 's/,/\n/g' | sort -rnu | grep -C 15000 "^${f_price}$")
#mapfile -t f_prices < <((cut -d, -f3,4,5 "asset-histories/${f_symbol}.history.1d.csv" "asset-histories/${f_symbol}.history.4h.csv" "asset-histories/${f_symbol}.history.15m.csv" ; echo $f_price) | sed 's/,/\n/g' | sort -rnu | grep -C 15000 "^${f_price}$")
mapfile -t f_prices < <((cut -d, -f5 "asset-histories/${f_symbol}.history.1d.csv" "asset-histories/${f_symbol}.history.4h.csv" "asset-histories/${f_symbol}.history.15m.csv" ; echo $f_price) | sed 's/,/\n/g' | sort -rnu | grep -C 15000 "^${f_price}$")
# calculate levels
get_levels && printf '%(%Y-%m-%d %H:%M:%S)T' >"${f_levelsfile}.levels-calculated"
@ -68,7 +69,7 @@ function get_levels {
# calc threshold (avarage of percentual price difference)
local f_price_range_percentage=${g_percentage_diff_result//-/}
g_calc "$f_price_range_percentage / $f_number_of_prices" || return 1
g_calc "$f_price_range_percentage / $f_number_of_prices / 100" || return 1
local f_threshold=$g_calc_result
# calc threshold in range (1/100 of percentual range)
g_calc "$f_price_range_percentage / 100" || return 1
@ -78,7 +79,9 @@ function get_levels {
#return 0
# how much occurencies / same prices have so show up for a defined level - percentage from number of prices
local f_min_occurrences=3
#local f_min_occurrences=3
g_calc "$f_number_of_prices / 100 * 0.2" || return 1
local f_min_occurrences=$g_calc_result
# Loop through the f_prices and compare each number with the next
@ -118,7 +121,8 @@ function get_levels {
if [ -n "$f_level_count" ]
then
# end of level
if [ "$f_level_count" -ge "$f_min_occurrences" ]
#if [ "$f_level_count" -ge "$f_min_occurrences" ]
if g_num_is_higher_equal $f_level_count $f_min_occurrences
then
g_calc "($f_level_prices)/$f_level_count"
f_levels+=("$g_calc_result")

View File

@ -2,18 +2,6 @@ function get_marketdata {
g_echo_note "RUNNING FUNCTION ${FUNCNAME} $@"
get_marketdata_from_url YM=F DOW-JONES-INDEX
get_marketdata_from_url ES=F SP500-INDEX
get_marketdata_from_url NQ=F NASDAQ-INDEX
get_marketdata_from_url MFS=F MSCI-EAFE-INDEX
get_marketdata_from_url ZB=F 10-YEAR-TREASURY-YIELD-INDEX
get_marketdata_from_url MCL=F OIL-INVERTED-INDEX
get_marketdata_from_url GC=F GOLD-INDEX
get_marketdata_from_url IWDA.AS MSCI-WORLD-INDEX
get_marketdata_from_url IEO OIL-GAS-INVERTED-INDEX
get_marketdata_from_url KRE KRE-BANKING-INDEX
get_marketdata_from_url DX=F DXY-INVERTED-INDEX
get_marketdata_from_url EURUSD=X EURUSD
get_marketdata_from_url https://www.investing.com/economic-calendar/unemployment-rate-300/ US-UNEMPLOYMENT-INDEX
get_marketdata_from_url https://www.investing.com/economic-calendar/cpi-733 US-CONSUMER-PRICE-INDEX
get_marketdata_from_url https://www.investing.com/indices/fed-funds-composite-interest-rate-opinion US-FED-FEDERAL-FUNDS-RATE-INVERTED-INDEX
@ -56,7 +44,6 @@ function get_marketdata_from_url {
else
# default to Yahoo Finace Symbols via API
echo "wget ${g_wget_opts} -q -O - https://query1.finance.yahoo.com/v8/finance/chart/${f_url} | jq -r '.[].result[].meta.regularMarketPrice'" >MARKET_DATA_CMD
get_marketdata_yahoo_historic "$f_url" "$f_name"
fi
g_runcmd g_retrycmd sh MARKET_DATA_CMD >MARKET_DATA_CMD_OUT-${f_name}.tmp 2>MARKET_DATA_CMD_OUT-${f_name}.tmp.err

View File

@ -0,0 +1,131 @@
# Fetch OHLCV candle history for one asset from Yahoo Finance and merge it
# into asset-histories/<name>.history-yahoo.<timeframe>.csv.
# Arguments:
#   $1 - f_item: symbol, either CCXT style ("BTC/USDT") or a Yahoo ticker
#   $2 - f_name: basename for the target history file
#   $3 - f_timeframe: candle interval; defaults to "1d"; "1w" is mapped to Yahoo's "1wk"
# Globals read:  g_tmp (temp dir)
# Globals set:   f_histfile_yahoo (path of the produced csv),
#                f_get_marketdata_yahoo_error (stderr text on failure)
# Returns: 0 on success or fresh-enough cache, 1 on download failure
# (failure is remembered in FAILED_YAHOO/<name>_HISTORIC_DOWNLOAD for 5 minutes).
function get_marketdata_yahoo {
g_echo_note "RUNNING FUNCTION ${FUNCNAME} $@"
local f_item="$1"
local f_name="$2"
local f_timeframe="$3"
#local f_targetcsv="asset-histories/${f_name}.history-yahoo.csv"
# temp files live under g_tmp; removed up-front so stale data can't leak in
local f_targetcsvtmp="${g_tmp}/${f_name}.history-yahoo.csv"
local f_targetjsontmp="${g_tmp}/${f_name}.history-yahoo.json"
rm -f "$f_targetcsvtmp" "$f_targetjsontmp"
[ -z "$f_timeframe" ] && f_timeframe="1d"
# NOTE: target filename keeps the caller's "1w" spelling; only the API
# parameter is rewritten to Yahoo's "1wk" below.
local f_targetcsv="asset-histories/${f_name}.history-yahoo.${f_timeframe}.csv"
[ "$f_timeframe" = "1w" ] && f_timeframe="1wk"
# export result path for callers (e.g. currency_converter / get_ohlcv-candle)
f_histfile_yahoo="$f_targetcsv"
# transform CCXT symbols to Yahoo symbol
if [[ $f_item =~ / ]]
then
# change / to -
f_item=${f_item////-}
# remove :* (:USDT in contract markets)
f_item=${f_item//:*}
# remove spaces
f_item=${f_item/ /}
fi
# USDT to USD
f_item=${f_item//USDT/USD}
# BUSD to USD
f_item=${f_item//BUSD/USD}
# special names of some economy data/indexes of yahoo finance
[[ $f_item = "DXY" ]] && f_item="DX=F"
[[ $f_item = "DOWJONES" ]] && f_item="YM=F"
[[ $f_item = "SP500" ]] && f_item="ES=F"
[[ $f_item = "NASDAQ" ]] && f_item="NQ=F"
[[ $f_item = "MSCIEAFE" ]] && f_item="MFS=F"
[[ $f_item = "MSCIWORLD" ]] && f_item="IWDA.AS"
[[ $f_item = "10YRTREASURY" ]] && f_item="ZB=F"
[[ $f_item = "OIL" ]] && f_item="MCL=F"
[[ $f_item = "GOLD" ]] && f_item="GC=F"
[[ $f_item = "OILGAS" ]] && f_item="IEO"
[[ $f_item = "USD-EUR" ]] && f_item="USDEUR=X"
[[ $f_item = "EUR-USD" ]] && f_item="EURUSD=X"
# special names of some coins/currencies of yahoo finance
# (Yahoo disambiguates coins with a numeric CoinMarketCap-style suffix)
[[ $f_item = "ARB-USD" ]] && f_item="ARB11841-USD"
[[ $f_item = "DUEL-USD" ]] && f_item="DUEL28868-USD"
[[ $f_item = "GMX-USD" ]] && f_item="GMX11857-USD"
[[ $f_item = "MEW-USD" ]] && f_item="MEW30126-USD"
[[ $f_item = "TAO-USD" ]] && f_item="TAO22974-USD"
[[ $f_item = "UNI-USD" ]] && f_item="UNI7083-USD"
[[ $f_item = "SUI-USD" ]] && f_item="SUI20947-USD"
[[ $f_item = "BLAZE-USD" ]] && f_item="BLAZE30179-USD"
[[ $f_item = "BEER-USD" ]] && f_item="BEER31337-USD"
[[ $f_item = "TAI-USD" ]] && f_item="TAI20605-USD"
[[ $f_item = "DEFI-USD" ]] && f_item="DEFI29200-USD"
[[ $f_item = "TON-USD" ]] && f_item="TON11419-USD"
[[ $f_item = "BRETT-USD" ]] && f_item="BRETT29743-USD"
[[ $f_item = "ADS-USD" ]] && f_item="%24ADS-USD"
[[ $f_item = "PT-USD" ]] && f_item="PT28582-USD"
# end if already failed the last 5 minutes
if [ -f "FAILED_YAHOO/${f_name}_HISTORIC_DOWNLOAD" ]
then
# marker older than 5 minutes is removed so the download is retried
find "FAILED_YAHOO/${f_name}_HISTORIC_DOWNLOAD" -mmin +5 -delete
if [ -f "FAILED_YAHOO/${f_name}_HISTORIC_DOWNLOAD" ]
then
return 1
fi
fi
# end if already exists and modified under given time
if [ -s "${f_targetcsv}" ] && find "${f_targetcsv}" -mmin -2 | grep -q "${f_targetcsv}"
then
return 0
fi
# current unix time, used as period2 (end of requested range)
local f_sec
printf -v f_sec '%(%s)T'
# cleanup
rm -f "$f_targetcsvtmp" "${f_targetcsvtmp}".err ${f_targetjsontmp} "${f_targetjsontmp}".err
if [ "$f_timeframe" = "1d" ] || [ "$f_timeframe" = "1wk" ] || [ "$f_timeframe" = "1mo" ]
then
# Download historical data from yahoo
# (v7 download endpoint already returns date-keyed CSV)
g_wget -O "${f_targetcsvtmp}" "https://query1.finance.yahoo.com/v7/finance/download/${f_item}?period1=0&period2=${f_sec}&interval=${f_timeframe}&events=history" 2>"${f_targetcsvtmp}".err
else
# Download data from yahoo
# intraday intervals only exist on the chart endpoint (JSON), so fetch
# JSON and flatten timestamp/open/high/low/close/volume into CSV via jq
g_wget -O "${f_targetjsontmp}" "https://query1.finance.yahoo.com/v7/finance/chart/${f_item}?interval=${f_timeframe}&period2=${f_sec}" 2>"${f_targetjsontmp}".err
jq -r '.chart.result[0] as $result | range(0; $result.timestamp | length) | [$result.timestamp[.], $result.indicators.quote[0].open[.], $result.indicators.quote[0].high[.], $result.indicators.quote[0].low[.], $result.indicators.quote[0].close[.], $result.indicators.quote[0].volume[.]] | @csv' "${f_targetjsontmp}" >"${f_targetcsvtmp}.unixtime" 2>"${f_targetjsontmp}".err
# change unix time to human readable and fill unfilled lines, ignore lines not with 00 seconds (last line)
local date_time open high low close lastopen lasthigh lastlow lastclose volume
while IFS=, read -r timestamp open high low close volume; do
date_time=$(printf "%(%Y-%m-%d %H:%M:%S)T" $timestamp)
# carry forward the previous candle's values for gaps in Yahoo's data
[ -z "$open" ] && open=$lastopen
[ -z "$high" ] && high=$lasthigh
[ -z "$low" ] && low=$lastlow
[ -z "$close" ] && close=$lastclose
[ -z "$volume" ] && volume=0
lastopen=$open
lasthigh=$high
lastlow=$low
lastclose=$close
echo "$date_time,$open,$high,$low,$close,$volume"
done < "${f_targetcsvtmp}.unixtime" | grep ":00," >${f_targetcsvtmp}
fi
# error if no csvfile available
if ! [ -s "${f_targetcsvtmp}" ]
then
# remember the failure (rate-limits retries to every 5 minutes, see above)
mkdir -p FAILED_YAHOO
cat "${f_targetcsvtmp}.err" "${f_targetjsontmp}.err" > "FAILED_YAHOO/${f_name}_HISTORIC_DOWNLOAD" 2>&1
f_get_marketdata_yahoo_error=$(cat "${f_targetcsvtmp}.err" "${f_targetjsontmp}.err" 2>/dev/null)
return 1
fi
# put the csvs together
# keep only plausible data rows (date followed by a number) and dedupe by
# the first two comma-separated fields
if [ -s "${f_targetcsv}" ] && [ -s "${f_targetcsvtmp}" ]
then
egrep -h "^[1-9][0-9][0-9][0-9]-[0-1][0-9]-[0-9][0-9].*,[0-9]" "${f_targetcsvtmp}" "${f_targetcsv}" | sort -k1,2 -t, -u >"${f_targetcsv}.tmp"
mv "${f_targetcsv}.tmp" "${f_targetcsv}"
else
egrep -h "^[1-9][0-9][0-9][0-9]-[0-1][0-9]-[0-9][0-9].*,[0-9]" "${f_targetcsvtmp}" | sort -k1,2 -t, -u >"$f_targetcsv"
fi
}

View File

@ -1,102 +0,0 @@
# Download daily/weekly historical OHLCV data for one asset from the Yahoo
# Finance v7 "download" endpoint and merge it into the local history csv.
# NOTE(review): this diff removes this function — it appears to be superseded
# by get_marketdata_yahoo; confirm no remaining callers before deleting.
# Arguments:
#   $1 - f_item: symbol, either CCXT style ("BTC/USDT") or a Yahoo ticker
#   $2 - f_name: basename for the target history file
#   $3 - f_timeframe: optional interval; defaults to "1d"; "1w" becomes Yahoo's "1wk"
# Globals read:  g_tmp (temp dir)
# Globals set:   f_histfile_yahoo (path of the produced csv)
# Returns: 0 on success or fresh-enough cache, 1 on failure
# (failure is remembered in FAILED_YAHOO/<name>_HISTORIC_DOWNLOAD for 1 hour).
function get_marketdata_yahoo_historic {
g_echo_note "RUNNING FUNCTION ${FUNCNAME} $@"
local f_item="$1"
local f_name="$2"
local f_timeframe="$3"
local f_targetcsv="asset-histories/${f_name}.history-yahoo.csv"
local f_targetcsvtmp="${g_tmp}/${f_name}.history-yahoo.csv"
# NOTE(review): with an explicit timeframe the "target" csv is placed under
# the temp dir, not under asset-histories — looks intentional but verify
[ -n "$f_timeframe" ] && f_targetcsv="${g_tmp}/${f_name}.history-yahoo.${f_timeframe}.csv"
# export result path for callers
f_histfile_yahoo="$f_targetcsv"
[ "$f_timeframe" = "1w" ] && f_timeframe="1wk"
[ -z "$f_timeframe" ] && f_timeframe="1d"
# transform CCXT symbols to Yahoo symbol
if [[ $f_item =~ / ]]
then
# change / to -
f_item=${f_item////-}
# remove :* (:USDT in contract markets)
f_item=${f_item//:*}
# remove spaces
f_item=${f_item/ /}
fi
# USDT to USD
f_item=${f_item//USDT/USD}
# BUSD to USD
f_item=${f_item//BUSD/USD}
# special names of some coins/currencies of yahoo finance
[[ $f_item = "USD-EUR" ]] && f_item="USDEUR=X"
[[ $f_item = "EUR-USD" ]] && f_item="EURUSD=X"
[[ $f_item = "ARB-USD" ]] && f_item="ARB11841-USD"
[[ $f_item = "DUEL-USD" ]] && f_item="DUEL28868-USD"
[[ $f_item = "GMX-USD" ]] && f_item="GMX11857-USD"
[[ $f_item = "MEW-USD" ]] && f_item="MEW30126-USD"
[[ $f_item = "TAO-USD" ]] && f_item="TAO22974-USD"
[[ $f_item = "UNI-USD" ]] && f_item="UNI7083-USD"
[[ $f_item = "SUI-USD" ]] && f_item="SUI20947-USD"
[[ $f_item = "BLAZE-USD" ]] && f_item="BLAZE30179-USD"
[[ $f_item = "BEER-USD" ]] && f_item="BEER31337-USD"
[[ $f_item = "TAI-USD" ]] && f_item="TAI20605-USD"
[[ $f_item = "DEFI-USD" ]] && f_item="DEFI29200-USD"
[[ $f_item = "TON-USD" ]] && f_item="TON11419-USD"
[[ $f_item = "BRETT-USD" ]] && f_item="BRETT29743-USD"
[[ $f_item = "ADS-USD" ]] && f_item="%24ADS-USD"
[[ $f_item = "PT-USD" ]] && f_item="PT28582-USD"
# end if already failed the last hour
if [ -f "FAILED_YAHOO/${f_name}_HISTORIC_DOWNLOAD" ]
then
# marker older than an hour is removed so the download is retried
find "FAILED_YAHOO/${f_name}_HISTORIC_DOWNLOAD" -mmin +60 -delete
if [ -f "FAILED_YAHOO/${f_name}_HISTORIC_DOWNLOAD" ]
then
#g_echo_note "${f_targetcsv} already failed to downloaded within last hour"
return 1
fi
fi
# end if already exists and modified under given time
if [ -s "${f_targetcsv}" ] && find "${f_targetcsv}" -mmin -2 | grep -q "${f_targetcsv}"
then
#g_echo_note "${f_targetcsv} has already been downloaded within 2 minutes"
return 0
fi
# current unix time, used as period2 (end of requested range)
local f_sec
printf -v f_sec '%(%s)T'
# cleanup
rm -f "$f_targetcsvtmp" "${f_targetcsvtmp}".err
# Download historical data from yahoo
#g_echo_note "Fetching Yahoo-Historical data of $f_name"
g_wget -O ${f_targetcsvtmp} "https://query1.finance.yahoo.com/v7/finance/download/${f_item}?period1=0&period2=${f_sec}&interval=${f_timeframe}&events=history" 2>"${f_targetcsvtmp}".err
# merge with existing csv: keep plausible data rows only and dedupe
if [ -s "${f_targetcsv}" ] && [ -s "${f_targetcsvtmp}" ]
then
egrep -h "^[1-9][0-9][0-9][0-9]-[0-1][0-9]-[0-9][0-9],[0-9]" "${f_targetcsvtmp}" "${f_targetcsv}" | sort -u >"${f_targetcsv}.tmp"
mv "${f_targetcsv}.tmp" "${f_targetcsv}"
elif [ -s "${f_targetcsvtmp}" ]
then
egrep -h "^[1-9][0-9][0-9][0-9]-[0-1][0-9]-[0-9][0-9],[0-9]" "${f_targetcsvtmp}" >"$f_targetcsv"
else
# report error
# g_echo_note "Fetching historical data of $f_name from Yahoo failed!
#CMD:
#wget -q -O ${f_targetcsvtmp} ${g_wget_opts} \"https://query1.finance.yahoo.com/v7/finance/download/${f_item}?period1=0&period2=${f_sec}&interval=${f_timeframe}&events=history\"
#
#OUT:
#$(cat "${f_targetcsvtmp}")
#
#ERR:
#$(cat "${f_targetcsvtmp}".err)
#"
# remember the failure (rate-limits retries to hourly, see above)
mkdir -p FAILED_YAHOO
mv "${f_targetcsvtmp}.err" "FAILED_YAHOO/${f_name}_HISTORIC_DOWNLOAD"
return 1
fi
}

View File

@ -2,10 +2,34 @@ function get_ohlcv-candles {
g_echo_note "RUNNING FUNCTION ${FUNCNAME} $@"
local f_asset f_histfile f_symbol f_timeframe
local f_histfile f_symbol f_timeframe f_1h_histfile
local f_timeframes="1w 1d 4h 1h 15m 5m"
[ -n $1 ] && f_timeframes=$1
# fetch economy candles from yahoo finance
local f_eco_assets="DXY DOWJONES SP500 NASDAQ MSCIEAFE 10YRTREASURY OIL GOLD MSCIWORLD OILGAS KRE EUR-USD"
local f_eco_asset
for f_eco_asset in $f_eco_assets
do
for f_timeframe in $f_timeframes
do
echo "=== Fetching/Refreshing $f_eco_asset $f_timeframe ==="
f_histfile="asset-histories/ECONOMY-${f_eco_asset}.history.${f_timeframe}.csv"
# 4h timefrage does not exist on yahoo finance so calc from 1h
if [ "$f_timeframe" = "4h" ]
then
f_1h_histfile="asset-histories/ECONOMY-${f_eco_asset}.history.1h.csv"
[ -s "$f_1h_histfile" ] && convert_ohlcv_1h_to_4h "$f_1h_histfile" "$f_histfile"
else
get_ohlcv-candle "${f_eco_asset}" ${f_timeframe} "${f_histfile}" "ECONOMY-${f_eco_asset}"
fi
# refresh latest indicators
[ -s "${f_histfile}" ] && get_indicators "${f_histfile}" 900
done
done
# fetch crypto candles
get_symbols_ticker
for f_symbol in BTC/$CURRENCY "${f_symbols_array_trade[@]}"
do
@ -30,11 +54,12 @@ function get_ohlcv-candles {
get_ohlcv-candle "$f_symbol" $f_timeframe "${f_histfile}" && printf '%(%Y-%m-%d %H:%M:%S)T' >>"$f_histfile.fetched"
# refresh latest indicators
get_indicators "${f_histfile}" 801
get_indicators "${f_histfile}" 900
rm -f "${f_histfile}.fetching"
done
done
}
@ -42,17 +67,29 @@ function get_ohlcv-candle {
g_echo_note "RUNNING FUNCTION ${FUNCNAME} $@"
local f_yahoo f_date f_unit_date f_data f_data_array f_data_unit f_open f_high f_low f_close f_volume f_last_unit_date f_last_unit_close
local f_symbol="$1"
local f_timeframe=$2
local f_histfile="$3"
local f_asset=$4
unset f_histfile_yahoo
#[ -n "$f_asset" ] && f_yahoo=1a
#local f_histfile_week="$4"
local f_yahoo f_date f_unit_date f_data f_data_array f_data_unit f_open f_high f_low f_close f_volume
# fetch >=1d from yahoo finance
if [ "$f_timeframe" = "1d" ] || [ "$f_timeframe" = "1w" ] || [ "$f_timeframe" = "1mo" ]
if [ "$f_timeframe" = "1d" ] || [ "$f_timeframe" = "1w" ] || [ "$f_timeframe" = "1mo" ] || [ -n "$f_asset" ]
then
get_marketdata_yahoo_historic "$f_symbol" "$f_asset" $f_timeframe && f_yahoo=1
f_yahoo=1
if [ -z "$f_asset" ]
then
f_asset=${f_symbol///}
f_asset=${f_asset//:*}
fi
if ! get_marketdata_yahoo "$f_symbol" "$f_asset" $f_timeframe
then
g_echo_error "$f_get_marketdata_yahoo_error"
return 1
fi
fi
# fetch OHLCV data (loop because of multiple chunks on exchanges)
@ -76,16 +113,13 @@ function get_ohlcv-candle {
g_array $f_data f_data_ref +
else
# from yahoo finance
g_array $f_histfile_yahoo f_data_ref
g_array "$f_histfile_yahoo" f_data_ref
fi
f_data_array=("${f_data_ref[@]}")
#echo "+++ $f_data"
#echo "--- ${f_data_array[-1]}"
# check if last data already in history file and end if already present
g_array ${f_data_array[-1]} f_last_data_unit_ref ,
g_array "${f_data_array[-1]}" f_last_data_unit_ref ,
[ -z "$f_yahoo" ] && printf -v f_last_unit_date '%(%Y-%m-%d %H:%M:%S)T' ${f_last_data_unit_ref[0]::-3}
[ -n "$f_yahoo" ] && f_last_unit_date="${f_last_data_unit_ref[0]}"
@ -96,8 +130,8 @@ function get_ohlcv-candle {
for f_data_unit in "${f_data_array[@]}"
do
# use array for each unit and assignt values to vars
g_array $f_data_unit f_data_unit_ref ,
# use array for each unit and assigned values to vars
g_array "$f_data_unit" f_data_unit_ref ,
[ -z "$f_yahoo" ] && printf -v f_unit_date '%(%Y-%m-%d %H:%M:%S)T' ${f_data_unit_ref[0]::-3}
[ -n "$f_yahoo" ] && f_unit_date="${f_last_data_unit_ref[0]}"
@ -105,16 +139,22 @@ function get_ohlcv-candle {
[ -s "$f_histfile" ] && grep -q ^"$f_unit_date" "$f_histfile" && continue
# define field vars and convert exponential number (for example 9.881e-05) to "normal" notation
f_open=$f_last_unit_close
if [ -z "$f_open" ]
then
f_open=${f_data_unit_ref[1]}
g_num_exponential2normal "$f_open" && f_open=$g_num_exponential2normal_result
fi
g_num_exponential2normal "$f_open" && f_open=$g_num_exponential2normal_resul
f_high=${f_data_unit_ref[2]}
g_num_exponential2normal "$f_high" && f_high=$g_num_exponential2normal_result
f_low=${f_data_unit_ref[3]}
g_num_exponential2normal "$f_low" && f_low=$g_num_exponential2normal_result
f_close=${f_data_unit_ref[4]}
g_num_exponential2normal "$f_close" && f_close=$g_num_exponential2normal_result
f_last_unit_close=$f_close
f_volume=${f_data_unit_ref[5]}
[ -n "$f_yahoo" ] && f_volume=${f_data_unit_ref[6]}
# yahoo historic volume col 6
[ -n "${f_data_unit_ref[6]}" ] && f_volume=${f_data_unit_ref[6]}
g_num_exponential2normal "$f_volume" && f_volume=$g_num_exponential2normal_result
# check date for valid date
@ -127,8 +167,8 @@ function get_ohlcv-candle {
# check vars for valid numbers
if ! g_num_valid_number "$f_open" "$f_high" "$f_low" "$f_close" "$f_volume"
then
echo "$f_open $f_high $f_low $f_close $f_volume"
g_echo_warn "Data in \"$f_data_unit\" seems to be invalid @$f_histfile:$f_unit_date"
g_echo_warn "Data in \"$f_data_unit\" seems to be invalid @$f_histfile:$f_unit_date
$f_open $f_high $f_low $f_close $f_volume"
break
fi
@ -161,10 +201,13 @@ function get_ohlcv-candle-latest {
#local f_histfile_week="$3"
# find latest time which is not fetched already
if [ -s "$f_histfile" ]
[ -s "$f_histfile" ] && local f_last_line=$(tail -n1 "$f_histfile" | grep ^[0-9] | cut -d, -f1,5)
if [ -n "$f_last_line" ]
then
# get latest date from histfile if it exists
f_since=$(date -d "$(grep ^[0-9] "$f_histfile" | tail -n1 | cut -d, -f1)" +%s000)
local f_last_line=$(tail -n1 "$f_histfile" | grep ^[0-9] | cut -d, -f1,5)
f_since=$(date -d "${f_last_line/,*/}" +%s000)
f_last_unit_close=${f_last_line/*,/}
else
# if hist does not exist
# get week to find the oldest point in time available in exchange
@ -180,3 +223,180 @@ function get_ohlcv-candle-latest {
printf -v f_since_date '%(%Y-%m-%d)T\n' ${f_since::-3}
}
# Aggregate a 1h-candle CSV into 4h candles, appending only lines newer than
# what the output file already contains.
# Arguments:
#   $1 - f_input_file: csv of 1h candles "date,open,high,low,close,volume[,...]"
#   $2 - f_output_file: csv to append the derived 4h candles to
# Relies on project helpers g_num_is_higher_equal / g_num_is_lower_equal /
# g_calc (sets g_calc_result) for numeric comparison and arithmetic.
# NOTE(review): the 4h bucket boundaries are shifted by the local timezone
# offset of each candle's date (DST-aware) — confirm the history files are in
# local time, not UTC.
function convert_ohlcv_1h_to_4h {
local f_input_file="$1"
local f_output_file="$2"
local f_high=0
local f_volume=0
local f_tz f_hour f_lastdate f_currentdate f_latest_date f_go_on f_1h_open f_1h_high f_1h_low f_1h_close f_1h_volume f_date f_open f_low f_close f_4hintervals f_rest
# possible 4h intervals (regex alternations over the hour-of-day, one set
# per timezone-offset remainder)
local f_4hintervals0='^0$|^4$|^8$|^12$|^16$|^20$'
local f_4hintervals1='^1$|^5$|^9$|^13$|^17$|^21$'
local f_4hintervals2='^2$|^6$|^10$|^14$|^18$|^22$'
local f_4hintervals3='^3$|^7$|^11$|^15$|^19$|^23$'
# check for already converted lines
# (resume: remember the last converted input date; if output is empty we
# can start converting immediately)
if [ -s "$f_output_file" ]
then
f_latest_date=$(tail -n1 "$f_output_file" | cut -d, -f1)
else
f_go_on=1
fi
# Read the input file line by line
while IFS=',' read -r f_date f_1h_open f_1h_high f_1h_low f_1h_close f_1h_volume f_rest
do
# check for already converted lines
# (skip until we pass the last date already present in the output)
if [[ $f_latest_date = $f_date ]]
then
f_go_on=1
continue
fi
[ -z "$f_go_on" ] && continue
# "YYYY-MM-DD HH" prefix of the candle date
f_currentdate="${f_date:0:13}"
# define intervals by considering local/servers TZ with summer and winter season
f_hour=${f_date:11:2}
f_hour=${f_hour#0}
# numeric absolute hour part of the UTC offset for this date (DST-aware)
f_tz=$(date -d "$f_currentdate" +%:z)
f_tz=${f_tz//:*}
f_tz=${f_tz#+}
f_tz=${f_tz#-}
f_tz=${f_tz#0}
f_4hintervals=$f_4hintervals0
[[ $f_tz =~ ^1$|^5$|^9$|^13$ ]] && f_4hintervals=$f_4hintervals1
[[ $f_tz =~ ^2$|^6$|^10$|^14$ ]] && f_4hintervals=$f_4hintervals2
[[ $f_tz =~ ^3$|^7$|^11$ ]] && f_4hintervals=$f_4hintervals3
# is there a new 4h interval
if [[ $f_hour =~ $f_4hintervals ]]
then
# If it's not the first loop, print the previous 4h interval before cleaning the variables
#if [ -n "$f_lastdate" ]
if [ -n "$f_open" ]
then
echo "${f_lastdate}:00:00,$f_open,$f_high,$f_low,$f_close,$f_volume"
fi
# reset the variables for the new 4h interval
f_low=""
f_high=0
f_lastdate=$f_currentdate
f_volume=0
# set open for next interval to close from last interval
f_open=$f_close
fi
# set close to 1h close
f_close=$f_1h_close
# check if the current value is higher or lower than the current high/low
g_num_is_higher_equal $f_1h_high $f_high && f_high=$f_1h_high
[ -z "$f_low" ] && f_low=$f_1h_low
g_num_is_lower_equal $f_1h_low $f_low && f_low=$f_1h_low
# add volume to the current 4h volume
g_calc "$f_volume + $f_1h_volume"
f_volume=$g_calc_result
# the echo above goes to the output file; note the still-open (incomplete)
# 4h interval is intentionally NOT flushed at EOF — it is emitted on a
# later run once its boundary appears
done < "$f_input_file" >>"$f_output_file"
}
# Fill gaps in an OHLCV history csv (e.g. weekend gaps of non-24/7 assets)
# by inserting synthetic flat candles (open=high=low=close=previous close,
# volume 0, change 0.00) at every missing interval step.
# Arguments:
#   $1 - f_histfile: history csv "date,open,high,low,close,volume,percent,..."
#   $2 - f_interval: interval as 5m/15m/1h/4h/1d; derived from the filename
#        (".5m.", ".15m." …) when not given
# The file is rewritten in place only when the gap-filled version differs.
function f_add_missing_ohlcv_intervals {
g_echo_note "RUNNING FUNCTION ${FUNCNAME} $@"
local f_histfile="$1"
local f_interval="$2"
# map interval name to seconds
[[ $f_interval = 5m ]] && f_interval=300
[[ $f_interval = 15m ]] && f_interval=900
[[ $f_interval = 1h ]] && f_interval=3600
[[ $f_interval = 4h ]] && f_interval=14400
[[ $f_interval = 1d ]] && f_interval=86400
# get interval from filename if not given
if [ -z "$f_interval" ]
then
[[ $f_histfile =~ \.5m\. ]] && f_interval=300
[[ $f_histfile =~ \.15m\. ]] && f_interval=900
[[ $f_histfile =~ \.1h\. ]] && f_interval=3600
[[ $f_histfile =~ \.4h\. ]] && f_interval=14400
[[ $f_histfile =~ \.1d\. ]] && f_interval=86400
fi
# 1w should be complete in every case
[[ $f_interval = 1w ]] && return 0
[[ $f_histfile =~ \.1w\. ]] && return 0
local f_prev_date f_prev_vals f_curr_date f_curr_vals f_missing_date f_open f_high f_low f_close f_volume f_percent
# go through csv per line
while IFS=',' read -r f_curr_date f_open f_high f_low f_close f_volume f_percent f_curr_vals
do
# first line: nothing to compare against yet — emit it unchanged and
# remember its date
if [ -z "$f_prev_date" ]
then
f_prev_date=$f_curr_date
echo "$f_curr_date,$f_open,$f_high,$f_low,$f_close,$f_volume,$f_percent,$f_curr_vals"
continue
fi
# inner loop: insert one synthetic candle per missing step until the gap
# to the current line is closed
while true
do
# get second timestamps
f_prev_date_in_seconds=$(date -d"$f_prev_date" +%s)
f_curr_date_in_seconds=$(date -d"$f_curr_date" +%s)
# calculate/check the next timestamp from previous
# and check for summer/winter time
if [ $f_interval -gt 3600 ]
then
# reduce an hour because of possible summer/winter time change
g_calc "$f_curr_date_in_seconds - $f_prev_date_in_seconds - 3600"
else
g_calc "$f_curr_date_in_seconds - $f_prev_date_in_seconds"
fi
if [ $g_calc_result -gt $f_interval ]
then
# calc missing timestamp in seconds
f_curr_date_in_seconds=$(( f_prev_date_in_seconds + f_interval ))
# and calculate next timestamp
g_calc "$f_curr_date_in_seconds - $f_prev_date_in_seconds"
# change date format if day or week
# (sub-daily files use "YYYY-MM-DD HH:MM:SS", daily files "YYYY-MM-DD")
if [ $f_interval -lt 86400 ]
then
f_missing_date=$(date -d"@$f_curr_date_in_seconds" +"%F %T")
else
f_missing_date=$(date -d"@$f_curr_date_in_seconds" +"%F")
fi
# write missing line
# NOTE(review): the synthetic line reuses $f_curr_vals (trailing
# indicator columns) from the NEXT real candle — confirm intended
echo "$f_missing_date,$f_close,$f_close,$f_close,$f_close,0,0.00,$f_curr_vals"
f_prev_date=$f_missing_date
else
# gap closed — emit the real line and move on
f_prev_date=$f_curr_date
echo "$f_curr_date,$f_open,$f_high,$f_low,$f_close,$f_volume,$f_percent,$f_curr_vals"
break
fi
done
done < "$f_histfile" > $g_tmp/f_add_missing_ohlcv_intervals_result
# replace old file with new if they are different
if ! cmp --silent "$f_histfile" "$g_tmp/f_add_missing_ohlcv_intervals_result"
then
g_echo_note "Replacing $f_histfile"
#diff "$g_tmp/f_add_missing_ohlcv_intervals_result" "$f_histfile"
cat "$g_tmp/f_add_missing_ohlcv_intervals_result" >"$f_histfile"
fi
}

View File

@ -95,10 +95,16 @@ function get_symbols_ticker {
f_symbols=${f_symbols//$'\n'/'+'}
if [ -z "$f_symbols" ]
then
if [ "$f_fetch" = "retry" ]
then
g_echo_warn "Could not get symbols list - empty"
return 1
fi
sleep 5
get_symbols_ticker retry || return 1
g_echo_note "Could not get symbols list - empty - retrying"
fi
}

View File

@ -33,12 +33,9 @@ function get_transactions {
touch --time=mtime -t $(date -d "now -1 day" +%Y%m%d%H%M) TRANSACTIONS-TIMESTAMP
# go through symbols
#local f_orig_IFS=$IFS
#IFS=+
#for f_symbol in $f_symbols
for f_symbol in "${f_symbols_array[@]}"
do
f_symbol=${f_symbol//,*}
#f_symbol=${f_symbol//,*}
# binance does not allow derivate trading in many countries so ignore because of 400-Error
[[ $f_symbol =~ : ]] && [[ $f_exchange = binance ]] && continue
@ -60,7 +57,7 @@ function get_transactions {
#[ -z "$f_ccxt_result" ] && continue
# get f_asset+f_currency from symbol (BTC/USDT)
g_array $f_symbol f_symbol_array /
g_array "$f_symbol" f_symbol_array /
f_asset=${f_symbol_array[0]}
f_currency=${f_symbol_array[1]}

View File

@ -78,7 +78,7 @@ function transactions_overview {
f_fee_currency=$f_currency
else
#g_echo_warn "!!!!!! Could not convert currency $f_fee_currency to $f_currency"
echo "$f_date,$f_exchange,$f_type,$f_asset,$f_asset_amount,$f_currency,$f_currency_amount,$f_one_year_ago,$f_currency_spent,$f_asset_quantity,$f_result,$f_sell_result,$f_tax_type,$f_taxable,$f_currency_amount_eur,$f_result_eur,$f_sell_result_eur,$f_asset_quantity_remaining,$f_note,Could not convert currency $f_fee_currency to $f_currency"
echo "$f_date,$f_exchange,$f_type,$f_asset,$f_asset_amount,$f_currency,$f_currency_amount,$f_one_year_ago,$f_currency_spent,$f_asset_quantity,$f_result,$f_sell_result,$f_tax_type,$f_taxable,$f_currency_amount_eur,$f_result_eur,$f_sell_result_eur,$f_asset_quantity_remaining,$f_note,Could not convert currency $f_fee_currency to $f_currency" 1>&2
continue
fi
fi
@ -178,14 +178,14 @@ function transactions_overview {
if [ $f_currency_spent = 0 ]
then
#g_echo_warn "!!!!!! Sell never buyed!? Spent currency on $f_asset is 0"
echo "$f_date,$f_exchange,$f_type,$f_asset,$f_asset_amount,$f_currency,$f_currency_amount,$f_one_year_ago,$f_currency_spent,$f_asset_quantity,$f_result,$f_sell_result,$f_tax_type,$f_taxable,$f_currency_amount_eur,$f_result_eur,$f_sell_result_eur,$f_asset_quantity_remaining,$f_note,Sell never buyed!? Spent currency on $f_asset is 0"
echo "$f_date,$f_exchange,$f_type,$f_asset,$f_asset_amount,$f_currency,$f_currency_amount,$f_one_year_ago,$f_currency_spent,$f_asset_quantity,$f_result,$f_sell_result,$f_tax_type,$f_taxable,$f_currency_amount_eur,$f_result_eur,$f_sell_result_eur,$f_asset_quantity_remaining,$f_note,Sell never buyed!? Spent currency on $f_asset is 0" 1>&2
continue
fi
# if selling what does not exist!?
if [ $f_asset_quantity = 0 ]
then
#g_echo_warn "!!!!!! Sell never buyed!? Buyed asset $f_asset is 0"
echo "$f_date,$f_exchange,$f_type,$f_asset,$f_asset_amount,$f_currency,$f_currency_amount,$f_one_year_ago,$f_currency_spent,$f_asset_quantity,$f_result,$f_sell_result,$f_tax_type,$f_taxable,$f_currency_amount_eur,$f_result_eur,$f_sell_result_eur,$f_asset_quantity_remaining,$f_note,Sell never buyed!? Buyed asset $f_asset is 0"
echo "$f_date,$f_exchange,$f_type,$f_asset,$f_asset_amount,$f_currency,$f_currency_amount,$f_one_year_ago,$f_currency_spent,$f_asset_quantity,$f_result,$f_sell_result,$f_tax_type,$f_taxable,$f_currency_amount_eur,$f_result_eur,$f_sell_result_eur,$f_asset_quantity_remaining,$f_note,Sell never buyed!? Buyed asset $f_asset is 0" 1>&2
continue
fi
fi
@ -217,12 +217,12 @@ function transactions_overview {
# Alternatively check for remaining dust only to find end of trade and calculate PNL
if [ ${g_calc_result} -eq 0 ]
then
currency_converter $f_asset_quantity_remaining $f_asset $f_currency "${f_date}"
currency_converter $f_asset_quantity_remaining $f_asset $f_currency "${f_date}" || g_echo_warn "Error converting currency"
f_currency_remaining=$f_currency_converter_result
if g_num_is_between $f_currency_remaining -5 5
then
f_dust=$f_currency_remaining
currency_converter $f_asset_quantity_remaining $f_asset EUR "${f_date}"
currency_converter $f_asset_quantity_remaining $f_asset EUR "${f_date}" || g_echo_warn "Error converting currency to EUR"
f_dust_eur=$f_currency_converter_result
g_echo_note "Quantity ($f_asset_quantity $f_asset - $f_dust (USD) looks like dust - Ending trade" >>ALL_TRANSACTIONS_OVERVIEW.log
f_note="$f_asset_quantity $f_asset - $f_dust (USD) looks like dust - Ending trade"

View File

@ -2,11 +2,9 @@ function webpage {
g_echo_note "RUNNING FUNCTION ${FUNCNAME} $@"
charts
webpage_transactions
# Get charts.css
[ -e ../charts.min.css ] || wget ${g_wget_opts} -q https://raw.githubusercontent.com/ChartsCSS/charts.css/main/dist/charts.min.css -O ../charts.min.css
# create status webpage
echo "<html>
<head>
@ -193,34 +191,6 @@ function webpage {
echo "<a href=\"botdata/MARKET_PERFORMANCE.csv\">Complete list</a>" >>../index.html.tmp
# echo '<h2>Available Assets and histories</h2>' >>../index.html.tmp
# echo "<table width='100%'><tr>" >>../index.html.tmp
# echo "<tr><td>Asset</td><td>Date</td><td>Price ${CURRENCY}</td><td>24h change (USD)</td></tr>" >>../index.html.tmp
# local asset
# cat ASSETS | egrep -v "${BLACKLIST}" | sort | while read asset
# do
# [ -s asset-histories/${asset}.history.csv ] || continue
# echo "<td><a href=\"botdata/asset-histories/${asset}.history.csv\">${asset}</a> <a href=\"https://www.coingecko.com/de/munze/$(egrep -i ^$(echo ${asset} | sed "s/${CURRENCY}$//"), COINGECKO_IDS | cut -d, -f2 )\">🔗</a></td>" >>../index.html.tmp
# kcurrency=$(echo ${asset} | sed "s/${CURRENCY}//")
# #get_rate_percentage_min_before_and_now ${kcurrency} ${CURRENCY} 1440
# local assetin=($(cat asset-histories/${asset}.history.csv | egrep -v "0.00000000$" | tail -n2 | head -n1 | sed 's/,/ /g'))
# echo "<td>${assetin[0]} ${assetin[1]}</td>
# <td>${CURRENCY} ${assetin[2]}</td>
# <td> $(grep "^$kcurrency," ASSET_PRICE_CHANGE_PERCENTAGE_24H | cut -d, -f2)%</td>
# </tr><tr><td colspan='4'>" >>../index.html.tmp
# echo "<details><summary>${asset} Charts</summary>" >>../index.html.tmp
# echo "Price, EMA, Levels" >>../index.html.tmp
# genchart asset-histories/${asset}.history.csv 50 2,25,26,27,28,29,30,31,32,33,34,35,5,36,37,38 green,DarkSlateGrey,DarkSlateGrey,Gold,DarkOrange,DarkOrange,GoldenRod,GoldenRod,GoldenRod,GoldenRod,DarkOrange,DarkOrange,MidnightBlue,Indigo,DarkSlateBlue,DodgerBlue,DeepSkyBlue >>../index.html.tmp
# echo "MACD" >>../index.html.tmp
# genchart asset-histories/${asset}.history.csv 50 8,6,7 >>../index.html.tmp
# echo "RSIs" >>../index.html.tmp
# genchart asset-histories/${asset}.history.csv 50 10,11,12,14,15,16,17,13 >>../index.html.tmp
# echo "</details></td></tr>" >>../index.html.tmp
#
# done
# echo "</table>" >>../index.html.tmp
# color magic
cat ../index.html.tmp | perl -pe 's/ (\-[0-9]+\.[0-9]+\%)/<font color=red>$1<\/font>/g; s/ ([0-9]+\.[0-9]+\%)/<font color=green>$1<\/font>/g;' >../index.html

23
data/charts.html Normal file
View File

@ -0,0 +1,23 @@
<!DOCTYPE html>
<!-- Chart page: reads ?symbol=...&time=... from the URL (see charts.js)
     and renders stacked price/RSI/MACD/DXY charts into #container. -->
<html>
<head>
<meta charset="UTF-8" />
<!-- Lock zoom so the charts fill the viewport consistently on mobile -->
<meta
name="viewport"
content="width=device-width,initial-scale=1.0,maximum-scale=1.0,minimum-scale=1.0"
/>
<title>Charts</title>
<!-- Lightweight Charts library must load before charts.js, which uses it -->
<script type="text/javascript" src="lightweight-charts.standalone.production.js"></script>
<style>
body {
padding: 0;
margin: 0;
}
</style>
</head>
<body>
<!-- charts.js appends all chart panes into this full-screen container -->
<div id="container" style="position: absolute; width: 100%; height: 100%"></div>
<script type="text/javascript" src="charts.js"></script>
</body>
</html>

545
data/charts.js Normal file
View File

@ -0,0 +1,545 @@
// Page parameters, e.g. charts.html?symbol=BTC/USDT&time=1d
const urlParams = new URLSearchParams(window.location.search);
// Asset symbol whose history files are charted
const symbol = urlParams.get('symbol');
// Candle interval; used as the history-file name suffix
const time = urlParams.get('time');
// Pixel height of each RSI/MACD sub-chart pane
const heightrsimacdchart = 100
function timeToLocal(originalTime) {
const d = new Date(originalTime * 1000);
return Date.UTC(d.getFullYear(), d.getMonth(), d.getDate(), d.getHours(), d.getMinutes(), d.getSeconds(), d.getMilliseconds()) / 1000;
}
// Parse the last (up to) 1000 rows of an OHLCV+indicator CSV into arrays.
// Each valid row gets:
//   cols[0]   converted from a date string to local-time epoch seconds
//   cols[100] a bar color for the MACD histogram, derived from cols[20]
//             (MACD histogram) and cols[23] (NOTE(review): presumably a
//             strength/trend indicator column — confirm against the writer)
// Rows with fewer than 23 columns (e.g. the trailing newline) are logged
// and skipped.
// Fixes: removed the unused `lastElements` slice and the comma-operator
// push that stored the row before its fields were finalized.
function parseCSV(data) {
  const rows = data.split("\n");
  const result = [];
  // Only render the most recent 1000 candles to keep the page responsive.
  const start = Math.max(rows.length - 1000, 0);
  for (let i = start; i < rows.length; i++) {
    const cols = rows[i].split(",");
    if (cols.length >= 23 && cols.every(element => element !== undefined && element !== null)) {
      // Parse the date to epoch seconds, then shift to local wall-clock time.
      cols[0] = Date.parse(cols[0]) / 1000;
      cols[0] = timeToLocal(cols[0]);
      // MACD histogram coloring: warm colors below zero, green above;
      // a stronger shade when cols[23] exceeds 20.
      if (cols[20] < 0) {
        cols[100] = cols[23] > 20 ? "red" : "orange";
      } else {
        cols[100] = cols[23] > 20 ? "green" : "lightgreen";
      }
      result.push(cols);
    } else {
      console.log("invalid line on linenr " + i + ": " + rows[i]);
    }
  }
  return result;
}
// Look up the hovered data point for `series` from a crosshair-move event.
// Returns null when the cursor is off the time scale or the series has no
// datum at the hovered time.
function getCrosshairDataPoint(series, param) {
  if (param.time) {
    return param.seriesData.get(series) || null;
  }
  return null;
}
// Mirror a crosshair position onto another chart, or clear it when the
// source chart has no data point under the cursor.
function syncCrosshair(chart, series, dataPoint) {
  if (!dataPoint) {
    chart.clearCrosshairPosition();
  } else {
    chart.setCrosshairPosition(dataPoint.value, dataPoint.time, series);
  }
}
// Create the Lightweight Chart within the container element
// Main price pane: dark theme, free-moving crosshair, intraday x-axis labels.
const chart = LightweightCharts.createChart(document.getElementById('container'),
{
rightPriceScale: {
// Fixed label column width so all stacked panes align horizontally
minimumWidth: 100,
borderVisible: false
},
height: 500,
crosshair: {
// 0 = Normal mode: crosshair follows the cursor freely (no magnet)
mode: 0,
},
timeScale: {
timeVisible: true,
secondsVisible: false,
},
layout: {
background: {
type: 'solid',
color: '#222',
},
textColor: '#DDD',
},
grid: {
vertLines: { color: '#444' },
horzLines: { color: '#444' },
},
});
// Watermark labels the pane with the charted symbol and interval
chart.applyOptions({
watermark: {
visible: true,
fontSize: 18,
horzAlign: 'top',
vertAlign: 'left',
color: '#DDD',
text: symbol + " " + time,
}
});
// define chart
// Candlestick series plus one line series per EMA period (12..800);
// data is filled in later from the fetched history CSV.
const candleSeries = chart.addCandlestickSeries({upColor: 'green', wickUpColor: 'green', downColor: 'red', wickDownColor: 'red', borderVisible: false,});
const lineSeriesEMA12 = chart.addLineSeries({ color: 'red', lineWidth: 1, priceLineVisible: false, title: 'EMA12'});
const lineSeriesEMA26 = chart.addLineSeries({ color: 'pink', lineWidth: 1, lineStyle: 2, priceLineVisible: false, title: 'EMA26'});
const lineSeriesEMA50 = chart.addLineSeries({ color: 'cyan', lineWidth: 1, priceLineVisible: false, title: 'EMA50'});
const lineSeriesEMA100 = chart.addLineSeries({ color: 'yellow', lineWidth: 1, priceLineVisible: false, title: 'EMA100'});
const lineSeriesEMA200 = chart.addLineSeries({ color: 'white', lineWidth: 1, priceLineVisible: false, title: 'EMA200'});
const lineSeriesEMA400 = chart.addLineSeries({ color: 'orange', lineWidth: 1, priceLineVisible: false, title: 'EMA400'});
const lineSeriesEMA800 = chart.addLineSeries({ color: 'purple', lineWidth: 1, priceLineVisible: false, title: 'EMA800'});
// RSI Chart
// Sub-pane for RSI(5/14/21). Its own time axis is hidden; the visible range
// is driven by the main chart's time scale instead.
const chartrsi = LightweightCharts.createChart(document.getElementById("container"),
{
rightPriceScale: {
minimumWidth: 100,
borderVisible: false
},
height: heightrsimacdchart,
timeScale: {
visible: false,
},
layout: {
background: {
type: 'solid',
color: '#222',
},
textColor: '#DDD',
},
grid: {
vertLines: { color: '#444' },
horzLines: { color: '#444' },
},
});
chartrsi.applyOptions({
watermark: {
visible: true,
fontSize: 18,
horzAlign: 'top',
vertAlign: 'left',
color: '#DDD',
text: 'RSI 5,14,21',
}
});
// RSI14 is the primary line (thicker); RSI5/RSI21 are dashed context lines
const lineSeriesRSI5 = chartrsi.addLineSeries({ color: 'orange', lineWidth: 1, lineStyle: 2, priceLineVisible: false, title: 'RSI5'});
const lineSeriesRSI14 = chartrsi.addLineSeries({ color: 'yellow', lineWidth: 2, priceLineVisible: false, title: 'RSI14'});
const lineSeriesRSI21 = chartrsi.addLineSeries({ color: 'lightgreen', lineWidth: 1, lineStyle: 2, priceLineVisible: false, title: 'RSI21'});
// MACD Chart
// Sub-pane for MACD(12,26): MACD line, signal line and colored histogram.
const chartmacd = LightweightCharts.createChart(document.getElementById("container"),
{
rightPriceScale: {
minimumWidth: 100,
borderVisible: false
},
height: heightrsimacdchart,
timeScale: {
timeVisible: true,
secondsVisible: false,
},
layout: {
background: {
type: 'solid',
color: '#222',
},
textColor: '#DDD',
},
grid: {
vertLines: { color: '#444' },
horzLines: { color: '#444' },
},
});
chartmacd.applyOptions({
watermark: {
visible: true,
fontSize: 18,
horzAlign: 'top',
vertAlign: 'left',
color: '#DDD',
text: 'MACD 12 26',
}
});
const lineSeriesMACD = chartmacd.addLineSeries({ color: 'blue', lineWidth: 1, lineStyle: 0, priceLineVisible: false, title: 'MACD'});
const lineSeriesMACDSignal = chartmacd.addLineSeries({ color: 'orange', lineWidth: 1, lineStyle: 0, priceLineVisible: false, title: 'Signal'});
// Histogram bar colors come per-point from parseCSV (cols[100]).
// NOTE(review): 'color' is not a priceFormat option and is presumably
// ignored here — confirm against the Lightweight Charts API.
const histogramSeriesMACD = chartmacd.addHistogramSeries({
priceFormat: {
type: 'volume',
color: 'orange',
},
priceLineVisible: false,
});
// Load the symbol's OHLCV+indicator history and feed every series.
// CSV column layout (see parseCSV): 0=time, 1..4=open/high/low/close,
// 8..14=EMA 12/26/50/100/200/400/800, 15..17=RSI 5/14/21,
// 18=MACD, 19=MACD signal, 20=MACD histogram, 100=histogram color.
// Fix: the original called candleSeries.setData(bars) twice in a row;
// the repeated per-column map() calls are factored into lineData().
fetch("/botdata/asset-histories/" + symbol + ".history." + time + ".csv", { cache: 'no-store' })
  .then(response => response.text())
  .then(data => {
    const parsedData = parseCSV(data);
    // Map one CSV column index to {time, value} points for a line series.
    const lineData = (column) => parsedData.map(item => ({
      time: item[0],
      value: item[column]
    }));
    // OHLC candles
    candleSeries.setData(parsedData.map(item => ({
      time: item[0],
      open: item[1],
      high: item[2],
      low: item[3],
      close: item[4]
    })));
    // EMA Data
    lineSeriesEMA12.setData(lineData(8));
    lineSeriesEMA26.setData(lineData(9));
    lineSeriesEMA50.setData(lineData(10));
    lineSeriesEMA100.setData(lineData(11));
    lineSeriesEMA200.setData(lineData(12));
    lineSeriesEMA400.setData(lineData(13));
    lineSeriesEMA800.setData(lineData(14));
    // RSI Data
    lineSeriesRSI5.setData(lineData(15));
    lineSeriesRSI14.setData(lineData(16));
    lineSeriesRSI21.setData(lineData(17));
    // MACD line, signal line and per-point-colored histogram
    lineSeriesMACD.setData(lineData(18));
    lineSeriesMACDSignal.setData(lineData(19));
    histogramSeriesMACD.setData(parsedData.map(item => ({
      time: item[0],
      value: item[20],
      color: item[100]
    })));
  });
// Lines for price levels
// Draw one horizontal support/resistance line per price in the .levels file.
// Fix: the raw split('\n') fed blank trailing lines and string values into
// createPriceLine, which expects a numeric price — filter and parseFloat.
fetch("/botdata/asset-histories/" + symbol + ".history.csv.levels", { cache: 'no-store' })
  .then(response => response.text())
  .then(text => {
    const levels = text.split('\n');
    levels.forEach(function(level) {
      const price = parseFloat(level);
      // Skip empty lines (e.g. the trailing newline) and unparseable rows.
      if (!isNaN(price)) {
        candleSeries.createPriceLine({price: price, color: "blue", lineWidth: 0.5, lineStyle: 3, axisLabelVisible: true, title: 'Level'});
      }
    });
  });
// Lines for RSIs
// Static guide lines on the RSI(14) pane marking the 45/50/55 band.
lineSeriesRSI14.createPriceLine({price: 45, color: "green", lineWidth: 0.5, lineStyle: 3, axisLabelVisible: false});
lineSeriesRSI14.createPriceLine({price: 50, color: "lightyellow", lineWidth: 0.5, lineStyle: 3, axisLabelVisible: false});
lineSeriesRSI14.createPriceLine({price: 55, color: "red", lineWidth: 0.5, lineStyle: 3, axisLabelVisible: false});
// DXY //
// Second full price pane for the US dollar index (ECONOMY-DXY history),
// mirroring the main chart's styling.
const DXYchart = LightweightCharts.createChart(document.getElementById('container'),
{
rightPriceScale: {
minimumWidth: 100,
borderVisible: false
},
height: 500,
crosshair: {
// 0 = Normal mode: crosshair follows the cursor freely (no magnet)
mode: 0,
},
timeScale: {
timeVisible: true,
secondsVisible: false,
},
layout: {
background: {
type: 'solid',
color: '#222',
},
textColor: '#DDD',
},
grid: {
vertLines: { color: '#444' },
horzLines: { color: '#444' },
},
});
DXYchart.applyOptions({
watermark: {
visible: true,
fontSize: 18,
horzAlign: 'top',
vertAlign: 'left',
color: '#DDD',
// NOTE(review): no separator between "DXY" and the interval (e.g. "DXY1d")
text: "DXY" + time,
}
});
// define DXY chart
// Candles plus a reduced EMA set (50/200/800) for the index.
const DXYcandleSeries = DXYchart.addCandlestickSeries({upColor: 'green', wickUpColor: 'green', downColor: 'red', wickDownColor: 'red', borderVisible: false,});
const DXYlineSeriesEMA200 = DXYchart.addLineSeries({ color: 'white', lineWidth: 1, priceLineVisible: false, title: 'EMA200'});
const DXYlineSeriesEMA800 = DXYchart.addLineSeries({ color: 'purple', lineWidth: 1, priceLineVisible: false, title: 'EMA800'});
const DXYlineSeriesEMA50 = DXYchart.addLineSeries({ color: 'cyan', lineWidth: 1, priceLineVisible: false, title: 'EMA50'});
// DXY RSI Chart
// RSI sub-pane for DXY; time axis hidden, range synced to the main chart.
const DXYchartrsi = LightweightCharts.createChart(document.getElementById("container"),
{
rightPriceScale: {
minimumWidth: 100,
borderVisible: false
},
height: heightrsimacdchart,
timeScale: {
visible: false,
},
layout: {
background: {
type: 'solid',
color: '#222',
},
textColor: '#DDD',
},
grid: {
vertLines: { color: '#444' },
horzLines: { color: '#444' },
},
});
DXYchartrsi.applyOptions({
watermark: {
visible: true,
fontSize: 18,
horzAlign: 'top',
vertAlign: 'left',
color: '#DDD',
text: 'DXY RSI 5,14,21',
}
});
const DXYlineSeriesRSI5 = DXYchartrsi.addLineSeries({ color: 'orange', lineWidth: 1, lineStyle: 2, priceLineVisible: false, title: 'RSI5'});
const DXYlineSeriesRSI14 = DXYchartrsi.addLineSeries({ color: 'yellow', lineWidth: 2, priceLineVisible: false, title: 'RSI14'});
const DXYlineSeriesRSI21 = DXYchartrsi.addLineSeries({ color: 'lightgreen', lineWidth: 1, lineStyle: 2, priceLineVisible: false, title: 'RSI21'});
// Load the DXY history for the same interval and feed the DXY series.
// Column layout matches parseCSV: 0=time, 1..4=OHLC, 10/12/14=EMA 50/200/800,
// 15..17=RSI 5/14/21.
// Consistency fix: the local result variable was misspelled "DYXparsedData".
fetch("/botdata/asset-histories/ECONOMY-DXY.history." + time + ".csv", { cache: 'no-store' })
  .then(response => response.text())
  .then(data => {
    const DXYparsedData = parseCSV(data);
    // Map one CSV column index to {time, value} points for a line series.
    const lineData = (column) => DXYparsedData.map(item => ({
      time: item[0],
      value: item[column]
    }));
    // OHLC Data
    DXYcandleSeries.setData(DXYparsedData.map(item => ({
      time: item[0],
      open: item[1],
      high: item[2],
      low: item[3],
      close: item[4]
    })));
    // EMA Data
    DXYlineSeriesEMA50.setData(lineData(10));
    DXYlineSeriesEMA200.setData(lineData(12));
    DXYlineSeriesEMA800.setData(lineData(14));
    // RSI Data
    DXYlineSeriesRSI5.setData(lineData(15));
    DXYlineSeriesRSI14.setData(lineData(16));
    DXYlineSeriesRSI21.setData(lineData(17));
  });
// Sync charts timeScale
// The main chart is the hub: when its visible range changes it pushes the
// range to every sub-chart; when a sub-chart pans/zooms it pushes its range
// back to the main chart, whose handler then fans it out again.
chart.timeScale().fitContent();
chart.timeScale().subscribeVisibleLogicalRangeChange(timeRange => {
chartrsi.timeScale().setVisibleLogicalRange(timeRange);
chartmacd.timeScale().setVisibleLogicalRange(timeRange);
DXYchartrsi.timeScale().setVisibleLogicalRange(timeRange);
DXYchart.timeScale().setVisibleLogicalRange(timeRange);
});
chartrsi.timeScale().subscribeVisibleLogicalRangeChange(timeRange => {
chart.timeScale().setVisibleLogicalRange(timeRange);
});
chartmacd.timeScale().subscribeVisibleLogicalRangeChange(timeRange => {
chart.timeScale().setVisibleLogicalRange(timeRange);
});
DXYchart.timeScale().subscribeVisibleLogicalRangeChange(timeRange => {
chart.timeScale().setVisibleLogicalRange(timeRange);
});
DXYchartrsi.timeScale().subscribeVisibleLogicalRangeChange(timeRange => {
chart.timeScale().setVisibleLogicalRange(timeRange);
});
// Crosshair sync: each chart mirrors its hovered data point onto the others.
// The source chart's lookup series determines the value shown on the targets.
// Moves on the main price chart drive the RSI, MACD and DXY panes.
chart.subscribeCrosshairMove(param => {
const dataPoint = getCrosshairDataPoint(lineSeriesEMA50, param);
syncCrosshair(chartrsi, lineSeriesRSI14, dataPoint);
const dataPointmacd = getCrosshairDataPoint(lineSeriesEMA50, param);
syncCrosshair(chartmacd, lineSeriesMACD, dataPointmacd);
// NOTE(review): the DXY chart is positioned using the symbol chart's EMA50
// value, not a DXY value — presumably only the time component matters here.
const DXYdataPoint = getCrosshairDataPoint(lineSeriesEMA50, param);
syncCrosshair(DXYchart, DXYlineSeriesEMA50, DXYdataPoint);
});
// Moves on the RSI pane drive the main, MACD and DXY charts.
chartrsi.subscribeCrosshairMove(param => {
const dataPoint = getCrosshairDataPoint(lineSeriesRSI14, param);
syncCrosshair(chart, lineSeriesEMA50, dataPoint);
const dataPointmacd = getCrosshairDataPoint(lineSeriesRSI14, param);
syncCrosshair(chartmacd, lineSeriesMACD, dataPointmacd);
const DXYdataPoint = getCrosshairDataPoint(lineSeriesRSI14, param);
syncCrosshair(DXYchart, DXYlineSeriesEMA50, DXYdataPoint);
});
// Moves on the MACD pane drive the main, RSI and DXY charts.
chartmacd.subscribeCrosshairMove(param => {
const dataPoint = getCrosshairDataPoint(lineSeriesMACD, param);
syncCrosshair(chart, lineSeriesEMA50, dataPoint);
const dataPointrsi = getCrosshairDataPoint(lineSeriesMACD, param);
syncCrosshair(chartrsi, lineSeriesRSI14, dataPointrsi);
const DXYdataPoint = getCrosshairDataPoint(lineSeriesMACD, param);
syncCrosshair(DXYchart, DXYlineSeriesEMA50, DXYdataPoint);
});
// Propagate crosshair moves on the DXY chart back to the symbol charts.
// All three targets are positioned from the same DXY EMA50 data point, so a
// single lookup suffices.
// Fix: the MACD chart was previously synced with the variable intended for
// the RSI chart (dataPointrsi) instead of its own MACD data point.
DXYchart.subscribeCrosshairMove(param => {
const dataPoint = getCrosshairDataPoint(DXYlineSeriesEMA50, param);
syncCrosshair(chart, lineSeriesEMA50, dataPoint);
syncCrosshair(chartrsi, lineSeriesRSI14, dataPoint);
syncCrosshair(chartmacd, lineSeriesMACD, dataPoint);
});