yahoo finance
This commit is contained in:
parent 4dc63bd6ab
commit 29e04d9533
@@ -143,7 +143,7 @@ function get_marketdata {
 fi
 rm "${f_histfile}.tmp"
 
-# calc indicators ans if 1d then generate 1w histfile
+# calc indicators and if 1d then generate 1w histfile
 if [[ $f_interval = 1d ]]
 then
 get_indicators "${f_histfile}"
@@ -154,5 +154,3 @@ function get_marketdata {
 fi
 }
 
-#https://production.dataviz.cnn.io/index/fearandgreed/graphdata
-
@@ -86,38 +86,48 @@ function get_marketdata_yahoo {
 # cleanup
 rm -f "$f_targetcsvtmp" "${f_targetcsvtmp}".err ${f_targetjsontmp} "${f_targetjsontmp}".err
 
-if [ "$f_timeframe" = "1d" ] || [ "$f_timeframe" = "1wk" ] || [ "$f_timeframe" = "1mo" ]
-then
-### DEPRECATED - Yahoo Finance deactivated public API 2024-09 !!! ###
-g_echo_note "DEPRECATED - Yahoo Finance deactivated public API 2024-09 !!!"
-return 1
-## restrict to finished candles
-#[ "$f_timeframe" = "1d" ] && f_sec=$(TZ=US/NY date -d 'last day 0:00' '+%s')
-#[ "$f_timeframe" = "1wk" ] && f_sec=$(TZ=US/NY date -d 'last monday 0:00' '+%s')
-#[ "$f_timeframe" = "1mo" ] && f_sec=$(TZ=US/NY date -d "$(date -d "@$(date -d "last month" +%s)" +'%Y-%m-01')" +%s)
-## Download historical data from yahoo
-#g_wget -O "${f_targetcsvtmp}" "https://query1.finance.yahoo.com/v7/finance/download/${f_item}?period1=0&period2=${f_sec}&interval=${f_timeframe}&events=history" 2>"${f_targetcsvtmp}".err
-else
-# Download data from yahoo
-g_wget -O "${f_targetjsontmp}" "https://query1.finance.yahoo.com/v7/finance/chart/${f_item}?interval=${f_timeframe}&period2=${f_sec}" 2>"${f_targetjsontmp}".err
-jq -r '.chart.result[0] as $result | range(0; $result.timestamp | length) | [$result.timestamp[.], $result.indicators.quote[0].open[.], $result.indicators.quote[0].high[.], $result.indicators.quote[0].low[.], $result.indicators.quote[0].close[.], $result.indicators.quote[0].volume[.]] | @csv' "${f_targetjsontmp}" >"${f_targetcsvtmp}.unixtime" 2>"${f_targetjsontmp}".err
-
-# change unix time to human readable and fill unfilled lines, ignore lines not with 00 secolds (last line)
-local date_time open high low close lastopen lasthigh lastlow lastclose volume
-while IFS=, read -r timestamp open high low close volume; do
-date_time=$(printf "%(%Y-%m-%d %H:%M:%S)T" $timestamp)
-[ -z "$open" ] && open=$lastopen
-[ -z "$high" ] && high=$lasthigh
-[ -z "$low" ] && low=$lastlow
-[ -z "$close" ] && close=$lastclose
-[ -z "$volume" ] && volume=0
-lastopen=$open
-lasthigh=$high
-lastlow=$low
-lastclose=$close
-echo "$date_time,$open,$high,$low,$close,$volume"
-done < "${f_targetcsvtmp}.unixtime" | grep ":00," >${f_targetcsvtmp}
-fi
+local f_from
+[ "$f_timeframe" = "5m" ] && f_from=$(date -d "now -86000 minutes" +%s)
+[ "$f_timeframe" = "15m" ] && f_from=$(date -d "now -86000 minutes" +%s)
+[ "$f_timeframe" = "1h" ] && f_from=$(date -d "now -17510 hour" +%s)
+[ "$f_timeframe" = "1d" ] && f_from=1
+[ "$f_timeframe" = "1wk" ] && f_from=1
+[ "$f_timeframe" = "1mo" ] && f_from=1
+
+# Download data from yahoo
+g_wget -O "${f_targetjsontmp}" "https://query1.finance.yahoo.com/v8/finance/chart/${f_item}?interval=${f_timeframe}&period1=${f_from}&period2=${f_sec}" 2>"${f_targetjsontmp}".err
+
+# Create csv from json
+jq -r '.chart.result[0] as $result | range(0; $result.timestamp | length) | [$result.timestamp[.], $result.indicators.quote[0].open[.], $result.indicators.quote[0].high[.], $result.indicators.quote[0].low[.], $result.indicators.quote[0].close[.], $result.indicators.quote[0].volume[.]] | @csv' "${f_targetjsontmp}" >"${f_targetcsvtmp}.unixtime" 2>"${f_targetjsontmp}".err
+
+# remove last/open timeframe (use only closed)
+sed -i '$d' "${f_targetcsvtmp}.unixtime"
+
+# change unix time to human readable and fill unfilled lines, ignore lines not with 00 secolds (last line)
+local date_time open high low close lastopen lasthigh lastlow lastclose volume
+while IFS=, read -r timestamp open high low close volume
+do
+if [ "$f_timeframe" = "1d" ] || [ "$f_timeframe" = "1mo" ]
+then
+printf -v date_time "%(%Y-%m-%d)T" $timestamp
+elif [ "$f_timeframe" = "1wk" ]
+then
+# on week 1 day back like crypto assets
+date_time=$(date -d "yesterday $(date -d "@$timestamp" "+%Y-%m-%d")" "+%Y-%m-%d")
+else
+printf -v date_time "%(%Y-%m-%d %H:%M:%S)T" $timestamp
+fi
+[ -z "$open" ] && open=$lastopen
+[ -z "$high" ] && high=$lasthigh
+[ -z "$low" ] && low=$lastlow
+[ -z "$close" ] && close=$lastclose
+[ -z "$volume" ] && volume=0
+lastopen=$open
+lasthigh=$high
+lastlow=$low
+lastclose=$close
+echo "$date_time,$open,$high,$low,$close,$volume"
+done < "${f_targetcsvtmp}.unixtime" >${f_targetcsvtmp}
 
 # error if no csvfile available
 if ! [ -s "${f_targetcsvtmp}" ]
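
Note on the rewritten fetch path: the intraday look-back values work out to roughly 60 days for 5m/15m (86000 minutes) and roughly two years for 1h (17510 hours), while 1d/1wk/1mo request the full history (period1=1). Below is a minimal standalone sketch of the same v8 chart call and jq flattening, outside the script's g_wget/g_echo helpers; the symbol, the period values and the use of curl are placeholders, and it assumes the endpoint still answers plain unauthenticated requests as the diff implies.

f_item="^GSPC"
f_timeframe="1d"
f_from=1                  # period1: epoch start, 1 = as far back as available
f_sec=$(date +%s)         # period2: now
curl -s "https://query1.finance.yahoo.com/v8/finance/chart/${f_item}?interval=${f_timeframe}&period1=${f_from}&period2=${f_sec}" \
  | jq -r '.chart.result[0] as $result
           | range(0; $result.timestamp | length)
           | [$result.timestamp[.],
              $result.indicators.quote[0].open[.],
              $result.indicators.quote[0].high[.],
              $result.indicators.quote[0].low[.],
              $result.indicators.quote[0].close[.],
              $result.indicators.quote[0].volume[.]]
           | @csv' \
  | head -n3

Each output row has the shape "unixtime,open,high,low,close,volume"; the while-read loop above then rewrites the timestamp as a date (1d/1mo), the previous day (1wk) or a full date-time (intraday) and fills empty OHLC fields from the previous candle.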
@@ -138,4 +148,3 @@ function get_marketdata_yahoo {
 fi
 
 }
-
@@ -35,21 +35,21 @@ function get_ohlcv-candles {
 g_echo_note "Fetching/Refreshing $f_eco_asset $f_timeframe"
 f_histfile="asset-histories/ECONOMY-${f_eco_asset}.history.${f_timeframe}.csv"
 
-# 4h timefrage does not exist on coinmarketcap finance so calc from 1h
+# 4h timeframe does not exist on yahoo finance so calc from 1h
 if [ "$f_timeframe" = "4h" ]
 then
 f_1h_histfile="asset-histories/ECONOMY-${f_eco_asset}.history.1h.csv"
 [ -s "$f_1h_histfile" ] && convert_ohlcv_1h_to_4h "$f_1h_histfile" "$f_histfile"
 f_add_missing_ohlcv_intervals "$f_histfile" 4h
-elif [ "$f_timeframe" = "1d" ]
-then
-f_1h_histfile="asset-histories/ECONOMY-${f_eco_asset}.history.1h.csv"
-[ -s "$f_1h_histfile" ] && convert_ohlcv_1h_to_1d "$f_1h_histfile" "$f_histfile"
-f_add_missing_ohlcv_intervals "$f_histfile" 1d
-elif [ "$f_timeframe" = "1w" ]
-then
-f_1d_histfile="asset-histories/ECONOMY-${f_eco_asset}.history.1d.csv"
-[ -s "$f_1d_histfile" ] && convert_ohlcv_1d_to_1w "$f_1d_histfile" "$f_histfile"
+# elif [ "$f_timeframe" = "1d" ]
+# then
+# f_1h_histfile="asset-histories/ECONOMY-${f_eco_asset}.history.1h.csv"
+# [ -s "$f_1h_histfile" ] && convert_ohlcv_1h_to_1d "$f_1h_histfile" "$f_histfile"
+# f_add_missing_ohlcv_intervals "$f_histfile" 1d
+# elif [ "$f_timeframe" = "1w" ]
+# then
+# f_1d_histfile="asset-histories/ECONOMY-${f_eco_asset}.history.1d.csv"
+# [ -s "$f_1d_histfile" ] && convert_ohlcv_1d_to_1w "$f_1d_histfile" "$f_histfile"
 else
 get_ohlcv-candle "${f_eco_asset}" ${f_timeframe} "${f_histfile}" "ECONOMY-${f_eco_asset}"
 fi
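
Note: the 4h branch relies on convert_ohlcv_1h_to_4h, which is outside this diff. As a rough illustration of the aggregation it implies (a sketch under that assumption, not the project's implementation), hourly rows can be bucketed on 4-hour boundaries, keeping the first open, the highest high, the lowest low, the last close and the summed volume. awk is used here because bash arithmetic is integer-only and the prices are decimals; the file names are placeholders matching the asset-histories naming above.

awk -F, '
  {
    split($1, dt, " "); split(dt[2], hms, ":")
    # bucket key: the 4-hour boundary this hourly row falls into
    bucket = dt[1] " " sprintf("%02d:00:00", int(hms[1] / 4) * 4)
    if (bucket != cur) {
      # a new bucket starts: flush the finished one first
      if (cur != "") print cur "," o "," h "," l "," c "," v
      cur = bucket; o = $2; h = $3; l = $4; v = 0
    }
    if ($3 + 0 > h + 0) h = $3     # highest high so far
    if ($4 + 0 < l + 0) l = $4     # lowest low so far
    c = $5; v += $6                # close of last row, summed volume
  }
  END { if (cur != "") print cur "," o "," h "," l "," c "," v }
' ECONOMY-EXAMPLE.history.1h.csv > ECONOMY-EXAMPLE.history.4h.csv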
@@ -358,126 +358,126 @@ function convert_ohlcv_1h_to_4h {
 }
 
 
-function convert_ohlcv_1h_to_1d {
-
-g_echo_note "RUNNING FUNCTION ${FUNCNAME} $@"
-
-local f_input_file="$1"
-local f_output_file="$2"
-
-local f_latestdate f_nextdate f_mytimezone f_line f_date f_open f_high f_low f_close f_volume f_inday i
-
-if ! [ -s "$f_input_file" ]
-then
-g_echo_error "$f_input_file"
-return 0
-fi
-
-# crypto timezone UTC
-local f_target_timezone=UTC
-# US economy timezone America/New_York
-[[ $f_input_file =~ ECONOMY ]] && f_target_timezone="America/New_York"
-
-[ -s "$f_output_file" ] && f_latestdate=$(tail -n1 "$f_output_file" | cut -d, -f1)
-[ -z "$f_latestdate" ] && f_latestdate=$(date -d "$(head -n1 "$f_input_file" | cut -d, -f1)" +%Y-%m-%d)
-f_latestdate=$(TZ="$f_target_timezone" date -d "$f_latestdate $f_mytimezone" "+%Y-%m-%d")
-f_nextdate=$(date -d "$f_latestdate +1day" "+%Y-%m-%d")
-
-#echo $f_latestdate
-#echo $f_nextdate
-
-# mytimezone, respecting summer/winter time
-f_mytimezone=$(date -d "$_latestdate" +%Z)
-
-local f_today=$(TZ="$f_target_timezone" date "+%Y-%m-%d")
-# check if there is a $f_latestdate
-grep -A9999 -B24 "^$f_latestdate" "$f_input_file" >"$g_tmp/convert_ohlcv_1h_to_1d_nextlines"
-if ! [ -s "$g_tmp/convert_ohlcv_1h_to_1d_nextlines" ]
-then
-cat "$f_input_file" >"$g_tmp/convert_ohlcv_1h_to_1d_nextlines"
-f_nextdate=$(date -d "$(head -n1 "$g_tmp/convert_ohlcv_1h_to_1d_nextlines" | cut -d, -f1)" +%Y-%m-%d)
-fi
-
-# go through lines and switch to $f_target_timezone
-cat "$g_tmp/convert_ohlcv_1h_to_1d_nextlines" | grep ':00:00,' | cut -d, -f1,2,3,4,5,6 | while read f_line
-do
-g_array "$f_line" g_line_array ,
-# calculate day in target timezone
-g_line_array[0]=$(TZ="$f_target_timezone" date -d "${g_line_array[0]} $f_mytimezone" "+%Y-%m-%d")
-[[ ${g_line_array[0]} = $f_today ]] && break
-echo "${g_line_array[0]},${g_line_array[1]},${g_line_array[2]},${g_line_array[3]},${g_line_array[4]},${g_line_array[5]}"
-done >"${f_output_file}.tmp"
-
-# check if $f_nextdate really exists in $f_target_timezone if not add a day until it exists
-# useful for weekends
-i=1
-until grep -q "^$f_nextdate" "${f_output_file}.tmp"
-do
-echo $f_nextdate
-f_nextdate=$(date -d "$f_nextdate +1day" "+%Y-%m-%d")
-i=$((i++))
-if [ $i -gt 10 ]
-then
-g_echo_warn "${FUNCNAME} $@: no nextdate found after >10 iterations"
-return 1
-fi
-done
-
-# go through converted lines
-cat "${f_output_file}.tmp" | while read f_line
-do
-g_array "$f_line" g_line_array ,
-[[ ${g_line_array[0]} = $f_today ]] && break
-
-# wait untin next day in target file reached
-if [[ ${g_line_array[0]} = $f_nextdate ]]
-then
-f_end_reached=1
-else
-[ -z $f_end_reached ] && continue
-fi
-
-# if dayend
-if [ -n "$f_inday" ] && [[ $f_latestdate != ${g_line_array[0]} ]]
-then
-#echo "day end $f_date" 1>&2
-# day end
-f_close=${g_line_array[4]}
-echo "$f_date,$f_open,$f_high,$f_low,$f_close,$f_volume"
-f_inday=""
-fi
-
-# calc values if inday
-if [ -n "$f_inday" ]
-then
-#echo "in day $f_date" 1>&2
-# in day
-# add volume
-g_calc "$f_volume+${g_line_array[5]}"
-f_volume=$g_calc_result
-# look for higher high
-g_num_is_higher ${g_line_array[2]} $f_high && f_high=${g_line_array[2]}
-# look for lower low
-g_num_is_lower ${g_line_array[3]} $f_low && f_low=${g_line_array[3]}
-fi
-
-# if newday
-if [ -z "$f_inday" ]
-then
-#echo "day begin ${g_line_array[0]}" 1>&2
-# day begin
-f_inday=1
-f_date=${g_line_array[0]}
-f_latestdate=$f_date
-f_open=${g_line_array[1]}
-f_high=${g_line_array[2]}
-f_low=${g_line_array[3]}
-f_volume=${g_line_array[5]}
-fi
-
-done >>"$f_output_file"
-
-}
+#function convert_ohlcv_1h_to_1d {
+#
+# g_echo_note "RUNNING FUNCTION ${FUNCNAME} $@"
+#
+# local f_input_file="$1"
+# local f_output_file="$2"
+#
+# local f_latestdate f_nextdate f_mytimezone f_line f_date f_open f_high f_low f_close f_volume f_inday i
+#
+# if ! [ -s "$f_input_file" ]
+# then
+# g_echo_error "$f_input_file"
+# return 0
+# fi
+#
+# # crypto timezone UTC
+# local f_target_timezone=UTC
+# # US economy timezone America/New_York
+# [[ $f_input_file =~ ECONOMY ]] && f_target_timezone="America/New_York"
+#
+# [ -s "$f_output_file" ] && f_latestdate=$(tail -n1 "$f_output_file" | cut -d, -f1)
+# [ -z "$f_latestdate" ] && f_latestdate=$(date -d "$(head -n1 "$f_input_file" | cut -d, -f1)" +%Y-%m-%d)
+# f_latestdate=$(TZ="$f_target_timezone" date -d "$f_latestdate $f_mytimezone" "+%Y-%m-%d")
+# f_nextdate=$(date -d "$f_latestdate +1day" "+%Y-%m-%d")
+#
+# # mytimezone, respecting summer/winter time
+# f_mytimezone=$(date -d "$_latestdate" +%Z)
+#
+# local f_today=$(TZ="$f_target_timezone" date "+%Y-%m-%d")
+# # check if there is a $f_latestdate
+# grep -A9999 -B24 "^$f_latestdate" "$f_input_file" >"$g_tmp/convert_ohlcv_1h_to_1d_nextlines"
+# if ! [ -s "$g_tmp/convert_ohlcv_1h_to_1d_nextlines" ]
+# then
+# cat "$f_input_file" >"$g_tmp/convert_ohlcv_1h_to_1d_nextlines"
+# f_nextdate=$(date -d "$(head -n1 "$g_tmp/convert_ohlcv_1h_to_1d_nextlines" | cut -d, -f1)" +%Y-%m-%d)
+# fi
+#
+# # go through lines and switch to $f_target_timezone
+# cat "$g_tmp/convert_ohlcv_1h_to_1d_nextlines" | grep ':00:00,' | cut -d, -f1,2,3,4,5,6 | while read f_line
+# do
+# g_array "$f_line" g_line_array ,
+# # calculate day in target timezone
+# g_line_array[0]=$(TZ="$f_target_timezone" date -d "${g_line_array[0]} $f_mytimezone" "+%Y-%m-%d")
+# [[ ${g_line_array[0]} = $f_today ]] && break
+# echo "${g_line_array[0]},${g_line_array[1]},${g_line_array[2]},${g_line_array[3]},${g_line_array[4]},${g_line_array[5]}"
+# done >"${f_output_file}.tmp"
+#
+# # check if $f_nextdate really exists in $f_target_timezone if not add a day until it exists
+# # useful for weekends
+# i=1
+# until grep -q "^$f_nextdate" "${f_output_file}.tmp"
+# do
+# #echo $f_nextdate
+# [[ $f_nextdate = $f_today ]] && return 0
+# f_nextdate=$(date -d "$f_nextdate +1day" "+%Y-%m-%d")
+# i=$((i++))
+# if [ $i -gt 10 ]
+# then
+# g_echo_warn "${FUNCNAME} $@: no nextdate found after >10 iterations"
+# return 1
+# fi
+# done
+#
+# # set ent mark to store latest complete day
+# echo END >>"${f_output_file}.tmp"
+#
+# # go through converted lines
+# cat "${f_output_file}.tmp" | while read f_line
+# do
+# g_array "$f_line" g_line_array ,
+# [[ ${g_line_array[0]} = $f_today ]] && break
+#
+# # wait until next day in target file reached
+# if [[ ${g_line_array[0]} = $f_nextdate ]]
+# then
+# f_end_reached=1
+# else
+# [ -z $f_end_reached ] && continue
+# fi
+#
+# # if dayend
+# if [ -n "$f_inday" ] && [[ $f_latestdate != ${g_line_array[0]} ]]
+# then
+# # day end
+# echo "$f_date,$f_open,$f_high,$f_low,$f_close,$f_volume"
+# f_inday=""
+# fi
+#
+# # calc values if inday
+# if [ -n "$f_inday" ]
+# then
+# #echo "in day $f_date" 1>&2
+# # in day
+# # add volume
+# g_calc "$f_volume+${g_line_array[5]}"
+# f_volume=$g_calc_result
+# # look for higher high
+# g_num_is_higher ${g_line_array[2]} $f_high && f_high=${g_line_array[2]}
+# # look for lower low
+# g_num_is_lower ${g_line_array[3]} $f_low && f_low=${g_line_array[3]}
+# fi
+#
+# # if newday
+# if [ -z "$f_inday" ]
+# then
+# #echo "day begin ${g_line_array[0]}" 1>&2
+# # day begin
+# f_inday=1
+# f_date=${g_line_array[0]}
+# f_latestdate=$f_date
+# f_open=${g_line_array[1]}
+# f_high=${g_line_array[2]}
+# f_low=${g_line_array[3]}
+# f_close=${g_line_array[4]}
+# f_volume=${g_line_array[5]}
+# fi
+#
+# done >>"$f_output_file"
+#
+#}
 
 function convert_ohlcv_1d_to_1w {
 
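
Note: the function disabled above existed to roll UTC-stamped 1h candles into daily candles aligned to a target timezone (UTC for crypto, America/New_York for ECONOMY files). With the 1d branch of get_ohlcv-candles also commented out in this commit, daily data now appears to come straight from the Yahoo fetch path rather than being derived from 1h data. The core of the old conversion was the per-line timezone shift; a small illustration with GNU date:

# 00:00 UTC on 2024-04-15 is still the previous calendar day in New York (EDT, UTC-4):
TZ="America/New_York" date -d "2024-04-15 00:00:00 UTC" "+%Y-%m-%d %H:%M:%S"
# -> 2024-04-14 20:00:00

so an hourly candle can belong to a different day in the target timezone than its UTC date suggests, which is why the function tracked both zones.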
@@ -528,6 +528,7 @@ function convert_ohlcv_1d_to_1w {
 
 f_close_prices[$f_week_year]=$f_close
 
+[ -z "$f_volume" ] && f_volume=0
 g_calc "${f_volume_prices[$f_week_year]:-0}+$f_volume"
 f_volume_prices[$f_week_year]=$g_calc_result
 done
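
Note: the added guard presumably covers rows whose volume field is empty, which is common for index data; without it the string handed to g_calc would be malformed. A minimal illustration:

f_volume=""
echo "0+$f_volume"                 # -> "0+"   (not a valid arithmetic expression)
[ -z "$f_volume" ] && f_volume=0
echo "0+$f_volume"                 # -> "0+0"  (what g_calc can evaluate)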
@@ -535,7 +536,7 @@ function convert_ohlcv_1d_to_1w {
 # go through array(s) and write down missing week data
 for f_week_year in "${!f_open_prices[@]}"
 do
-f_week_date=$(date -d "${f_week_year:0:4}-01-01 +$((${f_week_year:4}-1)) week" +%F)
+f_week_date=$(date -d "${f_week_year:0:4}-01-01 +$((${f_week_year:4})) week -1day" +%F)
 # ignore if date alerady exists
 grep -q ^$f_week_date, "$f_output_file" && continue
 echo "$f_week_date,${f_open_prices[$f_week_year]},${f_high_prices[$f_week_year]},${f_low_prices[$f_week_year]},${f_close_prices[$f_week_year]},${f_volume_prices[$f_week_year]}"
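
Note on the changed week-date line: the old expression stamped each weekly row with the first day of week slot N counted from January 1st, while the new one stamps it with the last day of that slot (one day before slot N+1 begins), matching the "one day back" convention used elsewhere. A worked example with GNU date, assuming a placeholder value f_week_year=202415 (year 2024, week counter 15):

f_week_year=202415
date -d "${f_week_year:0:4}-01-01 +$((${f_week_year:4}-1)) week" +%F        # old -> 2024-04-08
date -d "${f_week_year:0:4}-01-01 +$((${f_week_year:4})) week -1day" +%F    # new -> 2024-04-14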