OHLC rework
src/charts/chart-misc.js
@@ -1,13 +1,17 @@
 import {useChartOrderStore} from "@/orderbuild.js";
 
-export function nearestOhlcStart(time) {
-    const period = useChartOrderStore().intervalSecs
-    return Math.round(time / period) * period
+// todo subtract OHLC root time
+const OHLC_START = new Date(1231027200*1000) // Sunday January 4th, 2009 just before Bitcoin Genesis
+
+export function nearestOhlcStart(time, periodSeconds=null) {
+    if (periodSeconds===null)
+        periodSeconds = useChartOrderStore().intervalSecs
+    return Math.round((time-OHLC_START) / periodSeconds) * periodSeconds + OHLC_START
 }
 
 
 export function pointsToOhlcStart(points) {
     return points === null ? null : points.map((p) => {
         return {time: nearestOhlcStart(p.time), price: p.price}
     })
 }
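Note: the rework anchors bucket boundaries at a fixed root (OHLC_START, just before the Bitcoin genesis block) instead of at the Unix epoch, and lets callers pass an explicit period. A minimal sketch of the same snapping, assuming the root is kept as a plain Unix-seconds number so the arithmetic stays numeric; the names below are illustrative, not from the repo:

    const ROOT_SECS = 1231027200  // Sunday 2009-01-04 00:00 UTC, the commit's OHLC_START in seconds

    function nearestBucketStart(timeSecs, periodSecs) {
        // Snap to the nearest bucket boundary, measured from the root rather than from 1970.
        return Math.round((timeSecs - ROOT_SECS) / periodSecs) * periodSecs + ROOT_SECS
    }

    // Example: a point 3700s past the root snaps to the hourly boundary at ROOT_SECS + 3600.
    // nearestBucketStart(ROOT_SECS + 3700, 3600) === ROOT_SECS + 3600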
@@ -1,6 +1,6 @@
 // import {subscribeOnStream, unsubscribeFromStream,} from './streaming.js';
 
-import {jBars, tvResolutionToPeriodString} from './jBars.js';
+import {loadOHLC} from './ohlc.js';
 import {metadata} from "@/version.js";
 import FlexSearch from "flexsearch";
 import {useChartOrderStore} from "@/orderbuild.js";
@@ -318,7 +318,7 @@ export const DataFeed = {
         // todo need to consider the selected fee tier
         let bars, metadata;
         const pool = useChartOrderStore().selectedPool;
-        [bars, metadata] = await jBars(lookupSymbol(symbolInfo.ticker), pool[0], from, to, resolution); // This is the one that does all the work
+        [bars, metadata] = await loadOHLC(lookupSymbol(symbolInfo.ticker), pool[0], from, to, resolution); // This is the one that does all the work
         if (firstDataRequest) {
             lastBarsCache.set(symbolInfo.full_name, {
                 ...bars[bars.length - 1],
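Note: loadOHLC keeps the [bars, {noData}] return shape that jBars used, so the surrounding getBars handler only needed the call swapped. For orientation, a sketch of how such a handler typically forwards the result to the TradingView datafeed callbacks; the periodParams/onHistoryCallback signature follows the standard charting-library datafeed interface and is an assumption here, not code from this commit:

    // Hypothetical wrapper; only loadOHLC, lookupSymbol, lastBarsCache and the selectedPool
    // lookup come from this repo, the rest follows the usual TradingView datafeed shape.
    getBars: async (symbolInfo, resolution, periodParams, onHistoryCallback, onErrorCallback) => {
        const {from, to, firstDataRequest} = periodParams
        try {
            const pool = useChartOrderStore().selectedPool
            const [bars, meta] = await loadOHLC(lookupSymbol(symbolInfo.ticker), pool[0], from, to, resolution)
            if (firstDataRequest && bars.length)
                lastBarsCache.set(symbolInfo.full_name, {...bars[bars.length - 1]})
            onHistoryCallback(bars, {noData: meta.noData})
        } catch (err) {
            onErrorCallback(err)
        }
    },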
src/charts/jBars.js (deleted file)
@@ -1,205 +0,0 @@
-import {useStore} from "@/store/store.js";
-
-
-// disable debug messages logging
-let console = { log: function() {} }
-
-
-const file_res = ['1m', '3m', '5m', '10m', '15m', '30m', '1H', '2H', '4H', '8H', '12H', '1D', '2D', '3D', '1W',];
-const supported_res = ['1', '3', '5', '10', '15', '30', '60', '120', '240', '480', '720', '1D', '2D', '3D', '1W',];
-
-const resMap = {}
-for (const i in file_res) {
-    resMap[supported_res[i]] = file_res[i]
-}
-
-export function tvResolutionToPeriodString(res) {
-    return resMap[res]
-}
-
-export async function jBars (symbol, contract, from, to, res) {
-    console.log('[jBars]: Method call', res, from, to, symbol, contract);
-
-    const toDate = new Date(to*1000);
-    const fromDate = new Date(from*1000);
-    if (res==="1W") { // for 1W, day must be Sunday
-        const day = fromDate.getUTCDay(); // 0<=day<7
-        fromDate.setDate(fromDate.getDate() + (7-day)%7 );
-    }
-
-    // Set fromDate to be compatible with Tim's datafiles.
-    // This potentially increases number of samples returned.
-
-    if (res.endsWith("W") || res.endsWith("D")) { // Days/Weeks -- set to midnight
-        fromDate.setUTCHours(0, 0, 0);
-    } else {
-        let minutesRes = parseInt(res);
-        if (minutesRes >= 60) { // Hours
-            let hoursRes = Math.floor(minutesRes/60);
-            let fromHours = fromDate.getUTCHours();
-            fromDate.setUTCHours(fromHours - fromHours % hoursRes, 0, 0, 0);
-        } else { // Minutes
-            let fromMinutes = fromDate.getUTCMinutes();
-            fromDate.setUTCMinutes(fromMinutes - fromMinutes % minutesRes, 0, 0);
-        }
-    }
-
-    console.log("fromDate:", fromDate.toUTCString());
-    console.log("toDate: ", toDate.toUTCString());
-
-    // contract = "0xC31E54c7a869B9FcBEcc14363CF510d1c41fa443";
-    // contract = "0xC6962004f452bE9203591991D15f6b388e09E8D0";
-
-    const daily_res = ['1', '3', '5', '10', '15', '30'];
-    const single_res = ['1W'];
-
-    if (!supported_res.includes(res)) throw Error(`resolution ${res} not supported.`);
-
-    const is_daily_res = daily_res.includes(res);
-    const is_single_res = single_res.includes(res);
-    const is_monthly_res = !is_single_res && !is_daily_res;
-
-    let bars = [];
-    let inverted;
-
-    for ( // Once around for each sample in from-to range
-        let iDate = fromDate,
-            // loop state
-            iFile = undefined,
-            iohlc = 0,
-            ohlc;
-        iDate < toDate;
-    ) {
-
-        let bar = undefined;
-
-        // Fetch one sample file as needed
-
-        if (iFile === undefined ? true :
-            is_monthly_res ? iFile.getUTCMonth() !== iDate.getUTCMonth() :
-            is_daily_res ? iFile.getUTCDate() !== iDate.getUTCDate() :
-            false // is_single_res
-        ) {
-
-            const fres = tvResolutionToPeriodString(res)
-            const yr = iDate.getUTCFullYear();
-            const yrdir = is_single_res ? "" : `/${yr}`;
-            const mo = String(iDate.getUTCMonth()+1).padStart(2, '0'); // January is month 0 in Date object
-            const date = is_daily_res ? String(iDate.getUTCDate()).padStart(2, '0') : "";
-            const yrmo = !is_single_res ? `-${yr}${mo}` : "";
-            let baseURL = "https://beta.dexorder.trade/ohlc/"
-
-            let chainId = useStore().chainId
-
-            inverted = symbol.inverted
-            if (symbol.x?.data) {
-                baseURL = symbol.x.data.uri
-                chainId = symbol.x.data.chain
-                contract = symbol.x.data.symbol
-                inverted ^= symbol.x.data.inverted
-            }
-
-            let url = `${baseURL}${chainId}/${contract}/${fres}${yrdir}/${contract}-${fres}${yrmo}${date}.json`;
-
-            let response = await fetch(url);
-            if (response.ok) {
-                ohlc = await response.json();
-                // todo handle inversion
-                console.log(`Fetch: ${ohlc.length} resolution ${res} samples from ${url}`)
-                console.log(`first: ${new Date(ohlc[0][0]*1000).toUTCString()}`)
-                console.log(`last: ${new Date(ohlc[ohlc.length-1][0]*1000).toUTCString()}`)
-            } else {
-                console.log(`Fetch: file not found: ${url}`)
-                ohlc = []; // no file, then empty data
-            }
-            iFile = new Date(iDate);
-            iohlc = 0;
-        }
-
-        // Skip samples not for our time
-
-        for(; iohlc < ohlc.length; iohlc++ ) {
-            // if ( new Date(ohlc[iohlc][0]+'Z').getTime() >= iDate.getTime() ) break;
-            if ( ohlc[iohlc][0]*1000 >= iDate.getTime() ) break;
-        }
-
-        let ohlcDate = iohlc >= ohlc.length ? undefined : new Date(ohlc[iohlc][0]*1000);
-
-        // no ohlc sample file, or sample file exists and asking for sample beyond last sample, insert missing sample
-
-        const insert_missing_samples = false;
-        const visible_missing_samples = false;
-        if (ohlcDate === undefined) {
-            bar = {
-                time: iDate.getTime(),
-            }
-            if (visible_missing_samples) bar = Object.assign(bar, {
-                // Comment these to make invisible
-                open: 50,
-                high: 50,
-                low: 0,
-                close: 0,
-            });
-            if (!insert_missing_samples) bar = undefined;
-        }
-
-        // file exists, but ohlc sample not for this time, insert missing sample
-        else if ( iDate.getTime() !== ohlcDate.getTime() ) {
-            bar = {
-                time: iDate.getTime(),
-            }
-            if (visible_missing_samples) bar = Object.assign(bar, {
-                open: 100,
-                high: 100,
-                low: 0,
-                close: 0,
-            });
-            if (!insert_missing_samples) bar = undefined;
-        }
-
-        // Copy ohlc sample
-        else {
-            bar = {
-                time: iDate.getTime(),
-                open: ohlc[iohlc][1] ?? ohlc[iohlc][4], // open
-                high: ohlc[iohlc][2] ?? ohlc[iohlc][4], // high
-                low: ohlc[iohlc][3] ?? ohlc[iohlc][4], // low
-                close: ohlc[iohlc][4], // close
-            }
-            iohlc++;
-        }
-
-        if (bar !== undefined) {
-            if (inverted) {
-                bar.open = 1/bar.open
-                let high = bar.high
-                bar.high = 1/bar.low
-                bar.low = 1/high
-                bar.close = 1/bar.close
-            }
-            bars.push(bar);
-        }
-
-        // Increment loop variable
-
-        if (supported_res.indexOf(res)<supported_res.indexOf('1D')) { // <1day
-            const mins = parseInt(res);
-            iDate.setUTCMinutes(iDate.getUTCMinutes()+mins)
-        } else if (res==='1W') { // 1W
-            iDate.setUTCDate(iDate.getUTCDate()+7);
-        } else { // <1W >1day
-            iDate.setUTCDate(iDate.getUTCDate()+1);
-        }
-    }
-
-    // noData should be set only if no bars are in the requested period and earlier.
-    // In our case, we are guaranteed to have contiguous samples.
-    // So we only return no bars (bars.length==0) if:
-    //   1. period is entirely before first data available.
-    //   2. period is entirely after last data available.
-    // Returning noData based on bars.length works perfectly assuming that TV never asks for case 2.
-    // This is probably not a safe assumption. The alternative would be to search
-    // backward to find beginning of history. How far to search?
-
-    let noData = bars.length === 0;
-    if (noData) console.log("noData == true!");
-    return [bars, {noData}];
-}
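Note: the deleted loader read positional JSON rows and fell back to the close when open/high/low were null, and it swapped high and low when a pair was displayed inverted. A condensed sketch of those two mappings; rowToBar and invertBar are illustrative names, not from the repo:

    // Row layout in the old JSON files: [unixSeconds, open, high, low, close].
    function rowToBar(row) {
        const close = row[4]
        return {
            time: row[0] * 1000,      // TradingView bars take milliseconds
            open: row[1] ?? close,    // null O/H/L fall back to the close
            high: row[2] ?? close,
            low:  row[3] ?? close,
            close,
        }
    }

    // Inverted pairs take reciprocals, and high/low swap roles in the process.
    function invertBar(bar) {
        return {time: bar.time, open: 1/bar.open, high: 1/bar.low, low: 1/bar.high, close: 1/bar.close}
    }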
src/charts/ohlc.js (new file, 247 lines)
@@ -0,0 +1,247 @@
+import {useStore} from "@/store/store.js";
+import {nearestOhlcStart} from "@/charts/chart-misc.js";
+
+
+function dailyFile(resName) {
+    function _filename(symbol, timestamp) {
+        const date = new Date(timestamp*1000)
+        const year = date.getUTCFullYear()
+        const month = ('0'+(date.getUTCMonth() + 1)).slice(-2)
+        const day = ('0'+date.getUTCDate()).slice(-2)
+        return `${resName}/${year}/${month}/${symbol}-${resName}-${year}${month}${day}.csv`
+    }
+    return _filename
+}
+
+
+function monthlyFile(resName) {
+    function _filename(symbol, timestamp) {
+        const date = new Date(timestamp*1000)
+        const year = date.getUTCFullYear()
+        const month = ('0'+(date.getUTCMonth() + 1)).slice(-2)
+        return `${resName}/${year}/${symbol}-${resName}-${year}${month}.csv`
+    }
+    return _filename
+}
+
+
+function yearlyFile(resName) {
+    function _filename(symbol, timestamp) {
+        const date = new Date(timestamp*1000)
+        const year = date.getUTCFullYear()
+        return `${resName}/${symbol}-${resName}-${year}.csv`
+    }
+    return _filename
+}
+
+
+function singleFile(resName) {
+    function _filename(symbol, timestamp) {
+        return `${resName}/${symbol}-${resName}.csv`
+    }
+    return _filename
+}
+
+
+function nextDay(timestamp) {
+    const date = new Date(timestamp*1000)
+    return Date.UTC(date.getUTCFullYear(), date.getUTCMonth(), date.getUTCDate() + 1) / 1000
+}
+
+function nextMonth(timestamp) {
+    const date = new Date(timestamp*1000)
+    return Date.UTC(date.getUTCFullYear(), date.getUTCMonth() + 1, date.getUTCDate()) / 1000
+}
+
+function nextYear(timestamp) {
+    const date = new Date(timestamp*1000)
+    return Date.UTC(date.getUTCFullYear() + 1, date.getUTCMonth(), date.getUTCDate()) / 1000
+}
+
+function never(_timestamp) {
+    return Number.MAX_SAFE_INTEGER
+}
+
+
+const resolutions = [
+    { period:     1, tvRes:   '1', filename: dailyFile(  '1m'), nextStart: nextDay,   },
+    { period:     3, tvRes:   '3', filename: dailyFile(  '3m'), nextStart: nextDay,   },
+    { period:     5, tvRes:   '5', filename: dailyFile(  '5m'), nextStart: nextDay,   },
+    { period:    10, tvRes:  '10', filename: dailyFile( '10m'), nextStart: nextDay,   },
+    { period:    15, tvRes:  '15', filename: dailyFile( '15m'), nextStart: nextDay,   },
+    { period:    30, tvRes:  '30', filename: dailyFile( '30m'), nextStart: nextDay,   },
+    { period:    60, tvRes:  '60', filename: monthlyFile( '1H'), nextStart: nextMonth, },
+    { period:   120, tvRes: '120', filename: monthlyFile( '2H'), nextStart: nextMonth, },
+    { period:   240, tvRes: '240', filename: monthlyFile( '4H'), nextStart: nextMonth, },
+    { period:   480, tvRes: '480', filename: monthlyFile( '8H'), nextStart: nextMonth, },
+    { period:   720, tvRes: '720', filename: monthlyFile('12H'), nextStart: nextMonth, },
+    { period:  1440, tvRes:  '1D', filename: yearlyFile( '1D'), nextStart: nextYear,  },
+    { period:  2880, tvRes:  '2D', filename: yearlyFile( '2D'), nextStart: nextYear,  },
+    { period:  4320, tvRes:  '3D', filename: yearlyFile( '3D'), nextStart: nextYear,  },
+    { period: 10080, tvRes:  '1W', filename: singleFile( '1W'), nextStart: never,     },
+]
+
+const resMap = {}
+for (const res of resolutions)
+    resMap[res.tvRes] = res
+
+
+const seriesStarts = {}
+
+
+async function getUrl(url) {
+    try {
+        const response = await fetch(url)
+        console.log('got response', response)
+        if (response.ok)
+            return await response.text()
+        else
+            console.error(`could not fetch ${url}: status ${response.statusText}`)
+    }
+    catch (e) {
+        console.error(`Could not fetch ${url}`, e)
+    }
+    return ''
+}
+
+
+export async function loadOHLC (symbol, contract, from, to, tvRes) {
+    console.log('loadOHLC', tvRes, new Date(1000*from), new Date(1000*to), symbol, contract);
+    let chainId
+    let bars = [];
+    let inverted = symbol.inverted;
+    let baseURL
+    let latest = null // latest time, price
+
+    function fill(end, period) {
+        if (latest===null) return
+        const [start, price] = latest
+        for (let now=nearestOhlcStart(start, period*60); now < end; now += period )
+            bars.push({time:now * 1000, open:price, high:price, low:price, close:price})
+    }
+
+    if (symbol.x?.data) {
+        baseURL = symbol.x.data.uri
+        contract = symbol.x.data.symbol
+        chainId = symbol.x.data.chain
+        inverted ^= symbol.x.data.inverted
+    }
+    else {
+        baseURL = `//ohlc/`
+        chainId = useStore().chainId
+    }
+    baseURL += `${chainId}/${contract}/`
+
+    console.log('baseURL', baseURL)
+
+    const res = resMap[tvRes]
+    const fetches = []
+    let start = from
+    if (!(baseURL in seriesStarts)) {
+        try {
+            const response = await getUrl(baseURL+'quote.csv')
+            if (response.length) {
+                seriesStarts[baseURL] = parseInt(response.split(',')[0])
+                console.log(`Series ${baseURL} starts at ${new Date(start*1000)}`)
+            }
+            else {
+                console.error(`Bad response while fetching ${baseURL+'quote.csv'}`)
+            }
+        }
+        catch (e) {
+            console.error(e)
+        }
+    }
+    if (baseURL in seriesStarts)
+        start = Math.max(start, seriesStarts[baseURL])
+
+    for(let now = start; now < to; now = res.nextStart(now)) {
+        const url = baseURL + res.filename(contract, now);
+        console.log('fetching', url)
+        const prom = getUrl(url)
+        fetches.push(prom);
+    }
+
+    const responses = await Promise.all(fetches)
+    for (const response of responses) {
+        if (response.length) {
+            let lineNum = 0
+            response.split('\n').forEach((line) => {
+                lineNum++
+                console.log(`processing line ${lineNum}`, line)
+                const row = line.split(',')
+                let time, open, high, low, close=null
+                switch (row.length) {
+                    case 1:
+                        if (row[0].length !== 0)
+                            console.log(`Warning: weird nonempty row at OHLC line ${lineNum}: ${line}`)
+                        break
+                    case 2:
+                        time = parseInt(row[0])
+                        if (time < start || time >= to)
+                            break
+                        let price = parseFloat(row[1])
+                        if (inverted)
+                            price = 1/price
+                        open = high = low = close = price
+                        break
+                    case 3:
+                        time = parseInt(row[0])
+                        if (time < start || time >= to)
+                            break
+                        open = parseFloat(row[1])
+                        close = parseFloat(row[2])
+                        if (inverted) {
+                            open = 1/open
+                            close = 1/close
+                        }
+                        high = Math.max(open, close)
+                        low = Math.min(open,close)
+                        break
+                    case 5:
+                        time = parseInt(row[0])
+                        if (time < start || time >= to)
+                            break
+                        open = parseFloat(row[1])
+                        high = parseFloat(row[2])
+                        low = parseFloat(row[3])
+                        close = parseFloat(row[4])
+                        if (inverted) {
+                            open = 1/open
+                            high = 1/high
+                            low = 1/low
+                            close = 1/close
+                        }
+                        break
+                    default:
+                        console.log(`Warning: could not parse line ${lineNum} of OHLC file:\n${line}`)
+                        break
+                }
+                if (close!==null) {
+                    console.log(`filling up to ${time}`)
+                    fill(time, res.period)
+                    const bar = {time:time*1000, open, high, low, close};
+                    console.log('pushing bar', bar)
+                    bars.push(bar)
+                    latest = [time, close]
+                }
+            })
+            console.log(`processed ${lineNum} lines`)
+        }
+        else { console.log('response was empty') }
+    }
+
+    fill(to, res.period)
+
+    // noData should be set only if no bars are in the requested period and earlier.
+    // In our case, we are guaranteed to have contiguous samples.
+    // So we only return no bars (bars.length==0) if:
+    //   1. period is entirely before first data available.
+    //   2. period is entirely after last data available.
+    // Returning noData based on bars.length works perfectly assuming that TV never asks for case 2.
+    // This is probably not a safe assumption. The alternative would be to search
+    // backward to find beginning of history. How far to search?
+
+    let noData = bars.length === 0;
+    if (noData) console.log("noData == true!");
+    console.log('bars', bars)
+    return [bars, {noData}];
+}
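Note: the resolutions table above ties each TradingView resolution to a bucket length (period, in minutes), a file-naming scheme, and a step function that advances to the next file: minute data is stored per day, hourly data per month, daily data per year, and 1W in a single file. A worked example of the lookup for hourly bars; the contract string and date are made up for illustration:

    // Illustrative only; resMap, monthlyFile and nextMonth are the definitions above.
    const res = resMap['60']                  // hourly bars, period 60 minutes
    const t = Date.UTC(2024, 2, 15) / 1000    // 2024-03-15 00:00 UTC, an arbitrary example date
    res.filename('0xPOOL', t)                 // -> '1H/2024/0xPOOL-1H-202403.csv'  ('0xPOOL' is a placeholder)
    res.nextStart(t)                          // -> Unix seconds for 2024-04-15, i.e. the next monthly file
    // loadOHLC prepends baseURL, e.g. //ohlc/<chainId>/<contract>/1H/2024/<contract>-1H-202403.csv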
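Note: the CSV rows can be sparse (a 2-column row is just time,price), so loadOHLC pads the gaps between known prices with flat bars via fill(). A self-contained sketch of that gap filling, assuming every value is in Unix seconds; the committed fill() receives res.period in minutes and converts it with period*60 only when computing the starting bucket:

    // Sketch with assumed names and an all-seconds convention (not the committed code).
    const ROOT_SECS = 1231027200  // same root as OHLC_START in chart-misc.js, expressed in seconds

    function fillFlatBars(bars, lastTimeSecs, lastPrice, endSecs, periodSecs) {
        // Start at the bucket after the last real bar so it is not duplicated,
        // then emit one flat bar per bucket until the end of the requested range.
        let t = Math.floor((lastTimeSecs - ROOT_SECS) / periodSecs) * periodSecs + ROOT_SECS + periodSecs
        for (; t < endSecs; t += periodSecs)
            bars.push({time: t * 1000, open: lastPrice, high: lastPrice, low: lastPrice, close: lastPrice})
        return bars
    }

    // Usage: fillFlatBars(bars, latestTime, latestClose, to, res.period * 60) pads the range with flat bars.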