Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
while True:
server_time = self._next()
session_label = server_time.floor('1D')
if not self.calendar.is_session(session_label):
# wait until next session
# sleep(1)
continue
if current_session is None or current_session != session_label:
yield session_label, SESSION_START
current_session = session_label
self._before_trading_start_bar_yielded = False
delta = pd.Timedelta(
hours=self.before_trading_start_minute[0].hour,
minutes=self.before_trading_start_minute[0].minute,
)
before_trading_start = (
current_session
.tz_localize(None)
.tz_localize(self.before_trading_start_minute[1])
) + delta
session_open = self.calendar.session_open(current_session)
session_close = self.calendar.session_close(current_session)
if (server_time >= before_trading_start and
not self._before_trading_start_bar_yielded):
self._last_emit = server_time
output.put_nowait(snapshot_msg)
self.logger().debug(f"Saved order book snapshot for {trading_pair}")
# Be careful not to go above API rate limits.
await asyncio.sleep(5.0)
except asyncio.CancelledError:
raise
except Exception:
self.logger().network(
f"Unexpected error with WebSocket connection.",
exc_info=True,
app_warning_msg=f"Unexpected error with WebSocket connection. Retrying in 5 seconds. "
f"Check network connection."
)
await asyncio.sleep(5.0)
this_hour: pd.Timestamp = pd.Timestamp.utcnow().replace(minute=0, second=0, microsecond=0)
next_hour: pd.Timestamp = this_hour + pd.Timedelta(hours=1)
delta: float = next_hour.timestamp() - time.time()
await asyncio.sleep(delta)
except asyncio.CancelledError:
raise
except Exception:
self.logger().error("Unexpected error.", exc_info=True)
await asyncio.sleep(5.0)
ax.set_xticks(data.index)
ax.xaxis.set_major_formatter(DateFormatter("%b %Y"))
fig.autofmt_xdate(rotation=90, ha='center')
for month, coverage in zip(coverage.index, coverage.values):
ax2.imshow(np.array([[mpl.colors.to_rgb(COLOR_PALETTE.primary)],
[mpl.colors.to_rgb(COLOR_PALETTE.primary_80)]]),
interpolation='gaussian', extent=(mdates.date2num(month - pd.Timedelta('10days')),
mdates.date2num(month + pd.Timedelta('10days')),
0, coverage), aspect='auto', zorder=1)
ax2.bar(mdates.date2num(month), coverage, edgecolor=COLOR_PALETTE.secondary, linewidth=0.3,
fill=False, zorder=0)
ax2.set_ylim(0, 1)
ax.set_ylim(bottom=0)
ax.set_xlim(data.index[0] - pd.Timedelta('20days'), data.index[-1] + pd.Timedelta('20days'))
ax.xaxis.set_tick_params(rotation=90)
ax.set_zorder(3)
ax2.yaxis.grid(True)
ax2.set_axisbelow(True)
ax.patch.set_visible(False)
ax2.set_ylabel('Coverage [-]')
ax2.yaxis.tick_right()
ax2.yaxis.set_label_position("right")
plt.close()
return ax2.get_figure()
plt.close()
return ax.get_figure()
today_smsstatus_obj = SmsStatus.objects.filter(price_info_incoming=today_caller_object)
no_sms_sent = today_smsstatus_obj.count()
no_sms_dilivered = today_smsstatus_obj.filter(status='D').count()
# Queries with rates available
today_rates_available_count = today_caller_object.filter(is_rate_available=0).count()
# values to be calculated
no_call_backs_time_limit = ''
no_first_attempt_success = today_caller_object.filter(info_status=1, prev_query_code__isnull=False).count()
# timedelay for delivered messages
df = pd.DataFrame(list(SmsStatus.objects.filter(price_info_incoming=today_caller_object).values('id','price_info_incoming','status', 'delivery_time', 'api_call_initiation_time')))
df['time_delay'] = df.groupby('price_info_incoming')['api_call_initiation_time', 'delivery_time'].diff(axis='columns')['delivery_time']
df_max = df.groupby('price_info_incoming')['time_delay'].max()
time_delay = pd.Timedelta(seconds=delay)
no_sms_diliver_time_limit = df_max.loc[lambda x : x > time_delay].count()
self.send_mail(email_subject=email_subject, start_date=start_date, period_label=period_label_str, no_incoming_sms=no_incoming_sms, no_sms_users=no_sms_users,
per_correct_code_entered_sms=per_correct_code_entered_sms, no_incoming_call=no_incoming_call, per_correct_code_entered_call=per_correct_code_entered_call, \
no_call_backs_time_limit=no_call_backs_time_limit, no_first_attempt_success=no_first_attempt_success, today_caller_object_sms_count=today_caller_object_sms_count, no_sms_sent=no_sms_sent, \
no_sms_dilivered=no_sms_dilivered, no_sms_diliver_time_limit=no_sms_diliver_time_limit, total_correct_code_entered_sms=per_correct_code_entered_sms, today_rates_available_count=today_rates_available_count, delay=delay )
def addVerticalBarrier(tEvents, close, numDays=1):
    """Compute the vertical-barrier (time-out) timestamp for each event.

    For every event timestamp in ``tEvents``, find the first bar in
    ``close`` occurring at least ``numDays`` days later. Events whose
    barrier would fall beyond the end of ``close`` are dropped.

    Args:
        tEvents: DatetimeIndex of event start timestamps.
        close: Series of prices indexed by a sorted DatetimeIndex.
        numDays: horizon of the vertical barrier, in days.

    Returns:
        pd.Series mapping each retained event timestamp (index) to its
        vertical-barrier timestamp (value).
    """
    # Position in close.index of the first bar >= event + horizon.
    t1 = close.index.searchsorted(tEvents + pd.Timedelta(days=numDays))
    # Drop events whose barrier lies past the last available bar.
    t1 = t1[t1 < close.shape[0]]
    # Map surviving events to the actual barrier timestamps.
    t1 = pd.Series(close.index[t1], index=tEvents[:t1.shape[0]])
    return t1
def evaluate_fetch_interval(self, end_timestamp):
    """Decide whether a full trading-level interval has elapsed.

    On the first call (no ``current_timestamp`` yet) the timestamp is
    recorded and no interval is reported. Afterwards, once at least one
    trading-level period has passed since ``current_timestamp``, return
    the (start, end) pair to fetch; otherwise report nothing.

    Args:
        end_timestamp: the latest observed timestamp.

    Returns:
        Tuple of (fetch_start, fetch_end), or (None, None) when no
        complete interval is available yet.
    """
    # First observation: remember it and wait for an interval to accrue.
    if not self.current_timestamp:
        self.current_timestamp = end_timestamp
        return None, None

    level_seconds = self.trading_level.to_second()
    elapsed = (end_timestamp - self.current_timestamp).total_seconds()
    if elapsed < level_seconds:
        return None, None
    # One full period has elapsed; hand back the interval boundaries.
    fetch_start = self.current_timestamp + pd.Timedelta(seconds=level_seconds)
    return fetch_start, end_timestamp
def shift_time(start_time, mins) -> str:
    """
    Shift start time by mins

    Args:
        start_time: start time in terms of HH:MM string
        mins: number of minutes (+ / -)

    Returns:
        end time in terms of HH:MM string
    """
    s_time = pd.Timestamp(start_time)
    # pd.Timedelta(minutes=...) carries the sign and handles offsets of
    # 60+ (or fractional) minutes directly, so no np.sign / string
    # formatting gymnastics are needed.
    e_time = s_time + pd.Timedelta(minutes=mins)
    # Wraps around midnight naturally: only the clock time is formatted.
    return e_time.strftime('%H:%M')
def change_page():
if pageNumber < 10:
begin = alarms.alarms[alarmNumber] - pd.Timedelta('315 seconds') + pageNumber*pd.Timedelta('60 seconds')
end = begin + pd.Timedelta('60 seconds')
# BEST PRACTICE --- update .data in one step with a new dict (according to Bokeh site/docs).
# Create new dictionaries which will hold new "step" of data.
newPpgData = dict()
newPpgData2 = dict()
newQosData = dict()
newQosData2 = dict()
newEkgData = dict()
newHrData = dict()
newSpo2Data = dict()
newNibpSysData = dict()
newNibpMeanData = dict()
newNibpDiaData = dict()
newEkgData['x'] = np.hstack(data.ecg[begin:end].index.to_series().apply(expand_ecg_times))
df = pd.DataFrame({'forecast': fx,
'observation': obs,
'reference': ref_fx})
# get normalization factor
normalization = processed_fx_obs.normalization_factor
# get uncertainty.
deadband = processed_fx_obs.uncertainty
cost_params = processed_fx_obs.cost
# Force `groupby` to be consistent with `interval_label`, i.e., if
# `interval_label == ending`, then the last interval should be in the bin
if processed_fx_obs.interval_label == "ending":
df.index -= pd.Timedelta("1ns")
metric_vals = []
# Calculate metrics
for category in set(categories):
# total (special category)
if category == 'total':
index_category = lambda x: 0 # NOQA: E731
else:
index_category = getattr(df.index, category)
# Calculate each metric
for metric_ in metrics:
# Group by category
for cat, group in df.groupby(index_category):
def getWakeFrequency(self):
    """Return the configured wake-up frequency as a pandas Timedelta."""
    freq_spec = self.wake_up_freq
    return pd.Timedelta(freq_spec)