I have a Bokeh plot, a Bokeh server, and a periodic callback function. The periodic callback function was working just fine yesterday, but today I am all of a sudden getting the error:
TypeError: 'NoneType' object is not callable
every time the callback is called. In order to initially populate some of the data on the plot, I call the callback function 10 times. The callback function works just fine when it is called while initializing the plot, it just breaks during the actual callback after it is loaded in the browser.
First, is there any reason this might be happening? I haven’t changed anything in my code since yesterday when it was working. Second, how can I troubleshoot this error? I have tried putting print statements into the code, but none of the print statements execute, it just throws the error. Additionally, the error displayed doesn’t show the actual line of code that is not working, which makes troubleshooting very difficult. How can I get around this? Is there a way to step through and debug the bokeh callback while it is actually running on the bokeh server? The callback works fine in all other cases.
Here is the full error thrown:
Error thrown from periodic callback:
Traceback (most recent call last):
File "/usr/local/lib/python3.6/site-packages/tornado/gen.py", line 526, in callback
result_list.append(f.result())
File "/usr/local/lib/python3.6/site-packages/bokeh/server/session.py", line 67, in
_needs_document_lock_wrapper
result = func(self, *args, **kwargs)
File "/usr/local/lib/python3.6/site-packages/bokeh/server/session.py", line 195, in
with_document_locked
return func(*args, **kwargs)
File "/usr/local/lib/python3.6/site-packages/bokeh/document/document.py", line 1212,
in wrapper
return doc._with_self_as_curdoc(invoke)
File "/usr/local/lib/python3.6/site-packages/bokeh/document/document.py", line 1198,
in _with_self_as_curdoc
return f()
File "/usr/local/lib/python3.6/site-packages/bokeh/document/document.py", line 1211,
in invoke
return f(*args, **kwargs)
TypeError: 'NoneType' object is not callable
Here is the actual code from the callback function. It updates two plots and two data tables with ship positions and a scrolling spectrogram.
# Number of rows retained in the streaming spectrogram ColumnDataSource
# before the oldest points are dropped (used by spectrogram_source.stream).
# NOTE(review): presumably desired display seconds * 8000 Hz sample rate,
# per the comment at the stream() call below — confirm.
rollover = 185127
def update_spectrogram():
    """Periodic Bokeh callback.

    Advances the scrolling spectrogram by one audio segment and refreshes
    the model-prediction data table and the ship-position plot/tables from
    the application database.

    Reads module-level state: ``wc`` (segment iterator), ``file``,
    ``sample_rate``, ``color_dictionary``.
    Writes module-level ColumnDataSources: ``spectrogram_source``,
    ``text_source``, ``prediction_source``, ``source``, ``line_source``,
    ``circle_source``, ``label_source``.
    Returns None.
    """
    # try to grab the next segment, if at the end of the wc object, then create a new one with new times
    try:
        segment = next(wc)
    except StopIteration:
        print("Stop iteration reached")
        # Change this to query the latest time from the predictions database, then use that time to query the
        # wavcrawler
        # compare old time to new time, if no difference, then return, continue checking but don't update
        # spectrogram data if no new data
        app_db = os.environ.get('DATABASE_URL') #or 'sqlite:///../app.db'
        app_engine = sqlalchemy.create_engine(app_db)
        app_connection = app_engine.connect()
        latest_time_query = "SELECT MAX(end_time) FROM PREDICTIONS"
        t2 = app_connection.execute(latest_time_query)
        t2 = int(t2.all()[0][0])
        # Window start: 30 minutes (1800 s) before the newest prediction.
        t1 = t2-1800
        app_connection.close()
        app_engine.dispose()
        # if no new predictions, then continue without updating data
        # check if the current segments first timestamp is greater than the t1 of the new timestamp from the predictions
        # If it is not, then that means there are new predictions, so create a new wavcrawler
        # If it is, there are no new predictions, and the plot will stay paused at current values
        if int(spectrogram_source.data['Time'][0]) >= t1:
            print("No new predictions available, waiting...")
            return
        print("Creating new Wavcrawler object for new predictions")
        # NOTE(review): new_wc is a local; the module-level wc is never
        # reassigned (no `global wc`), so every subsequent callback will hit
        # StopIteration again and rebuild a crawler — confirm intended.
        new_wc = WavCrawler(file,t1, t2, segment_length=8000, overlap=0.25)
        segment = next(new_wc)
    print("Creating spectrogram")
    # First channel of the segment's samples feeds the spectrogram.
    signal = segment.samples[0, :]
    f, t, Sxx = spectrogram(signal, sample_rate)
    # Flatten the (frequency x time) grid into long format: one row per cell.
    # NOTE(review): row-by-row .loc assignment does f*t DataFrame writes;
    # building the frame from numpy arrays would be much faster if this
    # loop ever shows up as a hot spot.
    i=0
    df_length = f.shape[0] * t.shape[0]
    new_df_spectrogram = pd.DataFrame(np.nan, index=range(0,df_length), columns=['Frequency', 'Time', 'Sxx'])
    for freq in range(f.shape[0]):
        for time in range(t.shape[0]):
            new_df_spectrogram.loc[i] = [f[freq],t[time],Sxx[freq][time]]
            i = i+1
    # Shift the spectrogram's relative times to absolute timestamps.
    new_df_spectrogram['Time'] = new_df_spectrogram['Time'] + float(segment.time_stamp)
    new_df_spectrogram['Time'] = pd.to_datetime(new_df_spectrogram['Time'], unit='s')
    # Data to keep in frame, should be desired number of seconds * 8000 (sample rate)
    spectrogram_source.stream(new_df_spectrogram, rollover=rollover)
    # Label the plot with the newest timestamp in the streamed batch.
    new_df_spectrogram_time = new_df_spectrogram.iloc[[new_df_spectrogram['Time'].idxmax()]]
    new_df_spectrogram_time = new_df_spectrogram_time.copy()
    new_df_spectrogram_time.loc[:,'str_time'] = new_df_spectrogram_time.loc[:,'Time'].dt.strftime('%Y-%b-%d %H:%M')
    text_source.data = ColumnDataSource.from_df(new_df_spectrogram_time)
    t1 = int(segment.time_stamp)
    app_db = os.environ.get('DATABASE_URL') #or 'sqlite:///../app.db'
    app_engine = sqlalchemy.create_engine(app_db)
    app_connection = app_engine.connect()
    print(app_engine)
    print(app_connection)
    print("Creating model predictions")
    #---------------------Update model predictions--------------------------------------
    # Predictions whose [START_TIME, END_TIME] interval spans the segment time.
    query = "SELECT * FROM PREDICTIONS WHERE START_TIME <= " + str(t1) + " AND END_TIME >=" + str(t1)
    predictions = pd.read_sql_query(query, app_engine)
    if not predictions.empty:
        pred_json = json.loads(predictions['model_predictions'][0])
        pred_df = pd.DataFrame(pred_json).T
        pred_df.index = pred_df.index.rename("Model ID")
        prediction_df = pred_df.sort_values(by=['Model ID'])
        prediction_df = prediction_df.reset_index()
        # Enrich each model row with metadata looked up per Model ID.
        prediction_df['Model Name'] = prediction_df.apply(lambda x: get_model_info(x['Model ID'], 'model_name', app_connection),axis=1)
        prediction_df['Model Type'] = prediction_df.apply(lambda x: get_model_info(x['Model ID'], 'model_type', app_connection),axis=1)
        prediction_df['Channels'] = prediction_df.apply(lambda x: get_model_info(x['Model ID'], 'channels', app_connection),axis=1)
    else:
        # No predictions at this time: publish a single all-None row so the
        # data table keeps its column layout.
        prediction_df = pd.DataFrame(data={"Model ID":[None],"Model Name":[None],"Model Type":[None],"Channels":[None],\
            "pred":[None],"pred_max_p":[None],"pred_vi_mean_max":[None],"entropy":[None],"nll":[None],"pred_std":[None],"var":[None],\
            "norm_entropy":[None],"epistemic":[None],"aleatoric":[None]})
    prediction_source.data = prediction_df
    # AIS window: one hour of track history up to one second past the
    # segment timestamp, limited to a 40 km radius from the sensor.
    t1_ais = t1 - (60*60*1) #(60*60*24)
    t2 = t1 + 1
    radius = 40
    query = 'SELECT * FROM AIS WHERE "timeOfFix" >= ' + str(t1_ais) + ' AND "timeOfFix" <= ' + str(t2) + \
        ' AND dist_from_sensor_km <=' + str(radius)
    new_ship_pos = pd.read_sql_query(query, app_engine)
    if new_ship_pos.empty:
        print("No ship positions for this time, check AIS stream")
    # Check if any ship exited, delete if they did
    allowed_mmsis = new_ship_pos.groupby('mmsi').agg({'timeOfFix':'max'})
    allowed_mmsis = allowed_mmsis.reset_index()
    allowed_mmsis = allowed_mmsis[allowed_mmsis.apply(lambda x: check_range(x['timeOfFix'], t1, radius, x['mmsi'], app_connection), axis=1)]['mmsi']
    new_ship_pos = new_ship_pos[new_ship_pos['mmsi'].isin(allowed_mmsis)]
    new_ship_pos['timeOfFix'] = pd.to_datetime(new_ship_pos['timeOfFix'], unit='s')
    source.data = ColumnDataSource.from_df(new_ship_pos)
    # Update source for lines
    new_ships_grouped_df = new_ship_pos.groupby('mmsi')
    colors_list = []
    class_list = []
    for key, data in new_ships_grouped_df:
        # One color/class per vessel, taken from its ship_class value.
        colors_list.append(color_dictionary[data['ship_class'].unique()[0]])
        class_list.append(data['ship_class'].unique()[0])
    # multi_line-style data: one xs/ys track per vessel.
    new_ships_data = dict(
        xs=[list(x[1]) for x in new_ships_grouped_df.merc_longitude],
        ys=[list(y[1]) for y in new_ships_grouped_df.merc_latitude],
        classes=class_list,
        color=colors_list
    )
    # Latest fix per vessel marks its current position glyph.
    idx = new_ship_pos.groupby(['mmsi'])['timeOfFix'].transform(max) == new_ship_pos['timeOfFix']
    new_circle_ship_df = new_ship_pos[idx].copy()
    new_circle_ship_df['color'] = new_circle_ship_df['ship_class'].map(color_dictionary)
    line_source.data = new_ships_data
    circle_source.data = ColumnDataSource.from_df(new_circle_ship_df)
    # Update ship classes present
    classes = []
    for mmsi, group in new_ships_grouped_df:
        classes.append(group['ship_class'].iloc[0])
    pred_labels = {"AIS Labels":classes}
    label_source.data = pred_labels
    app_connection.close()
    app_engine.dispose()
    return
EDIT
Ok so now I know that my code is not causing this issue. I commented out all of the code in the callback so that the only thing getting executed is this:
def update_spectrogram():
    """Minimal diagnostic callback: print two markers and exit.

    Used to confirm that the periodic-callback error occurs even when the
    body does no real work.
    """
    for marker in ("Beginning", "End"):
        print(marker)
    return
and I am still receiving this error. What would be the cause of this?