Reconfigured folder structure
Added Evaluation, Preprocessing, Training, and Transformation folders. Preprocessing is the old preprocessing folder reworked to fit the new structure. Training and Transformation are the old project notebook split into two parts and restructured. Evaluation holds the notebook for evaluating the predictive power of the model.
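A rough sketch of the new layout (only the paths touched by this commit are certain; the Training and Transformation contents are inferred from the relative paths used in the notebooks):

Project Final/
    Evaluation/
        Evalutation.ipynb
    Preprocessing/
        preprocess_data.ipynb
        stations_unique.csv
    Training/
        weights/        (referenced as ../Training/weights/ by the evaluation notebook)
    Transformation/
    data/               (referenced as ../data/ by the evaluation notebook)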
@@ -1,532 +0,0 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Project Notebook\n",
|
||||
"This is the full and complete notebook that takes in the data from NOAA and processes it into frames to be used in the PredNet architecture and produce a resulting prediction."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import pandas as pd\n",
|
||||
"import numpy as np\n",
|
||||
"import os\n",
|
||||
"from tqdm import tqdm"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"#Getting a list of files in raw data folder\n",
|
||||
"filenames = os.listdir('D:/Nico/Desktop/processed_data')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"header_wanted = [\n",
|
||||
" 'HOURLYVISIBILITY',\n",
|
||||
" 'HOURLYDRYBULBTEMPC',\n",
|
||||
" 'HOURLYWETBULBTEMPC',\n",
|
||||
" 'HOURLYDewPointTempC',\n",
|
||||
" 'HOURLYRelativeHumidity',\n",
|
||||
" 'HOURLYWindSpeed',\n",
|
||||
" 'HOURLYWindGustSpeed',\n",
|
||||
" 'HOURLYStationPressure',\n",
|
||||
" 'HOURLYPressureTendency',\n",
|
||||
" 'HOURLYPressureChange',\n",
|
||||
" 'HOURLYSeaLevelPressure',\n",
|
||||
" 'HOURLYPrecip',\n",
|
||||
" 'HOURLYAltimeterSetting']"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"usecols = ['DATE','STATION'] + header_wanted"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"#Loading all files into a pandas Dataframe\n",
|
||||
"tqdm.pandas()\n",
|
||||
"df = pd.concat([pd.read_csv('D:/Nico/Desktop/processed_data/{}'.format(x), usecols=usecols, low_memory=False) for x in tqdm(filenames)])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"At this point all the data has been loaded into a single dataframe and any data changes have been made. The next step is to break the data up by WBAN and place in a 2D array at the appropriate grid cell. "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"stations = pd.read_csv(\"../Playground/stations_unique.csv\", usecols = ['STATION_ID', 'LON_SCALED', 'LAT_SCALED'])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"height = 20\n",
|
||||
"width = 40"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"mask = [([0] * width) for i in range(height)]\n",
|
||||
"\n",
|
||||
"wban_loc = dict(zip(stations.STATION_ID,zip(stations.LON_SCALED,stations.LAT_SCALED)))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"grid = [([pd.DataFrame()] * width) for i in range(height)]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"for key, value in tqdm(wban_loc.items()):\n",
|
||||
" mask[value[1]][value[0]] = 1\n",
|
||||
" grid[value[1]][value[0]] = df.loc[df.STATION == key]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"%matplotlib inline"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plt.imshow(mask)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"#TODO Handle different sized data some stacks too short\n",
|
||||
"def create_frames(data,height, width, depth):\n",
|
||||
" days = []\n",
|
||||
" frames = []\n",
|
||||
" for i in tqdm(range(depth)):\n",
|
||||
" frame = np.zeros((height,width,12))\n",
|
||||
" for y in range(height):\n",
|
||||
" for x in range(width):\n",
|
||||
" if(not data[y][x].empty):\n",
|
||||
" frame[y][x] = data[y][x].iloc[[i],1:13].values.flatten()\n",
|
||||
" if((i+1)%24 != 0):\n",
|
||||
" frames.append(frame)\n",
|
||||
" else:\n",
|
||||
" frames.append(frame)\n",
|
||||
" days.append(frames)\n",
|
||||
" frames = []\n",
|
||||
" return days"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def average_grid_fill(mask,data, height, width):\n",
|
||||
" \n",
|
||||
" for i in range(height):\n",
|
||||
" for j in range(width):\n",
|
||||
" if(mask[i][j] != 1):\n",
|
||||
" neighbors = get_neighbors(j,i,data)\n",
|
||||
" data[i][j] = np.mean(neighbors)\n",
|
||||
" \n",
|
||||
" return data"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def get_neighbors(x,y,g):\n",
|
||||
" neighbors = []\n",
|
||||
" for i in [y-1,y,y+1]:\n",
|
||||
" for j in [x-1,x,x+1]:\n",
|
||||
" if(i >= 0 and j >= 0):\n",
|
||||
" if(i != y or j != x ):\n",
|
||||
" try:\n",
|
||||
" neighbors.append(g[i][j])\n",
|
||||
" except:\n",
|
||||
" pass\n",
|
||||
" return neighbors"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def store_sequence(frames):\n",
|
||||
" import hickle as hkl\n",
|
||||
" source_list = []\n",
|
||||
" \n",
|
||||
" for days in range(len(frames)):\n",
|
||||
" for day in range(len(frames[days])):\n",
|
||||
" source_list += '{}'.format(days)\n",
|
||||
" \n",
|
||||
" hkl.dump(frames, './data/train/x_train.hkl')\n",
|
||||
" hkl.dump(source_list, './data/train/x_sources.hkl')\n",
|
||||
" "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Splits is a dictionary holding train, test, val\n",
|
||||
"the values for train, test, and val are lists of tuples holding category and folder name\n",
|
||||
"in the end each image gets a source associated with it\n",
|
||||
"there is only one data and one source hickle dump for each of train test and val"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"frames = create_frames(grid, height, width,504)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"#TODO use loop to average each frame\n",
|
||||
"for x in tqdm(range(len(frames))):\n",
|
||||
" for y in range(len(frames[0])):\n",
|
||||
" frames[x][y] = average_grid_fill(mask, frames[x][y], height, width )"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"store_sequence(frames)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"np_frames = np.array(frames)\n",
|
||||
"np_frames.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"store_sequence(np_frames)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"At this point I have processed the data and made it into discrete frames of data and it is time to run it through the PredNet architecture for training."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"Using TensorFlow backend.\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"np.random.seed(123)\n",
|
||||
"from six.moves import cPickle\n",
|
||||
"\n",
|
||||
"from keras import backend as K\n",
|
||||
"from keras.models import Model\n",
|
||||
"from keras.layers import Input, Dense, Flatten\n",
|
||||
"from keras.layers import LSTM\n",
|
||||
"from keras.layers import TimeDistributed\n",
|
||||
"from keras.callbacks import LearningRateScheduler, ModelCheckpoint\n",
|
||||
"from keras.optimizers import Adam\n",
|
||||
"\n",
|
||||
"from prednet import PredNet\n",
|
||||
"from data_utils import SequenceGenerator"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"WEIGHTS_DIR = './weights/'\n",
|
||||
"DATA_DIR = './data/'"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"save_model = True # if weights will be saved\n",
|
||||
"weights_file = os.path.join(WEIGHTS_DIR, 'prednet_weather_weights.hdf5') # where weights will be saved\n",
|
||||
"json_file = os.path.join(WEIGHTS_DIR, 'prednet_weather_model.json')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Data files\n",
|
||||
"#TODO: Use the files from NOAA and process them into proper frames\n",
|
||||
"train_file = os.path.join(DATA_DIR,'train/', 'x_train.hkl')\n",
|
||||
"train_sources = os.path.join(DATA_DIR, 'train/', 'x_sources.hkl')\n",
|
||||
"#val_file = os.path.join(DATA_DIR, 'X_val.hkl')\n",
|
||||
"#val_sources = os.path.join(DATA_DIR, 'sources_val.hkl')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 7,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Training parameters\n",
|
||||
"nb_epoch = 1\n",
|
||||
"batch_size = 4\n",
|
||||
"samples_per_epoch = 500\n",
|
||||
"N_seq_val = 100 # number of sequences to use for validation"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 8,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Model parameters\n",
|
||||
"n_channels, im_height, im_width = (12, 20, 40)\n",
|
||||
"input_shape = (n_channels, im_height, im_width) if K.image_data_format() == 'channels_first' else (im_height, im_width, n_channels)\n",
|
||||
"stack_sizes = (n_channels, 48, 96)\n",
|
||||
"R_stack_sizes = stack_sizes\n",
|
||||
"A_filt_sizes = (3, 3)\n",
|
||||
"Ahat_filt_sizes = (3, 3, 3)\n",
|
||||
"R_filt_sizes = (3, 3, 3)\n",
|
||||
"layer_loss_weights = np.array([1., 0., 0.]) # weighting for each layer in final loss; \"L_0\" model: [1, 0, 0, 0], \"L_all\": [1, 0.1, 0.1, 0.1]\n",
|
||||
"layer_loss_weights = np.expand_dims(layer_loss_weights, 1)\n",
|
||||
"nt = 24 # number of timesteps used for sequences in training\n",
|
||||
"time_loss_weights = 1./ (nt - 1) * np.ones((nt,1)) # equally weight all timesteps except the first\n",
|
||||
"time_loss_weights[0] = 0"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 9,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"prednet = PredNet(stack_sizes, R_stack_sizes,\n",
|
||||
" A_filt_sizes, Ahat_filt_sizes, R_filt_sizes,\n",
|
||||
" output_mode='error', return_sequences=True)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 10,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"inputs = Input(shape=(nt,) + input_shape)\n",
|
||||
"errors = prednet(inputs) # errors will be (batch_size, nt, nb_layers)\n",
|
||||
"errors_by_time = TimeDistributed(Dense(1, trainable=False), weights=[layer_loss_weights, np.zeros(1)], trainable=False)(errors) # calculate weighted error by layer\n",
|
||||
"errors_by_time = Flatten()(errors_by_time) # will be (batch_size, nt)\n",
|
||||
"final_errors = Dense(1, weights=[time_loss_weights, np.zeros(1)], trainable=False)(errors_by_time) # weight errors by time\n",
|
||||
"model = Model(inputs=inputs, outputs=final_errors)\n",
|
||||
"model.compile(loss='mean_absolute_error', optimizer='adam')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 11,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"_________________________________________________________________\n",
|
||||
"Layer (type) Output Shape Param # \n",
|
||||
"=================================================================\n",
|
||||
"input_1 (InputLayer) (None, 24, 20, 40, 12) 0 \n",
|
||||
"_________________________________________________________________\n",
|
||||
"pred_net_1 (PredNet) (None, 24, 3) 1645548 \n",
|
||||
"_________________________________________________________________\n",
|
||||
"time_distributed_1 (TimeDist (None, 24, 1) 4 \n",
|
||||
"_________________________________________________________________\n",
|
||||
"flatten_1 (Flatten) (None, 24) 0 \n",
|
||||
"_________________________________________________________________\n",
|
||||
"dense_2 (Dense) (None, 1) 25 \n",
|
||||
"=================================================================\n",
|
||||
"Total params: 1,645,577\n",
|
||||
"Trainable params: 1,645,548\n",
|
||||
"Non-trainable params: 29\n",
|
||||
"_________________________________________________________________\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"model.summary()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 12,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"truth = []\n",
|
||||
"for i in range(20):\n",
|
||||
" truth.append(np.random.randint(255,size=(1)))\n",
|
||||
"output = np.array(truth)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 13,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"train_generator = SequenceGenerator(train_file, train_sources, nt, batch_size=batch_size, shuffle=True)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"lr_schedule = lambda epoch: 0.001 if epoch < 75 else 0.0001 # start with lr of 0.001 and then drop to 0.0001 after 75 epochs\n",
|
||||
"callbacks = [LearningRateScheduler(lr_schedule)]\n",
|
||||
"#history = model.fit(np_frames, output ,batch_size, nb_epoch, callbacks=callbacks)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"Epoch 1/1\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"history = model.fit_generator(train_generator, samples_per_epoch / batch_size, nb_epoch, callbacks=callbacks)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.6.4"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
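As noted in the splits/source markdown cell above, SequenceGenerator is fed one data dump and one source dump per split. A minimal sketch of the layout store_sequence is aiming for, assuming frames are grouped as days of 24 hourly 20x40x12 grids (flatten_and_store is an illustrative helper, not something in the repo):

import numpy as np
import hickle as hkl

def flatten_and_store(days, x_file='./data/train/x_train.hkl',
                      sources_file='./data/train/x_sources.hkl'):
    """Flatten a list of days (each a list of 24 hourly frames) into one stack and dump it."""
    frames = [frame for day in days for frame in day]               # all hourly frames, in order
    sources = [str(i) for i, day in enumerate(days) for _ in day]   # one day id per frame
    X = np.stack(frames)                                            # shape: (n_frames, 20, 40, 12)
    hkl.dump(X, x_file)                                             # data dump
    hkl.dump(sources, sources_file)                                 # matching source dump
    return X.shape, len(sources)

Frames that share a source id are treated as one continuous recording, which is what should let SequenceGenerator cut nt=24-step training sequences without crossing day boundaries.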
|
||||
240
Project Final/Evaluation/Evalutation.ipynb
Normal file
@@ -0,0 +1,240 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"Using TensorFlow backend.\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"import os\n",
|
||||
"import numpy as np\n",
|
||||
"from six.moves import cPickle\n",
|
||||
"import matplotlib\n",
|
||||
"matplotlib.use('Agg')\n",
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"import matplotlib.gridspec as gridspec\n",
|
||||
"%matplotlib inline\n",
|
||||
"from keras import backend as K\n",
|
||||
"from keras.models import Model, model_from_json\n",
|
||||
"from keras.layers import Input, Dense, Flatten\n",
|
||||
"\n",
|
||||
"from prednet import PredNet\n",
|
||||
"from data_utils import SequenceGenerator\n",
|
||||
"\n",
|
||||
"from tqdm import tqdm"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"n_plot = 40\n",
|
||||
"batch_size = 10\n",
|
||||
"nt = 24"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"WEIGHTS_DIR = '../Training/weights/'\n",
|
||||
"DATA_DIR = '../data/'\n",
|
||||
"RESULTS_SAVE_DIR = './weather_results/'"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"weights_file = os.path.join(WEIGHTS_DIR, 'prednet_weather_weights.hdf5')\n",
|
||||
"json_file = os.path.join(WEIGHTS_DIR, 'prednet_weather_model.json')\n",
|
||||
"test_file = os.path.join(DATA_DIR, 'x_test.hkl')\n",
|
||||
"test_sources = os.path.join(DATA_DIR, 'sources_test.hkl')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Load trained model\n",
|
||||
"f = open(json_file, 'r')\n",
|
||||
"json_string = f.read()\n",
|
||||
"f.close()\n",
|
||||
"train_model = model_from_json(json_string, custom_objects = {'PredNet': PredNet})\n",
|
||||
"train_model.load_weights(weights_file)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Create testing model (to output predictions)\n",
|
||||
"layer_config = train_model.layers[1].get_config()\n",
|
||||
"layer_config['output_mode'] = 'prediction'\n",
|
||||
"data_format = layer_config['data_format'] if 'data_format' in layer_config else layer_config['dim_ordering']\n",
|
||||
"test_prednet = PredNet(weights=train_model.layers[1].get_weights(), **layer_config)\n",
|
||||
"input_shape = list(train_model.layers[0].batch_input_shape[1:])\n",
|
||||
"input_shape[0] = nt\n",
|
||||
"inputs = Input(shape=tuple(input_shape))\n",
|
||||
"predictions = test_prednet(inputs)\n",
|
||||
"test_model = Model(inputs=inputs, outputs=predictions)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 7,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"test_generator = SequenceGenerator(test_file, test_sources, nt, sequence_start_mode='unique', data_format=data_format)\n",
|
||||
"X_test = test_generator.create_all()\n",
|
||||
"X_hat = test_model.predict(X_test, batch_size)\n",
|
||||
"if data_format == 'channels_first':\n",
|
||||
" X_test = np.transpose(X_test, (0, 1, 3, 4, 2))\n",
|
||||
" X_hat = np.transpose(X_hat, (0, 1, 3, 4, 2))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 8,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Compare MSE of PredNet predictions vs. using last frame. Write results to prediction_scores.txt\n",
|
||||
"mse_model = np.nanmean( (X_test[:, 1:] - X_hat[:, 1:])**2 ) # look at all timesteps except the first\n",
|
||||
"mse_prev = np.nanmean( (X_test[:, :-1] - X_test[:, 1:])**2 )\n",
|
||||
"if not os.path.exists(RESULTS_SAVE_DIR): os.mkdir(RESULTS_SAVE_DIR)\n",
|
||||
"f = open(RESULTS_SAVE_DIR + 'prediction_scores.txt', 'w')\n",
|
||||
"f.write(\"Model MSE: %f\\n\" % mse_model)\n",
|
||||
"f.write(\"Previous Frame MSE: %f\" % mse_prev)\n",
|
||||
"f.close()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 9,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"Model MSE:\t 14.119876861572266\n",
|
||||
"Prev Frame MSE:\t 0.02834348939359188\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"print(\"Model MSE:\\t {}\\nPrev Frame MSE:\\t {}\".format(mse_model,mse_prev))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
" 63%|███████████████████████████████████████████████████▉ | 19/30 [09:38<05:35, 30.47s/it]"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"# Plot some predictions\n",
|
||||
"aspect_ratio = float(X_hat.shape[3]) / X_hat.shape[2]\n",
|
||||
"plt.figure(figsize = (nt, 7*2*aspect_ratio))\n",
|
||||
"gs = gridspec.GridSpec(2*7, nt)\n",
|
||||
"gs.update(wspace=0., hspace=0.2)\n",
|
||||
"plot_save_dir = os.path.join(RESULTS_SAVE_DIR, 'prediction_plots/')\n",
|
||||
"if not os.path.exists(plot_save_dir): os.mkdir(plot_save_dir)\n",
|
||||
"plot_idx = np.random.permutation(X_test.shape[0])[:n_plot]\n",
|
||||
"for i in tqdm(plot_idx):\n",
|
||||
" for t in range(nt):\n",
|
||||
" for c in range(7):\n",
|
||||
" plt.subplot(gs[t + c*2*nt])\n",
|
||||
" plt.imshow(X_test[i,t,:,:,c], interpolation='none')\n",
|
||||
" plt.tick_params(axis='both', which='both', bottom='off', top='off', left='off', right='off', labelbottom='off', labelleft='off')\n",
|
||||
" if t==0: plt.ylabel('Actual', fontsize=10)\n",
|
||||
"\n",
|
||||
" plt.subplot(gs[t + (c*2+1)*nt])\n",
|
||||
" plt.imshow(X_hat[i,t,:,:,c], interpolation='none')\n",
|
||||
" plt.tick_params(axis='both', which='both', bottom='off', top='off', left='off', right='off', labelbottom='off', labelleft='off')\n",
|
||||
" if t==0: plt.ylabel('Predicted', fontsize=10)\n",
|
||||
"\n",
|
||||
" plt.savefig(plot_save_dir + 'plot_' + str(i) + '.png')\n",
|
||||
" plt.clf()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"fig=plt.figure(figsize=(15,10))\n",
|
||||
"columns = 3\n",
|
||||
"rows = 4\n",
|
||||
"for i in range(1,columns+rows +1):\n",
|
||||
" fig.add_subplot(rows,columns,i)\n",
|
||||
" plt.imshow(X_test[0,0,:,:,i-1],X_hat[0,0,:,:,i-1])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"X_hat[0][0][0][0][2]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.6.4"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
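For reference, the two scores written to prediction_scores.txt above amount to (averaging over sequences, all timesteps except the first, grid cells, and channels):

mse_model = mean over t >= 1 of (X_test[t] - X_hat[t])^2
mse_prev  = mean over t >= 1 of (X_test[t] - X_test[t-1])^2

so mse_prev is the persistence baseline that reuses the previous hour's frame as the prediction.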
|
||||
@@ -45,7 +45,9 @@ class SequenceGenerator(Iterator):
 
     def next(self):
         with self.lock:
-            index_array, current_index, current_batch_size = next(self.index_generator)
+            index_array = next(self.index_generator)
+            current_index = index_array[0]
+            current_batch_size = len(index_array)
         batch_x = np.zeros((current_batch_size, self.nt) + self.im_shape, np.float32)
         for i, idx in enumerate(index_array):
             idx = self.possible_starts[idx]
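The hunk above appears to adapt SequenceGenerator.next() to newer Keras releases, where the Iterator's index generator yields only the index array instead of an (index_array, current_index, current_batch_size) tuple, so the batch index and size are now derived from the array itself. A hedged usage sketch, reusing the training notebook's own call (the zero targets assume PredNet's default 'error' output mode):

from data_utils import SequenceGenerator

# nt=24 hourly steps per sequence, batch_size=4, hickle dumps produced by store_sequence
train_generator = SequenceGenerator('./data/train/x_train.hkl', './data/train/x_sources.hkl',
                                    24, batch_size=4, shuffle=True)
batch_x, batch_y = next(train_generator)
# batch_x: (4, 24, 20, 40, 12) frame sequences; batch_y: zero targets for the error model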
|
||||
2106
Project Final/Preprocessing/preprocess_data.ipynb
Normal file
File diff suppressed because it is too large
407
Project Final/Preprocessing/stations_unique.csv
Normal file
@@ -0,0 +1,407 @@
|
||||
,index,STATION_ID,STATION,BEGIN_DATE,END_DATE,STATE,COUNTRY,LATITUDE,LONGITUDE,ELEVATION,LAT_SCALED,LON_SCALED,TUPLES
|
||||
0,0,WBAN:00184,"ABBEVILLE CHRIS CRUSTA MEMORIAL AIRPORT, LA US",2013-12-31,2018-08-18,Louisiana,United States,29.976000000000006,-92.084,15.2,16,22,"(22, 16)"
|
||||
2,3,WBAN:14929,"ABERDEEN REGIONAL AIRPORT, SD US",1964-06-30,2018-08-18,South Dakota,United States,45.4433,-98.413,395.3,3,18,"(18, 3)"
|
||||
4,5,WBAN:13962,"ABILENE REGIONAL AIRPORT, TX US",1946-07-31,2018-08-18,Texas,United States,32.4105,-99.6822,545.6,14,17,"(17, 14)"
|
||||
10,11,WBAN:94975,"AINSWORTH MUNICIPAL AIRPORT, NE US",2005-12-31,2018-08-18,Nebraska,United States,42.57694,-100.00056,787.6,5,17,"(17, 5)"
|
||||
13,16,WBAN:14813,"AKRON FULTON INTERNATIONAL AIRPORT, OH US",1998-12-31,2018-08-18,Ohio,United States,41.0375,-81.46417,318.2,7,30,"(30, 7)"
|
||||
15,18,WBAN:53864,"ALABASTER SHELBY CO AIRPORT, AL US",2001-12-31,2018-08-18,Alabama,United States,33.178329999999995,-86.78166999999998,172.20000000000005,14,26,"(26, 14)"
|
||||
17,21,WBAN:23061,"ALAMOSA SAN LUIS VALLEY REGIONAL AIRPORT, CO US",1956-12-31,2018-08-18,Colorado,United States,37.4389,-105.8613,2296.1,10,13,"(13, 10)"
|
||||
22,26,WBAN:54921,"ALBION MUNICIPAL AIRPORT, NE US",2005-12-31,2018-08-18,Nebraska,United States,41.73,-98.05444,548.3000000000002,6,18,"(18, 6)"
|
||||
26,30,WBAN:00258,"ALEXANDER MUNICIPAL AIRPORT, NM US",2014-07-30,2018-08-18,New Mexico,United States,34.645,-106.834,1583.1,12,12,"(12, 12)"
|
||||
33,38,WBAN:24044,"ALLIANCE MUNICIPAL AIRPORT ASOS, NE US",2005-12-31,2018-08-18,Nebraska,United States,42.05730000000001,-102.8017,1197.6,6,15,"(15, 6)"
|
||||
37,42,WBAN:03049,"ALPINE CASPARIS MUNICIPAL AIRPORT, TX US",2005-12-31,2018-08-18,Texas,United States,30.38333,-103.68333,1375.6,16,14,"(14, 16)"
|
||||
40,45,WBAN:94299,"ALTURAS MUNICIPAL AIRPORT, CA US",2005-12-31,2018-08-18,California,United States,41.49139,-120.56444,1333.5,6,2,"(2, 6)"
|
||||
42,47,WBAN:53933,"ALVA REGIONAL AIRPORT, OK US",2005-12-31,2018-08-18,Oklahoma,United States,36.77306,-98.66972,449.3,10,18,"(18, 10)"
|
||||
43,49,WBAN:23047,"AMARILLO AIRPORT, TX US",1943-02-28,2018-08-18,Texas,United States,35.2295,-101.7042,1098.5,12,16,"(16, 12)"
|
||||
50,56,WBAN:63811,"ANDREWS MURPHY AIRPORT, NC US",2005-12-31,2018-08-18,North Carolina,United States,35.195,-83.86528,516.9,12,28,"(28, 12)"
|
||||
51,57,WBAN:00137,"ANGEL FIRE AIRPORT, NM US",2013-12-31,2018-08-18,New Mexico,United States,36.422,-105.29,2554.2000000000007,11,13,"(13, 11)"
|
||||
58,64,WBAN:04864,"ANTIGO LANGLADE CO AIRPORT, WI US",2005-12-31,2018-08-18,Wisconsin,United States,45.15417,-89.11055999999998,463.9,3,24,"(24, 3)"
|
||||
59,65,WBAN:12832,"APALACHICOLA AIRPORT, FL US",1944-12-31,2018-08-18,Florida,United States,29.73333,-85.03332999999998,5.8,17,27,"(27, 17)"
|
||||
82,91,WBAN:93730,"ATLANTIC CITY INTERNATIONAL AIRPORT, NJ US",1946-12-31,2018-08-18,New Jersey,United States,39.452020000000005,-74.56698999999998,18.3,8,35,"(35, 8)"
|
||||
84,94,WBAN:53932,"ATOKA MUNICIPAL AIRPORT, OK US",2005-12-31,2018-08-18,Oklahoma,United States,34.39833,-96.14806,179.8,12,19,"(19, 12)"
|
||||
91,102,WBAN:14605,"AUGUSTA STATE AIRPORT, ME US",1972-12-31,2018-08-18,Maine,United States,44.3155,-69.7972,107.0,4,38,"(38, 4)"
|
||||
94,105,WBAN:94281,"AURORA STATE AIRPORT, OR US",2005-12-31,2018-08-18,Oregon,United States,45.24861,-122.76861000000001,59.7,3,1,"(1, 3)"
|
||||
102,114,WBAN:54817,"BAD AXE HURON CO MEMORIAL AIRPORT, MI US",2005-12-31,2018-08-18,Michigan,United States,43.78028,-82.98555999999998,233.5,4,29,"(29, 4)"
|
||||
104,116,WBAN:53138,"BAKER 5 W, NV US",2004-05-08,2018-08-17,Nevada,United States,39.0118,-114.209,2016.9,8,7,"(7, 8)"
|
||||
105,117,WBAN:24130,"BAKER CITY AIRPORT, OR US",1956-12-31,2018-08-18,Oregon,United States,44.8428,-117.8086,1024.4,3,4,"(4, 3)"
|
||||
107,119,WBAN:23155,"BAKERSFIELD AIRPORT, CA US",1941-09-30,2018-08-18,California,United States,35.43440000000001,-119.0542,149.0,12,3,"(3, 12)"
|
||||
111,124,WBAN:14606,"BANGOR INTERNATIONAL AIRPORT, ME US",1956-12-31,2018-08-18,Maine,United States,44.7978,-68.8185,45.1,3,39,"(39, 3)"
|
||||
112,125,WBAN:14616,"BAR HARBOR AIRPORT, ME US",2005-12-31,2018-08-18,Maine,United States,44.45,-68.36667,26.8,4,39,"(39, 4)"
|
||||
113,126,WBAN:54833,"BARABOO WISCONSIN DELLS AIRPORT, WI US",2005-12-31,2018-08-18,Wisconsin,United States,43.52194,-89.77360999999998,297.5,5,24,"(24, 5)"
|
||||
124,137,WBAN:24119,"BATTTLE MOUNTAIN 4 SE, NV US",1972-12-31,2018-08-18,Nevada,United States,40.6118,-116.8917,1373.1,7,5,"(5, 7)"
|
||||
129,142,WBAN:00282,"BEACH AIRPORT, ND US",2014-08-18,2018-08-18,North Dakota,United States,46.925,-103.98200000000001,840.0,2,14,"(14, 2)"
|
||||
131,144,WBAN:94947,"BEATRICE MUNICIPAL AIRPORT, NE US",2005-12-31,2018-08-18,Nebraska,United States,40.301390000000005,-96.75389,403.6,7,19,"(19, 7)"
|
||||
140,155,WBAN:00127,"BEEVILLE MUNICIPAL AIRPORT, TX US",2013-12-31,2018-08-18,Texas,United States,28.35,-97.71700000000001,82.3,18,18,"(18, 18)"
|
||||
148,163,WBAN:00224,"BEND MUNICIPAL AIRPORT, OR US",2012-12-31,2018-08-18,Oregon,United States,44.095,-121.2,1055.2,4,2,"(2, 4)"
|
||||
149,165,WBAN:54781,"BENNINGTON MORSE STATE AIRPORT, VT US",2005-12-31,2018-08-18,Vermont,United States,42.89139,-73.24694000000001,251.8,5,36,"(36, 5)"
|
||||
163,179,WBAN:03044,"BIG SPRING MCMAHON WRINKLE AIRPORT, TX US",2005-12-31,2018-08-18,Texas,United States,32.2125,-101.52139,784.3000000000002,14,16,"(16, 14)"
|
||||
164,180,WBAN:24033,"BILLINGS INTERNATIONAL AIRPORT, MT US",1935-04-30,2018-08-18,Montana,United States,45.8069,-108.5422,1091.5,3,11,"(11, 3)"
|
||||
166,182,WBAN:04725,"BINGHAMTON GREATER AP, NY US",1956-12-31,2018-08-18,New York,United States,42.2068,-75.98,486.2,6,34,"(34, 6)"
|
||||
168,185,WBAN:23157,"BISHOP AIRPORT, CA US",1943-01-15,2018-08-18,California,United States,37.3711,-118.35799999999999,1250.3,10,4,"(4, 10)"
|
||||
169,186,WBAN:24011,"BISMARCK MUNICIPAL AIRPORT, ND US",1936-06-30,2018-08-18,North Dakota,United States,46.7825,-100.7572,503.2,2,16,"(16, 2)"
|
||||
171,188,WBAN:00286,"BLACK RIVER FALLS AREA AIRPORT, WI US",2013-12-31,2018-08-17,Wisconsin,United States,44.25100000000001,-90.855,255.1,4,23,"(23, 4)"
|
||||
172,189,WBAN:53881,"BLACKSBURG VIRGINIA TECH AIRPORT, VA US",2005-12-31,2018-08-18,Virginia,United States,37.2075,-80.40778,649.8000000000002,10,31,"(31, 10)"
|
||||
175,195,WBAN:03036,"BLANDING MUNICIPAL AIRPORT, UT US",2014-07-23,2018-08-18,Utah,United States,37.58278,-109.48306000000001,1787.7,10,10,"(10, 10)"
|
||||
176,196,WBAN:94793,"BLOCK ISLAND STATE AIRPORT, RI US",1989-12-31,2018-08-18,Rhode Island,United States,41.16806,-71.57777999999998,32.0,7,37,"(37, 7)"
|
||||
179,199,WBAN:23225,"BLUE CANYON AIRPORT, CA US",1956-12-31,2018-08-18,California,United States,39.2774,-120.7102,1608.1,8,2,"(2, 8)"
|
||||
181,201,WBAN:03859,"BLUEFIELD MERCER CO AIRPORT, WV US",1999-12-31,2018-08-18,West Virginia,United States,37.2978,-81.20366,870.5,10,30,"(30, 10)"
|
||||
182,203,WBAN:23158,"BLYTHE ASOS, CA US",1942-06-12,2018-08-18,California,United States,33.6186,-114.7142,120.4,13,6,"(6, 13)"
|
||||
185,207,WBAN:00263,"BOERNE STAGE FIELD AIRPORT, TX US",2014-07-30,2018-08-18,Texas,United States,29.724,-98.695,422.1,17,18,"(18, 17)"
|
||||
186,208,WBAN:24131,"BOISE AIR TERMINAL, ID US",1930-12-31,2018-08-18,Idaho,United States,43.5666,-116.2405,857.7,5,5,"(5, 5)"
|
||||
187,209,WBAN:63871,"BONIFAY TRI CO AIRPORT, FL US",2006-08-31,2018-08-18,Florida,United States,30.84583,-85.60139000000001,25.9,16,27,"(27, 16)"
|
||||
193,215,WBAN:14739,"BOSTON, MA US",1943-11-20,2018-08-18,Massachusetts,United States,42.36060000000001,-71.00970000000002,3.7,6,37,"(37, 6)"
|
||||
196,219,WBAN:00310,"BOUNDARY CO AIRPORT, ID US",2013-12-31,2018-08-18,Idaho,United States,48.726000000000006,-116.295,711.1,0,5,"(5, 0)"
|
||||
198,223,WBAN:93808,"BOWLING GREEN WARREN CO AIRPORT, KY US",1972-12-31,2018-08-18,Kentucky,United States,36.9647,-86.4238,160.9,10,26,"(26, 10)"
|
||||
199,224,WBAN:00353,"BOYSEN THERMOPOL, WY US",2012-12-31,2018-08-18,Wyoming,United States,43.467,-108.389,2225.0,5,11,"(11, 5)"
|
||||
200,225,WBAN:24132,"BOZEMAN GALLATIN FIELD AIRPORT, MT US",1972-12-31,2018-08-18,Montana,United States,45.788,-111.1608,1349.3,3,9,"(9, 3)"
|
||||
205,230,WBAN:00451,"BRANSON WEST MUNICIPAL EMERSON FIELD AIRPORT, MO US",2014-07-30,2018-08-18,Missouri,United States,36.6985,-93.4022,411.2,10,21,"(21, 10)"
|
||||
207,232,WBAN:00435,BRAZOS 451 OILP,2013-07-16,2018-08-18,,,28.5,-95.716,34.1,18,20,"(20, 18)"
|
||||
211,237,WBAN:00433,"BRIDGEPORT SONORA JUNCTION, CA US",2013-01-09,2018-08-18,California,United States,38.3557,-119.51899999999999,2057.1,9,3,"(3, 9)"
|
||||
213,239,WBAN:24180,"BRIGHAM CITY AIRPORT, UT US",2014-07-23,2018-08-18,Utah,United States,41.55222,-112.06222,1288.1,6,8,"(8, 6)"
|
||||
215,241,WBAN:94946,"BROKEN BOW MUNICIPAL AIRPORT, NE US",2005-12-31,2018-08-18,Nebraska,United States,41.43333,-99.63333,771.1,6,17,"(17, 6)"
|
||||
218,245,WBAN:94902,"BROOKINGS, SD US",2005-12-31,2018-08-18,South Dakota,United States,44.3,-96.8,502.3,4,19,"(19, 4)"
|
||||
222,250,WBAN:03721,"BRUNSWICK CO AIRPORT, NC US",2005-12-31,2018-08-18,North Carolina,United States,33.92917,-78.07472,7.6,13,32,"(32, 13)"
|
||||
225,254,WBAN:23159,"BRYCE CANYON AIRPORT, UT US",1956-12-31,2018-08-18,Utah,United States,37.70639,-112.14556,2312.2000000000007,10,8,"(8, 10)"
|
||||
228,259,WBAN:94037,"BUFFALO ASOS, SD US",1998-12-31,2018-08-18,South Dakota,United States,45.604440000000004,-103.54639,915.6,3,14,"(14, 3)"
|
||||
229,260,WBAN:94054,"BUFFALO JOHNSON CO AIRPORT, WY US",2005-12-31,2018-08-18,Wyoming,United States,44.38139,-106.72111000000001,1513.9,4,12,"(12, 4)"
|
||||
232,263,WBAN:03068,"BULLSEYE AUXILIARY AIRFIELD USAFA, CO US",2006-04-30,2018-08-18,Colorado,United States,38.76667,-104.3,1837.9,9,14,"(14, 9)"
|
||||
238,269,WBAN:04866,"BURLINGTON MUNICIPAL AIRPORT, WI US",2005-12-31,2018-08-18,Wisconsin,United States,42.69,-88.30360999999998,237.4,5,25,"(25, 5)"
|
||||
239,270,WBAN:94282,"BURLINGTON SKAGIT REGIONAL BAYVIEW AIRPORT, WA US",2005-12-31,2018-08-18,Washington,United States,48.46667,-122.41667,42.7,0,1,"(1, 0)"
|
||||
240,271,WBAN:14742,"BURLINGTON WEATHER SERVICE OFFICE AIRPORT, VT US",1947-12-31,2018-08-18,Vermont,United States,44.4683,-73.1499,100.6,4,36,"(36, 4)"
|
||||
246,277,WBAN:14817,"CADILLAC WEXFORD CO AIRPORT, MI US",1990-07-23,2018-08-18,Michigan,United States,44.28333,-85.41667,397.8,4,27,"(27, 4)"
|
||||
250,281,WBAN:54743,"CALDWELL ESSEX CO AIRPORT, NJ US",2005-12-31,2018-08-18,New Jersey,United States,40.87639,-74.28305999999998,52.7,7,35,"(35, 7)"
|
||||
251,282,WBAN:94195,"CALDWELL INDUSTRIAL AIRPORT, ID US",2005-12-31,2018-08-18,Idaho,United States,43.65,-116.63333,740.4,4,5,"(5, 4)"
|
||||
254,285,WBAN:12986,"CALHOUN CO AIRPORT, TX US",2005-12-31,2018-08-18,Texas,United States,28.65417,-96.68139000000001,9.1,17,19,"(19, 17)"
|
||||
255,287,WBAN:23136,"CAMARILLO AIRPORT, CA US",1956-12-31,2018-08-18,California,United States,34.21667,-119.08333,23.5,13,3,"(3, 13)"
|
||||
265,300,WBAN:54923,"CANBY MYERS FIELD AIRPORT, MN US",2006-10-17,2018-08-18,Minnesota,United States,44.729440000000004,-96.26611,363.6,3,19,"(19, 3)"
|
||||
266,301,WBAN:00285,"CANDO MUNICIPAL AIRPORT, ND US",2013-12-31,2018-08-18,North Dakota,United States,48.48,-99.236,450.2,0,17,"(17, 0)"
|
||||
269,305,WBAN:93729,"CAPE HATTERAS BILLY MITCHELL FIELD, NC US",1957-02-28,2018-08-18,North Carolina,United States,35.23260000000001,-75.6219,3.4,12,34,"(34, 12)"
|
||||
270,306,WBAN:93810,"CARBONDALE SOUTHERN ILLINOIS AIRPORT, IL US",2005-12-31,2018-08-18,Illinois,United States,37.779720000000005,-89.24972,123.7,10,24,"(24, 10)"
|
||||
273,309,WBAN:03177,"CARLSBAD MCCLELLAN PALOMAR AIRPORT, CA US",2005-12-31,2018-08-18,California,United States,33.12806,-117.27944,100.0,14,4,"(4, 14)"
|
||||
280,317,WBAN:03914,"CASA GRANDE MUNICIPAL AIRPORT, AZ US",2005-12-31,2018-08-18,Arizona,United States,32.95,-111.76666999999999,445.6,14,8,"(8, 14)"
|
||||
281,319,WBAN:24089,"CASPER NATRONA CO AIRPORT, WY US",1939-12-31,2018-08-18,Wyoming,United States,42.89779,-106.47371000000001,1620.9,5,12,"(12, 5)"
|
||||
284,322,WBAN:00465,"CATTARAUGUS CO OLEAN AIRPORT, NY US",2012-12-31,2018-08-18,New York,United States,42.24122,-78.37136,651.1,6,32,"(32, 6)"
|
||||
285,323,WBAN:00283,"CAVALIER MUNICIPAL AIRPORT, ND US",2013-12-31,2018-08-18,North Dakota,United States,48.784,-97.632,272.2,0,18,"(18, 0)"
|
||||
290,330,WBAN:53887,"CENTRALIA MUNICIPAL AIRPORT, IL US",2005-12-31,2018-08-18,Illinois,United States,38.514720000000004,-89.09194000000001,162.8,9,24,"(24, 9)"
|
||||
291,332,WBAN:24017,"CHADRON MUNICIPAL AIRPORT, NE US",1972-12-31,2018-08-18,Nebraska,United States,42.8374,-103.0981,1004.0,5,15,"(15, 5)"
|
||||
292,333,WBAN:04114,"CHALLIS AIRPORT, ID US",2001-08-31,2018-08-18,Idaho,United States,44.52278,-114.215,1534.1,4,7,"(7, 4)"
|
||||
293,335,WBAN:94943,"CHAMBERLAIN MUNICIPAL AIRPORT, SD US",2005-12-31,2018-08-18,South Dakota,United States,43.76667,-99.31833,519.1,4,17,"(17, 4)"
|
||||
300,344,WBAN:13880,"CHARLESTON INTL. AIRPORT, SC US",1944-12-31,2018-08-18,South Carolina,United States,32.899429999999995,-80.04075,12.2,14,31,"(31, 14)"
|
||||
301,345,WBAN:13866,"CHARLESTON YEAGER AIRPORT, WV US",1956-12-31,2018-08-18,West Virginia,United States,38.3794,-81.59,277.40000000000003,9,30,"(30, 9)"
|
||||
317,364,WBAN:00143,"CHEYENNE CO MUNICIPAL AIRPORT, KS US",2012-12-31,2018-08-18,Kansas,United States,39.766999999999996,-101.8,1040.3,8,15,"(15, 8)"
|
||||
327,374,WBAN:93203,"CHICO ARMY FLYING SCHOOL, CA US",2005-12-31,2018-08-18,California,United States,39.8,-121.85,82.9,8,1,"(1, 8)"
|
||||
330,377,WBAN:13301,"CHILLICOTHE 22 ENE, MO US",2005-06-10,2018-08-16,Missouri,United States,39.86680000000001,-93.147,253.9,8,22,"(22, 8)"
|
||||
331,378,WBAN:53916,"CHILLICOTHE AGRI SCIENCE CENTER, MO US",2005-12-31,2018-08-18,Missouri,United States,39.82333,-93.57917,234.4,8,21,"(21, 8)"
|
||||
332,379,WBAN:93104,"CHINA LAKE NAF, CA US",1956-12-31,2018-08-18,California,United States,35.6875,-117.6931,679.7,11,4,"(4, 11)"
|
||||
345,395,WBAN:94605,"CLAYTON LAKE RAMOS, ME US",2005-12-31,2018-08-18,Maine,United States,46.61667,-69.53332999999999,304.8,2,38,"(38, 2)"
|
||||
346,396,WBAN:23051,"CLAYTON MUNICIPAL AIR PARK, NM US",1956-12-31,2018-08-18,New Mexico,United States,36.4486,-103.1539,1511.8,11,14,"(14, 11)"
|
||||
348,398,WBAN:92828,"CLEARWATER AIR PARK, FL US",2018-06-30,2018-08-18,Florida,United States,27.977214,-82.759057,21.6,18,29,"(29, 18)"
|
||||
354,404,WBAN:14820,"CLEVELAND HOPKINS INTERNATIONAL AIRPORT, OH US",1956-12-31,2018-08-18,Ohio,United States,41.4057,-81.852,238.0,6,30,"(30, 6)"
|
||||
357,407,WBAN:03027,"CLINES CORNERS, NM US",2005-12-31,2018-08-18,New Mexico,United States,35.00278,-105.66278,2159.8,12,13,"(13, 12)"
|
||||
358,408,WBAN:00222,"CLINTON MEMORIAL AIRPORT, MO US",2013-12-31,2018-08-18,Missouri,United States,38.35,-93.68299999999999,251.2,9,21,"(21, 9)"
|
||||
365,417,WBAN:23008,"CLOVIS CANNON AFB, NM US",1943-01-24,2018-08-18,New Mexico,United States,34.38333,-103.31667,1309.1,13,14,"(14, 13)"
|
||||
368,420,WBAN:12867,"COCOA BEACH PATRICK AFB, FL US",1945-02-28,2018-08-18,Florida,United States,28.23333,-80.60000000000002,2.4,18,30,"(30, 18)"
|
||||
369,421,WBAN:24045,"CODY MUNICIPAL AIRPORT, WY US",2005-12-31,2018-08-18,Wyoming,United States,44.51667,-109.01666999999999,1552.0,4,10,"(10, 4)"
|
||||
370,422,WBAN:24136,"COEUR D ALENE AIR TERMINAL, ID US",2005-12-31,2018-08-18,Idaho,United States,47.76667,-116.81667,703.2,1,5,"(5, 1)"
|
||||
374,426,WBAN:00276,"COLEMAN MUNICIPAL AIRPORT, TX US",2012-12-31,2018-08-18,Texas,United States,31.840999999999998,-99.404,517.2,15,17,"(17, 15)"
|
||||
377,429,WBAN:53129,"COLORADO CITY MUNICIPAL AIRPORT, AZ US",2014-02-26,2018-08-18,Arizona,United States,36.959720000000004,-113.01388999999999,1485.6,10,7,"(7, 10)"
|
||||
380,433,WBAN:00206,"COLUMBIA AIRPORT, CA US",2013-12-31,2018-08-18,California,United States,38.033,-120.417,646.2,9,2,"(2, 9)"
|
||||
383,436,WBAN:03945,"COLUMBIA REGIONAL AIRPORT, MO US",1969-10-31,2018-08-18,Missouri,United States,38.8169,-92.2183,272.2,9,22,"(22, 9)"
|
||||
385,438,WBAN:13803,"COLUMBUS BAKALAR MUNICIPAL AIRPORT, IN US",1956-12-31,2018-08-18,Indiana,United States,39.26667,-85.9,199.9,8,27,"(27, 8)"
|
||||
389,442,WBAN:13812,"COLUMBUS RICKENBACKER, OH US",1942-07-31,2018-08-18,Ohio,United States,39.81667,-82.93333,226.8,8,29,"(29, 8)"
|
||||
395,448,WBAN:13984,"CONCORDIA ASOS, KS US",1962-05-31,2018-08-18,Kansas,United States,39.5514,-97.6508,447.8,8,18,"(18, 8)"
|
||||
397,451,WBAN:94057,"CONVERSE CO AIRPORT ASOS, WY US",2005-12-31,2018-08-18,Wyoming,United States,42.79611,-105.38028,1504.5,5,13,"(13, 5)"
|
||||
400,455,WBAN:00327,"COOPERSTOWN MUNICIPAL AIRPORT, ND US",2015-09-25,2018-08-18,North Dakota,United States,47.423,-98.106,434.0,1,18,"(18, 1)"
|
||||
401,456,WBAN:04141,"COOS BAY 8 SW, OR US",2008-08-18,2018-08-16,Oregon,United States,43.2718,-124.3186,3.7,5,0,"(0, 5)"
|
||||
403,458,WBAN:00234,"CORNING MUNICIPAL AIRPORT, AR US",2014-07-31,2018-08-18,Arkansas,United States,36.4,-90.65,89.0,11,23,"(23, 11)"
|
||||
412,468,WBAN:04908,"COUNCIL BLUFFS MUNICIPAL AIRPORT, IA US",2005-12-31,2018-08-18,Iowa,United States,41.259440000000005,-95.75972,381.9,7,20,"(20, 7)"
|
||||
414,475,WBAN:94977,"CRANE LAKE, MN US",2005-12-31,2018-08-18,Minnesota,United States,48.26667,-92.48333000000001,341.1,0,22,"(22, 0)"
|
||||
415,476,WBAN:24286,"CRESCENT CITY MCNAMARA AIRPORT, CA US",1972-12-31,2018-08-18,California,United States,41.78028,-124.23666999999999,17.1,6,0,"(0, 6)"
|
||||
416,477,WBAN:04915,"CRESTON MUNICIPAL AIRPORT, IA US",2005-12-31,2018-08-18,Iowa,United States,41.007220000000004,-94.36306,394.4,7,21,"(21, 7)"
|
||||
419,480,WBAN:03073,"CROCKETT HOUSTON CO AIRPORT, TX US",2013-12-31,2018-08-18,Texas,United States,31.30694,-95.40389,106.1,15,20,"(20, 15)"
|
||||
421,482,WBAN:00287,"CROSBY MUNICIPAL AIRPORT, ND US",2012-12-31,2018-08-18,North Dakota,United States,48.928999999999995,-103.297,594.1,0,14,"(14, 0)"
|
||||
423,485,WBAN:03847,"CROSSVILLE MEMORIAL AIRPORT, TN US",1972-12-31,2018-08-18,Tennessee,United States,35.95090000000001,-85.0813,569.1,11,27,"(27, 11)"
|
||||
425,489,WBAN:63839,"CULLMAN FOLSOM FIELD AIRPORT, AL US",2005-12-31,2018-08-17,Alabama,United States,34.26889,-86.85833000000001,293.5,13,26,"(26, 13)"
|
||||
427,491,WBAN:93798,"CULPEPER REGIONAL AIRPORT, VA US",2005-12-31,2018-08-18,Virginia,United States,38.52667,-77.85861,96.3,9,32,"(32, 9)"
|
||||
428,492,WBAN:00316,"CUMBERLAND MUNICIPAL AIRPORT, WI US",2013-12-31,2018-08-18,Wisconsin,United States,45.506,-91.981,378.3,3,22,"(22, 3)"
|
||||
432,496,WBAN:94032,"CUSTER CO AIRPORT, SD US",2005-12-31,2018-08-18,South Dakota,United States,43.73306,-103.61139,1690.1,4,14,"(14, 4)"
|
||||
433,497,WBAN:24137,"CUT BANK AIRPORT, MT US",1942-11-30,2018-08-18,Montana,United States,48.6033,-112.3752,1169.8,0,8,"(8, 0)"
|
||||
434,498,WBAN:23161,"DAGGETT AIRPORT, CA US",1948-12-31,2018-08-18,California,United States,34.8536,-116.7858,584.3000000000002,12,5,"(5, 12)"
|
||||
441,506,WBAN:24219,"DALLESPORT AIRPORT, WA US",1956-12-31,2018-08-18,Oregon,United States,45.6194,-121.1661,71.60000000000002,3,2,"(2, 3)"
|
||||
443,508,WBAN:54734,"DANBURY MUNICIPAL AIRPORT, CT US",2005-12-31,2018-08-18,Connecticut,United States,41.371390000000005,-73.48277999999998,139.3,6,35,"(35, 6)"
|
||||
444,509,WBAN:94704,"DANSVILLE MUNICIPAL AIRPORT, NY US",1972-12-31,2018-08-18,New York,United States,42.57083,-77.71333,208.8,5,32,"(32, 5)"
|
||||
449,514,WBAN:04223,"DARRINGTON 21 NNE, WA US",2003-04-02,2018-08-16,Washington,United States,48.5405,-121.446,124.1,0,2,"(2, 0)"
|
||||
452,517,WBAN:23109,"DAVIS MONTHAN AFB, AZ US",1941-07-16,2018-08-18,Arizona,United States,32.16667,-110.88333,824.2,14,9,"(9, 14)"
|
||||
459,524,WBAN:04871,"DE KALB TAYLOR MUNICIPAL AIRPORT, IL US",2005-12-31,2018-08-18,Illinois,United States,41.931670000000004,-88.70805999999997,278.90000000000003,6,25,"(25, 6)"
|
||||
460,525,WBAN:53925,"DE QUEEN SEVIER CO AIRPORT, AR US",2003-12-31,2018-08-18,Arkansas,United States,34.05,-94.40083,108.2,13,21,"(21, 13)"
|
||||
462,527,WBAN:03976,"DE RIDDER BEAUREGARD PARISH AIRPORT, LA US",2005-12-31,2018-08-18,Louisiana,United States,30.83333,-93.33333,62.2,16,21,"(21, 16)"
|
||||
464,529,WBAN:53964,"DECATUR MUNICIPAL AIRPORT, TX US",2005-12-31,2018-08-18,Texas,United States,33.254439999999995,-97.58056,319.1,13,18,"(18, 13)"
|
||||
466,531,WBAN:04916,"DECORAH MUNICIPAL AIRPORT, IA US",2005-12-31,2018-08-18,Iowa,United States,43.27528,-91.73917,352.7,5,23,"(23, 5)"
|
||||
470,535,WBAN:22001,"DEL RIO LAUGHLIN AFB, TX US",1943-02-16,2018-08-18,Texas,United States,29.366670000000003,-100.78333,329.8,17,16,"(16, 17)"
|
||||
472,537,WBAN:00315,"DELAWARE MUNICIPAL JIM MOORE FIELD AIRPORT, OH US",2012-12-31,2018-08-18,Ohio,United States,40.28,-83.11500000000002,288.0,7,29,"(29, 7)"
|
||||
473,539,WBAN:23162,"DELTA FAA AIRPORT, UT US",2014-08-19,2018-08-18,Utah,United States,39.38333,-112.51666999999999,1450.5,8,8,"(8, 8)"
|
||||
474,540,WBAN:23078,"DEMING MUNICIPAL AIRPORT, NM US",2005-12-31,2018-08-18,New Mexico,United States,32.26222,-107.72056,1310.9,14,11,"(11, 14)"
|
||||
475,541,WBAN:00445,"DEMOPOLIS MUNICIPAL AIRPORT, AL US",2014-07-30,2018-08-18,Alabama,United States,32.46383,-87.95405,34.1,14,25,"(25, 14)"
|
||||
476,542,WBAN:04139,"DENIO 52 WSW, NV US",2008-06-15,2018-08-16,Nevada,United States,41.84840000000001,-119.6357,1981.2,6,3,"(3, 6)"
|
||||
481,548,WBAN:14933,"DES MOINES INTERNATIONAL AIRPORT, IA US",1956-12-31,2018-08-18,Iowa,United States,41.5338,-93.65299999999999,291.7,6,21,"(21, 6)"
|
||||
482,549,WBAN:03104,"DESERT RESORTS REGIONAL AIRPORT, CA US",1943-07-11,2018-08-18,California,United States,33.626670000000004,-116.15943999999999,-36.0,13,5,"(5, 13)"
|
||||
483,550,WBAN:53853,"DESTIN FORT WALTON BEACH AIRPORT, FL US",2005-12-31,2018-08-18,Florida,United States,30.4,-86.47166999999999,6.7,16,26,"(26, 16)"
|
||||
492,560,WBAN:24138,"DILLON AIRPORT, MT US",1956-12-31,2018-08-18,Montana,United States,45.2575,-112.5544,1585.0,3,8,"(8, 3)"
|
||||
493,562,WBAN:00444,"DIXON AIRPORT, WY US",2013-12-31,2018-08-18,Wyoming,United States,41.037440000000004,-107.49252,1996.1,7,11,"(11, 7)"
|
||||
494,563,WBAN:04978,"DODGE CENTER AIRPORT, MN US",2005-12-31,2018-08-18,Minnesota,United States,44.01778,-92.83139,397.8,4,22,"(22, 4)"
|
||||
495,564,WBAN:13985,"DODGE CITY REGIONAL AIRPORT, KS US",1943-04-18,2018-08-18,Kansas,United States,37.7686,-99.9678,787.0,10,17,"(17, 10)"
|
||||
497,567,WBAN:93026,"DOUGLAS BISBEE INL AIRPORT, AZ US",1972-12-31,2018-08-18,Arizona,Mexico,31.4583,-109.6061,1251.2,15,10,"(10, 15)"
|
||||
499,569,WBAN:13707,"DOVER AFB, DE US",1942-11-30,2018-08-18,Delaware,United States,39.13333,-75.46667,8.5,8,34,"(34, 8)"
|
||||
500,571,WBAN:54786,"DOYLESTOWN AIRPORT, PA US",2005-12-31,2018-08-18,Pennsylvania,United States,40.33,-75.1225,120.1,7,34,"(34, 7)"
|
||||
502,574,WBAN:54844,"DRUMMOND ISLAND AIRPORT, MI US",2006-08-30,2018-08-18,Michigan,United States,46.007220000000004,-83.74278000000001,202.7,2,28,"(28, 2)"
|
||||
505,577,WBAN:04787,"DUBOIS JEFFERSON CO AIRPORT, PA US",1972-12-31,2018-08-18,Pennsylvania,United States,41.17833,-78.89889000000001,552.9,7,32,"(32, 7)"
|
||||
506,578,WBAN:00443,"DUBOIS MUNICIPAL AIRPORT, WY US",2013-05-13,2018-08-18,Wyoming,United States,43.54836,-109.69025,2224.1,5,10,"(10, 5)"
|
||||
508,580,WBAN:24103,"DUGWAY PROVING GROUNDS, UT US",2006-01-02,2018-08-15,Utah,United States,40.18333,-112.93333,1325.6,7,8,"(8, 7)"
|
||||
511,584,WBAN:03070,"DUMAS MOORE CO AIRPORT, TX US",2005-12-31,2018-08-18,Texas,United States,35.858059999999995,-102.01306,1129.3,11,15,"(15, 11)"
|
||||
512,585,WBAN:14747,"DUNKIRK CHAUTAUQUA CO AIRPORT, NY US",1998-12-31,2018-08-18,New York,United States,42.49333,-79.27221999999998,203.0,5,31,"(31, 5)"
|
||||
513,586,WBAN:00298,"DUPONT LAPEER AIRPORT, MI US",2012-12-31,2018-08-18,Michigan,United States,43.067,-83.26700000000002,254.2,5,29,"(29, 5)"
|
||||
517,592,WBAN:03809,"DYERSBURG MUNICIPAL AIRPORT, TN US",1943-04-11,2018-08-18,Tennessee,United States,36.0002,-89.4094,91.4,11,24,"(24, 11)"
|
||||
518,595,WBAN:23063,"EAGLE CO AIRPORT, CO US",1956-12-31,2018-08-18,Colorado,United States,39.65,-106.91667,1980.3,8,12,"(12, 8)"
|
||||
519,596,WBAN:00480,"EAGLE RANGE WEATHER SERVICE OFFICE, UT US",2014-07-30,2018-08-16,Utah,United States,41.05,-113.06,1292.0,7,7,"(7, 7)"
|
||||
522,599,WBAN:00254,EAST CAMERON 278 OIL PLATFORM,2013-12-31,2018-08-18,,,28.433000000000003,-92.883,224.0,18,22,"(22, 18)"
|
||||
528,606,WBAN:14991,"EAU CLAIRE REGIONAL AIRPORT, WI US",1956-12-31,2018-08-18,Wisconsin,United States,44.8665,-91.4879,269.7,3,23,"(23, 3)"
|
||||
531,609,WBAN:23114,"EDWARDS AFB, CA US",1941-11-30,2018-08-18,California,United States,34.9,-117.86667,704.4,12,4,"(4, 12)"
|
||||
532,610,WBAN:93816,"EFFINGHAM CO MEMORIAL AIRPORT, IL US",2005-12-31,2018-08-18,Illinois,United States,39.07028,-88.53332999999998,178.9,8,25,"(25, 8)"
|
||||
534,612,WBAN:93992,"EL DORADO S AR REGIONAL AIRPORT, AR US",1972-12-31,2018-08-18,Arkansas,United States,33.22083,-92.81417,76.8,14,22,"(22, 14)"
|
||||
536,614,WBAN:23044,"EL PASO INTERNATIONAL AIRPORT, TX US",1941-03-31,2018-08-18,Texas,United States,31.81111,-106.37583000000001,1194.2,15,12,"(12, 15)"
|
||||
539,618,WBAN:00182,"ELBOW LAKE MUNICIPAL PRIDE OF THE PRAIRIE AIRPORT, MN US",2014-07-30,2018-08-18,Minnesota,United States,45.986,-95.992,367.3,2,20,"(20, 2)"
|
||||
541,620,WBAN:13786,"ELIZABETH CITY COAST GUARD AIR STATION, NC US",1948-12-31,2018-08-18,North Carolina,United States,36.26056,-76.175,4.0,11,34,"(34, 11)"
|
||||
542,621,WBAN:00210,"ELIZABETHTON MUNICIPAL AIRPORT, TN US",2014-07-30,2018-08-18,Tennessee,United States,36.367,-82.167,486.2,11,29,"(29, 11)"
|
||||
546,625,WBAN:93076,"ELKHART, KS US",2005-12-31,2018-08-18,Kansas,United States,37.0,-101.88333,1105.8,10,15,"(15, 10)"
|
||||
547,626,WBAN:03733,"ELKINS 21 ENE, WV US",2003-11-16,2018-08-16,West Virginia,United States,39.01300000000001,-79.4743,1033.3,8,31,"(31, 8)"
|
||||
549,628,WBAN:24121,"ELKO REGIONAL AIRPORT, NV US",1956-12-31,2018-08-18,Nevada,United States,40.8288,-115.7886,1533.1,7,6,"(6, 7)"
|
||||
550,629,WBAN:24220,"ELLENSBURG BOWERS FIELD, WA US",1988-01-05,2018-08-18,Washington,United States,47.03389,-120.53028,538.3,1,2,"(2, 1)"
|
||||
551,630,WBAN:24006,"ELLSWORTH AFB, SD US",1939-01-31,2018-08-18,South Dakota,United States,44.15,-103.1,999.1,4,15,"(15, 4)"
|
||||
552,631,WBAN:14748,"ELMIRA CORNING REGIONAL AIRPORT, NY US",1972-12-31,2018-08-18,New York,United States,42.159440000000004,-76.89193999999998,291.1,6,33,"(33, 6)"
|
||||
553,632,WBAN:23154,"ELY AIRPORT, NV US",1956-12-31,2018-08-18,Nevada,United States,39.2952,-114.8466,1908.7,8,6,"(6, 8)"
|
||||
556,635,WBAN:13989,"EMPORIA ASOS, KS US",1972-12-31,2018-08-18,Kansas,United States,38.3291,-96.1946,364.5,9,19,"(19, 9)"
|
||||
559,638,WBAN:53986,"ENID WOODRING AIRPORT, OK US",2005-12-31,2018-08-18,Oklahoma,United States,36.38333,-97.8,355.7,11,18,"(18, 11)"
|
||||
560,639,WBAN:24141,"EPHRATA MUNICIPAL AIRPORT, WA US",1941-12-31,2018-08-18,Washington,United States,47.3078,-119.5154,381.6,1,3,"(3, 1)"
|
||||
564,643,WBAN:94853,"ESCANABA DELTA CO AIRPORT, MI US",2005-12-31,2018-08-18,Michigan,United States,45.73333,-87.08332999999998,181.1,3,26,"(26, 3)"
|
||||
565,644,WBAN:94971,"ESTHERVILLE MUNICIPAL AIRPORT, IA US",2005-12-31,2018-08-18,Iowa,United States,43.40111,-94.74722,401.4,5,20,"(20, 5)"
|
||||
567,646,WBAN:00304,EUGENE ISLAND OIL PLATFORM,2012-12-31,2018-08-18,,,28.633000000000006,-91.48299999999999,28.0,18,23,"(23, 18)"
|
||||
568,647,WBAN:24221,"EUGENE MAHLON SWEET FIELD, OR US",1956-12-31,2018-08-18,Oregon,United States,44.1278,-123.2206,107.6,4,0,"(0, 4)"
|
||||
570,649,WBAN:03170,"EUREKA AIRPORT, NV US",2005-12-31,2018-08-18,Nevada,United States,39.6013,-116.0055,1809.3,8,5,"(5, 8)"
|
||||
571,650,WBAN:24213,"EUREKA WEATHER FORECAST OFFICE WOODLEY ISLAND, CA US",2005-12-31,2018-08-17,California,United States,40.8097,-124.1602,6.1000000000000005,7,0,"(0, 7)"
|
||||
572,651,WBAN:04111,"EVANSTON UINTA CO BURNS FIELD, WY US",1972-12-31,2018-08-18,Wyoming,United States,41.27306,-111.03056000000001,2183.3,7,9,"(9, 7)"
|
||||
577,656,WBAN:24114,"FAIRCHILD AFB, WA US",1940-03-31,2018-08-18,Washington,United States,47.63333,-117.65,750.1,1,4,"(4, 1)"
|
||||
578,657,WBAN:00220,"FAIRFIELD CO AIRPORT, SC US",2013-12-31,2018-08-18,South Carolina,United States,34.315000000000005,-81.10900000000002,176.20000000000005,13,30,"(30, 13)"
|
||||
579,658,WBAN:04925,"FAIRFIELD MUNICIPAL AIRPORT, IA US",2005-12-31,2018-08-18,Iowa,United States,41.05306,-91.97889,243.5,7,22,"(22, 7)"
|
||||
582,662,WBAN:94056,"FAITH MUNICIPAL AIRPORT, SD US",2005-12-31,2018-08-18,South Dakota,United States,45.031940000000006,-102.01916999999999,786.4,3,15,"(15, 3)"
|
||||
583,663,WBAN:00270,"FAITH RANCH AIRPORT, TX US",2014-01-06,2018-08-18,Texas,United States,28.209,-100.01899999999999,236.2,18,17,"(17, 18)"
|
||||
584,665,WBAN:93102,"FALLON NAAS, NV US",1956-12-31,2018-08-18,Nevada,United States,39.41667,-118.71667,1199.1,8,3,"(3, 8)"
|
||||
585,666,WBAN:94957,"FALLS CITY BRENNER FIELD, NE US",1999-12-31,2018-08-18,Nebraska,United States,40.08028,-95.59194000000001,298.7,8,20,"(20, 8)"
|
||||
589,670,WBAN:94969,"FARIBAULT MUNICIPAL AIRPORT, MN US",2005-12-31,2018-08-18,Minnesota,United States,44.33333,-93.31667,323.1,4,21,"(21, 4)"
|
||||
591,672,WBAN:23090,"FARMINGTON FOUR CORNERS REGIONAL AIRPORT, NM US",1956-12-31,2018-08-18,New Mexico,United States,36.74361,-108.22917,1674.9,10,11,"(11, 10)"
|
||||
592,673,WBAN:93996,"FARMINGTON REGIONAL AIRPORT, MO US",1995-02-09,2018-08-18,Missouri,United States,37.76083,-90.42833,288.6,10,23,"(23, 10)"
|
||||
593,674,WBAN:03707,"FARMVILLE REGIONAL AIRPORT, VA US",2005-12-31,2018-08-18,Virginia,United States,37.3575,-78.43777999999998,127.1,10,32,"(32, 10)"
|
||||
598,679,WBAN:53922,"FAYETTEVILLE SPRINGDALE NW AR REGL AIRPORT, AR US",2005-12-31,2018-08-18,Arkansas,United States,36.28333,-94.3,392.3,11,21,"(21, 11)"
|
||||
599,680,WBAN:13762,"FENTRESS NAVAL AUXILIARY FIELD, VA US",2007-05-31,2018-08-18,Virginia,United States,36.695,-76.13556,4.9,10,34,"(34, 10)"
|
||||
600,681,WBAN:94966,"FERGUS FALLS AIRPORT, MN US",2005-12-31,2018-08-18,Minnesota,United States,46.28333,-96.15,360.6,2,19,"(19, 2)"
|
||||
601,682,WBAN:00326,"FERNANDINA BEACH MUNICIPAL AIRPORT, FL US",2013-12-31,2018-08-18,Florida,United States,30.616999999999997,-81.467,5.2,16,30,"(30, 16)"
|
||||
602,683,WBAN:00237,"FIELD OF DREAMS AIRPORT, MN US",2014-07-30,2018-08-18,Minnesota,United States,46.023,-92.895,311.2,2,22,"(22, 2)"
|
||||
603,684,WBAN:14825,"FINDLAY AIRPORT, OH US",1972-12-31,2018-08-18,Ohio,United States,41.01361,-83.66861,243.8,7,28,"(28, 7)"
|
||||
604,685,WBAN:04780,"FITCHBURG MUNICIPAL AIRPORT, MA US",1956-12-31,2018-08-18,Massachusetts,United States,42.55194,-71.75583,106.1,5,37,"(37, 5)"
|
||||
607,688,WBAN:00485,"FLAGLER CO AIRPORT, FL US",2012-12-31,2018-08-17,Florida,United States,29.46738,-81.20633000000001,10.1,17,30,"(30, 17)"
|
||||
612,693,WBAN:53889,"FLORA MUNICIPAL AIRPORT, IL US",2005-12-31,2018-08-18,Illinois,United States,38.66472,-88.45277999999998,143.9,9,25,"(25, 9)"
|
||||
617,698,WBAN:13829,"FORT BENNING LAWSON FIELD, AL US",1939-04-30,2018-08-18,Alabama,United States,32.35,-85.0,70.7,14,27,"(27, 14)"
|
||||
620,701,WBAN:00449,"FORT BRIDGER AIRPORT, WY US",2013-07-31,2018-08-18,Wyoming,United States,41.39333,-110.40597,2145.5,6,9,"(9, 6)"
|
||||
621,702,WBAN:13806,"FORT CAMPBELL ARMY AIR FIELD, KY US",1943-07-14,2018-08-18,Kentucky,United States,36.66667,-87.48333000000001,174.70000000000005,11,26,"(26, 11)"
|
||||
622,703,WBAN:94015,"FORT CARSON BUTTS ARMY AIR FIELD, CO US",1966-09-14,2018-08-18,Colorado,United States,38.67833,-104.75667,1779.4,9,13,"(13, 9)"
|
||||
624,705,WBAN:94933,"FORT DODGE OZARK AIRLINES, IA US",2005-12-31,2018-08-18,Iowa,United States,42.55,-94.18333,352.3,5,21,"(21, 5)"
|
||||
628,709,WBAN:03124,"FORT HUACHUCA SIERRA VISTA MUNICIPAL AIRPORT, AZ US",1954-10-10,2018-08-18,Arizona,United States,31.58833,-110.34416999999999,1438.4,15,9,"(9, 15)"
|
||||
631,712,WBAN:00162,"FORT MORGAN MUNICIPAL AIRPORT, CO US",2013-12-31,2018-08-18,Colorado,United States,40.333,-103.8,1393.2,7,14,"(14, 7)"
|
||||
632,713,WBAN:63847,"FORT PAYNE ISBELL FIELD, AL US",2005-12-31,2018-08-18,Alabama,United States,34.473890000000004,-85.72139,267.3,12,27,"(27, 12)"
|
||||
633,714,WBAN:12895,"FORT PIERCE ST LUCIE CO INTERNATIONAL AIRPORT, FL US",2005-12-31,2018-08-17,Florida,United States,27.49806,-80.37666999999998,7.3,19,31,"(31, 19)"
|
||||
635,716,WBAN:53988,"FORT POLK FULLERTON LANDING STRIP, LA US",2005-12-31,2018-08-18,Louisiana,United States,31.021700000000006,-92.9107,94.5,15,22,"(22, 15)"
|
||||
636,718,WBAN:13947,"FORT RILEY MARSHALL ARMY AIR FIELD, KS US",1938-08-16,2018-08-18,Kansas,United States,39.05,-96.76667,324.6,8,19,"(19, 8)"
|
||||
637,720,WBAN:53861,"FORT RUCKER LOWE ARMY HELIPORT, AL US",2008-07-16,2018-08-18,Alabama,United States,31.35583,-85.75111,74.4,15,27,"(27, 15)"
|
||||
639,722,WBAN:13964,"FORT SMITH REGIONAL AIRPORT, AR US",1946-12-31,2018-08-18,Arkansas,United States,35.333,-94.3625,136.9,12,21,"(21, 12)"
|
||||
640,723,WBAN:03875,"FORT STEWART WRIGHT, GA US",2006-01-02,2018-08-18,Georgia,United States,31.88333,-81.56667,13.7,15,30,"(30, 15)"
|
||||
641,724,WBAN:23091,"FORT STOCKTON PECOS CO AIRPORT, TX US",2005-12-31,2018-08-18,Texas,United States,30.91194,-102.91667,917.4,16,15,"(15, 16)"
|
||||
642,725,WBAN:14827,"FORT WAYNE INTERNATIONAL AIRPORT, IN US",1941-10-31,2018-08-18,Indiana,United States,40.9705,-85.2063,241.1,7,27,"(27, 7)"
|
||||
646,729,WBAN:04929,"FOSSTON MUNICIPAL AIRPORT, MN US",2005-12-31,2018-08-18,Minnesota,United States,47.59278,-95.77528000000001,389.2,1,20,"(20, 1)"
|
||||
647,731,WBAN:53841,"FRANKFORT CAPITAL CITY AIRPORT, KY US",2005-12-31,2018-08-18,Kentucky,United States,38.18472,-84.90333000000001,245.1,9,27,"(27, 9)"
|
||||
648,732,WBAN:54818,"FRANKFORT DOW MEMORIAL FIELD AIRPORT, MI US",2005-12-31,2018-08-18,Michigan,United States,44.62556,-86.20083000000001,192.6,4,26,"(26, 4)"
|
||||
649,733,WBAN:00152,"FRANKLIN CO STATE AIRPORT, VT US",2008-03-31,2018-08-18,Vermont,United States,44.933,-73.10000000000002,70.10000000000001,3,36,"(36, 3)"
|
||||
651,735,WBAN:94868,"FRANKLIN, PA US",1967-12-31,2018-08-18,Pennsylvania,United States,41.38333,-79.86667,469.4,6,31,"(31, 6)"
|
||||
653,737,WBAN:03981,"FREDERICK MUNICIPAL AIRPORT, OK US",2005-12-31,2018-08-18,Oklahoma,United States,34.344440000000006,-98.98306,382.5,13,17,"(17, 13)"
|
||||
654,738,WBAN:93947,"FREDERICKSBURG GILLESPIE CO AIRPORT, TX US",2005-12-31,2018-08-18,Texas,United States,30.24333,-98.90972,516.6,16,17,"(17, 16)"
|
||||
655,739,WBAN:03706,"FREDERICKSBURG SHANNON AIRPORT, VA US",2005-12-31,2018-08-18,Virginia,United States,38.26667,-77.44917,25.9,9,33,"(33, 9)"
|
||||
656,740,WBAN:04876,"FREEPORT ALBERTUS AIRPORT, IL US",2005-12-31,2018-08-18,Illinois,United States,42.246109999999994,-89.58221999999998,261.8,6,24,"(24, 6)"
|
||||
658,742,WBAN:04924,"FREMONT MUNICIPAL AIRPORT, NE US",2005-12-31,2018-08-18,Nebraska,United States,41.448890000000006,-96.52,366.7,6,19,"(19, 6)"
|
||||
659,743,WBAN:04836,"FRENCHVILLE NORTHERN AROOSTOOK AIRPORT, ME US",2005-12-31,2018-08-18,Maine,United States,47.28556,-68.31333000000001,301.1,1,39,"(39, 1)"
|
||||
660,744,WBAN:93193,"FRESNO YOSEMITE INTERNATIONAL, CA US",1941-12-03,2018-08-18,California,United States,36.78,-119.7194,101.5,10,3,"(3, 10)"
|
||||
661,745,WBAN:94276,"FRIDAY HARBOR AIRPORT, WA US",2005-12-31,2018-08-18,Washington,United States,48.522220000000004,-123.02306000000002,33.2,0,0,"(0, 0)"
|
||||
662,746,WBAN:00450,"FRONT RANGE AIRPORT, CO US",2013-12-31,2018-08-18,Colorado,United States,39.7842,-104.5376,1680.4,8,13,"(13, 8)"
|
||||
663,747,WBAN:54772,"FRYEBURG EASTERN SLOPES REGL AIRPORT, ME US",2005-12-31,2018-08-18,Maine,United States,43.99056,-70.9475,135.6,4,37,"(37, 4)"
|
||||
665,749,WBAN:00265,"FULTON CO AIRPORT, IN US",2013-12-31,2018-08-18,Indiana,United States,41.066,-86.182,241.1,7,26,"(26, 7)"
|
||||
667,752,WBAN:03896,"GADSDEN MUNICIPAL AIRPORT, AL US",2005-12-31,2018-08-18,Alabama,United States,33.96667,-86.08332999999998,173.4,13,27,"(27, 13)"
|
||||
668,753,WBAN:13975,"GAGE AIRPORT, OK US",1956-12-31,2018-08-18,Oklahoma,United States,36.2967,-99.7689,667.8000000000002,11,17,"(17, 11)"
|
||||
669,754,WBAN:03056,"GAINES CO AIRPORT, TX US",2005-12-31,2018-08-18,Texas,United States,32.67528,-102.65444000000001,1010.4,14,15,"(15, 14)"
|
||||
673,759,WBAN:93764,"GAITHERSBURG MONTGOMERY CO AIR PARK, MD US",2013-12-31,2018-08-18,Maryland,United States,39.16667,-77.16667,164.3,8,33,"(33, 8)"
|
||||
674,760,WBAN:94959,"GALESBURG MUNICIPAL AIRPORT, IL US",2005-12-31,2018-08-18,Illinois,United States,40.93333,-90.43333,232.9,7,23,"(23, 7)"
|
||||
675,761,WBAN:12993,"GALLIANO SOUTH LAFOURCHE AIRPORT, LA US",2005-12-31,2018-08-18,Louisiana,United States,29.44472,-90.26111,0.3,17,24,"(24, 17)"
|
||||
676,762,WBAN:23081,"GALLUP MUNICIPAL AIRPORT, NM US",1972-12-31,2018-08-18,New Mexico,United States,35.5144,-108.794,1972.4,12,10,"(10, 12)"
|
||||
678,765,WBAN:23064,"GARDEN CITY REGIONAL AIRPORT, KS US",1943-01-31,2018-08-18,Kansas,United States,37.92722,-100.72471999999999,878.4,9,16,"(16, 9)"
|
||||
680,767,WBAN:94041,"GARRISON, ND US",2005-12-31,2018-08-18,North Dakota,United States,47.64583,-101.43944,582.2,1,16,"(16, 1)"
|
||||
681,768,WBAN:04807,"GARY, IN US",2005-12-31,2018-08-18,Indiana,United States,41.61667,-87.41667,180.1,6,26,"(26, 6)"
|
||||
682,769,WBAN:53870,"GASTONIA MUNICIPAL AIRPORT, NC US",2005-12-31,2018-08-18,North Carolina,United States,35.196670000000005,-81.15583000000001,242.9,12,30,"(30, 12)"
|
||||
684,772,WBAN:04854,"GAYLORD OTSEGO CO AIRPORT, MI US",1999-08-31,2018-08-18,Michigan,United States,45.01333,-84.70138999999998,406.9,3,28,"(28, 3)"
|
||||
687,776,WBAN:00391,"GEORGETOWN CO AIRPORT, SC US",2013-12-31,2018-08-18,South Carolina,United States,33.317,-79.31700000000002,12.2,13,31,"(31, 13)"
|
||||
689,778,WBAN:13764,"GEORGETOWN SUSSEX CO AIRPORT, DE US",2005-12-31,2018-08-18,Delaware,United States,38.689170000000004,-75.35916999999998,15.5,9,34,"(34, 9)"
|
||||
693,784,WBAN:53982,"GILMER MUNICIPAL AIRPORT, TX US",2005-12-31,2018-08-18,Texas,United States,32.698890000000006,-94.94889,126.5,14,20,"(20, 14)"
|
||||
694,785,WBAN:94008,"GLASGOW INTERNATIONAL AIRPORT, MT US",1942-12-09,2018-08-18,Montana,United States,48.2138,-106.6214,696.5,0,12,"(12, 0)"
|
||||
695,786,WBAN:00361,"GLASGOW MUNICIPAL AIRPORT, KY US",2012-12-31,2018-08-18,Kentucky,United States,37.033,-85.95,218.2,10,27,"(27, 10)"
|
||||
697,788,WBAN:53126,"GLENDALE MUNICIPAL AIRPORT, AZ US",2005-12-31,2018-08-18,Arizona,United States,33.52722,-112.295,324.9000000000001,13,8,"(8, 13)"
|
||||
698,789,WBAN:24087,"GLENDIVE DAWSON COMMUNITY AIRPORT, MT US",2005-12-31,2018-08-18,Montana,United States,47.13333,-104.8,748.9,1,13,"(13, 1)"
|
||||
699,790,WBAN:14750,"GLENS FALLS AIRPORT, NY US",1972-12-31,2018-08-18,New York,United States,43.33845,-73.61028,97.8,5,35,"(35, 5)"
|
||||
701,793,WBAN:00135,"GNOSS FIELD AIRPORT, CA US",2014-07-30,2018-08-18,California,United States,38.15,-122.55,1.2,9,1,"(1, 9)"
|
||||
702,794,WBAN:53893,"GOLDEN TRIANGLE, MS US",2005-12-31,2018-08-18,Mississippi,United States,33.45,-88.58332999999998,80.5,13,25,"(25, 13)"
|
||||
704,796,WBAN:03708,"GOLDSBORO WAYNE MUNICIPAL AIRPORT, NC US",2005-12-31,2018-08-18,North Carolina,United States,35.46028,-77.96472,40.8,12,32,"(32, 12)"
|
||||
705,797,WBAN:23065,"GOODLAND RENNER FIELD, KS US",1956-12-31,2018-08-18,Kansas,United States,39.36722,-101.69333,1114.3,8,16,"(16, 8)"
|
||||
706,798,WBAN:04994,"GOODRIDGE 12 NNW, MN US",2003-08-19,2018-08-16,Minnesota,United States,48.3055,-95.8744,350.5,0,20,"(20, 0)"
|
||||
708,800,WBAN:14829,"GOSHEN MUNICIPAL AIRPORT, IN US",1998-12-31,2018-08-18,Indiana,United States,41.5333,-85.78330000000003,253.0,6,27,"(27, 6)"
|
||||
710,802,WBAN:53977,"GRANBURY MUNICIPAL AIRPORT, TX US",2005-12-31,2018-08-18,Texas,United States,32.44444,-97.81694,237.1,14,18,"(18, 14)"
|
||||
711,803,WBAN:03195,"GRAND CANYON NATIONAL PARK AIRPORT, AZ US",2005-12-31,2018-08-18,Arizona,United States,35.94611,-112.15472,2013.5,11,8,"(8, 11)"
|
||||
713,805,WBAN:14916,"GRAND FORKS INTERNATIONAL AIRPORT, ND US",1956-12-31,2018-08-18,North Dakota,United States,47.94280000000001,-97.1839,256.6,1,19,"(19, 1)"
|
||||
715,810,WBAN:23066,"GRAND JUNCTION WALKER FIELD, CO US",1956-12-31,2018-08-18,Colorado,United States,39.1342,-108.54,1480.7,8,11,"(11, 8)"
|
||||
717,812,WBAN:94992,"GRAND MARAIS, MN US",2005-12-31,2018-08-18,Minnesota,United States,47.74722,-90.34444,185.9,1,24,"(24, 1)"
|
||||
720,815,WBAN:94919,"GRAND RAPIDS ITASCA CO AIRPORT, MN US",2005-12-31,2018-08-18,Minnesota,United States,47.21111,-93.50972,413.0,1,21,"(21, 1)"
|
||||
721,816,WBAN:04999,"GRANITE FALLS MUNICIPAL AIRPORT LENZEN ROE MEMORIAL FIELD, MN US",2005-12-31,2018-08-18,Minnesota,United States,44.75333,-95.55611,319.1,3,20,"(20, 3)"
|
||||
722,818,WBAN:00481,"GRANITE PEAK FILLMORE AIRPORT, UT US",2014-07-29,2018-08-17,Utah,United States,38.95813,-112.36313,1519.4,9,8,"(8, 9)"
|
||||
723,819,WBAN:00387,"GRANT CO REGIONAL AIRPORT OGILVIE FIELD, OR US",2013-12-31,2018-08-18,Oregon,United States,44.4,-118.96700000000001,1127.2,4,3,"(3, 4)"
|
||||
724,821,WBAN:93057,"GRANTS MILAN MUNICIPAL AIRPORT, NM US",1947-12-31,2018-08-18,New Mexico,United States,35.16528,-107.90222,1987.3,12,11,"(11, 12)"
|
||||
725,822,WBAN:24201,"GRAY ARMY AIR FIELD, WA US",1960-05-31,2018-08-18,Washington,United States,47.08333,-122.58333,91.4,1,1,"(1, 1)"
|
||||
727,824,WBAN:53967,"GRAYSON CO AIRPORT SHERMAN DENISON, TX US",2005-12-31,2018-08-18,Texas,United States,33.71417,-96.67361,228.3,13,19,"(19, 13)"
|
||||
729,826,WBAN:24143,"GREAT FALLS AIRPORT, MT US",1956-12-31,2018-08-18,Montana,United States,47.4733,-111.3822,1116.8,1,9,"(9, 1)"
|
||||
730,827,WBAN:04880,"GREATER KANKAKEE AIRPORT, IL US",2005-12-31,2018-08-18,Illinois,United States,41.121390000000005,-87.84611,191.7,7,25,"(25, 7)"
|
||||
731,828,WBAN:24051,"GREELEY WELD CO AIRPORT, CO US",2005-12-31,2018-08-18,Colorado,United States,40.43556,-104.63194,1431.6,7,13,"(13, 7)"
|
||||
732,829,WBAN:14898,"GREEN BAY A S INTERNATIONAL AIRPORT, WI US",1949-12-31,2018-08-18,Wisconsin,United States,44.47940000000001,-88.1366,209.4,4,25,"(25, 4)"
|
||||
733,832,WBAN:13723,"GREENSBORO AIRPORT, NC US",1945-10-31,2018-08-18,North Carolina,United States,36.09690000000001,-79.9432,271.3,11,31,"(31, 11)"
|
||||
735,834,WBAN:13939,"GREENVILLE ASOS, MS US",1942-01-19,2018-08-18,Mississippi,United States,33.4825,-90.98528,39.0,13,23,"(23, 13)"
|
||||
738,837,WBAN:63874,"GREENVILLE MAC CRENSHAW MEMORIAL AIRPORT, AL US",2006-08-31,2018-08-18,Alabama,United States,31.84556,-86.61082999999998,137.5,15,26,"(26, 15)"
|
||||
739,838,WBAN:94626,"GREENVILLE MAINE FORESTRY SERVICE, ME US",2005-12-31,2018-08-18,Maine,United States,45.46222,-69.59528,316.1,3,38,"(38, 3)"
|
||||
740,839,WBAN:13926,"GREENVILLE MUNICIPAL AIRPORT MAJORS FIELD, TX US",2005-12-31,2018-08-18,Texas,United States,33.06778,-96.06528,163.1,14,19,"(19, 14)"
|
||||
742,841,WBAN:53874,"GREENWOOD CO AIRPORT, SC US",2005-12-31,2018-08-18,South Carolina,United States,34.24861,-82.15916999999999,192.3,13,29,"(29, 13)"
|
||||
743,842,WBAN:13978,"GREENWOOD LEFLORE AIRPORT, MS US",1943-02-04,2018-08-18,Mississippi,United States,33.496300000000005,-90.0866,40.5,13,24,"(24, 13)"
|
||||
744,843,WBAN:24048,"GREYBULL SOUTH BIG HORN CO AIRPORT, WY US",2005-12-31,2018-08-18,Wyoming,United States,44.516940000000005,-108.08221999999999,1198.8,4,11,"(11, 4)"
|
||||
745,844,WBAN:00339,"GRIFFIN SPALDING CO AIRPORT, GA US",2012-12-31,2018-08-18,Georgia,United States,33.227000000000004,-84.275,292.3,14,28,"(28, 14)"
|
||||
746,845,WBAN:14976,"GRINNELL, IA US",2006-08-30,2018-08-18,Iowa,United States,41.71667,-92.7,307.2,6,22,"(22, 6)"
|
||||
747,846,WBAN:03870,"GRNVL SPART INTERNATIONAL AIRPORT, SC US",1962-10-14,2018-08-18,South Carolina,United States,34.8842,-82.2209,287.4000000000001,12,29,"(29, 12)"
|
||||
748,847,WBAN:54819,"GROSSE ILE MUNICIPAL AIRPORT, MI US",2005-12-31,2018-08-18,Michigan,United States,42.09861,-83.16111,178.9,6,29,"(29, 6)"
|
||||
750,849,WBAN:53941,"GROVE MUNICIPAL AIRPORT, OK US",2005-12-31,2018-08-18,Oklahoma,United States,36.605,-94.73833,253.9,11,20,"(20, 11)"
|
||||
752,851,WBAN:93874,"GULFPORT BILOXI AIRPORT, MS US",2004-12-31,2018-08-18,Mississippi,United States,30.411900000000006,-89.08080000000002,12.8,16,24,"(24, 16)"
|
||||
754,854,WBAN:53913,"GUTHRIE MUNICIPAL AIRPORT, OK US",2005-12-31,2018-08-18,Oklahoma,United States,35.8517,-97.4142,325.5,11,19,"(19, 11)"
|
||||
755,855,WBAN:03030,"GUYMON MUNICIPAL AIRPORT, OK US",2005-12-31,2018-08-18,Oklahoma,United States,36.681670000000004,-101.50528,951.9,11,16,"(16, 11)"
|
||||
756,856,WBAN:94836,"GWINN K I SAWYER AFB, MI US",2005-12-31,2018-08-18,Michigan,United States,46.35,-87.4,372.2,2,26,"(26, 2)"
|
||||
757,857,WBAN:00150,"GWINNER ROGER MELROE FIELD AIRPORT, ND US",2014-06-30,2018-08-18,North Dakota,United States,46.217,-97.633,386.2,2,18,"(18, 2)"
|
||||
758,858,WBAN:00221,"H A CLARK MEMORIAL FIELD AIRPORT, AZ US",2013-12-31,2018-08-18,Arizona,United States,35.30000000000001,-112.2,2035.1,12,8,"(8, 12)"
|
||||
759,859,WBAN:00186,"H L SONNY CALLAHAN AIRPORT, AL US",2013-12-31,2018-08-18,Alabama,United States,30.46,-87.87700000000002,28.0,16,25,"(25, 16)"
|
||||
760,861,WBAN:93706,"HAGERSTOWN WASHINGTON CO REGIONAL AIRPORT, MD US",2005-12-31,2018-08-18,Maryland,United States,39.70778,-77.72972,212.8,8,32,"(32, 8)"
|
||||
761,862,WBAN:94161,"HAILEY FRIEDMAN MEMORIAL AIRPORT, ID US",2005-12-31,2018-08-18,Idaho,United States,43.5,-114.3,1617.3,5,7,"(7, 5)"
|
||||
763,864,WBAN:00231,"HALIFAX NORTHAMPTON REGIONAL AIRPORT, NC US",2013-12-31,2018-08-18,North Carolina,United States,36.33,-77.635,44.2,11,33,"(33, 11)"
|
||||
764,865,WBAN:53938,"HALLIBURTON FIELD AIRPORT, OK US",2005-12-31,2018-08-18,Oklahoma,United States,34.47083,-97.95083000000001,339.2,12,18,"(18, 12)"
|
||||
766,867,WBAN:53855,"HAMILTON BUTLER CO REGIONAL AIRPORT, OH US",2005-12-31,2018-08-18,Ohio,United States,39.36444,-84.52472,193.2,8,28,"(28, 8)"
|
||||
767,868,WBAN:00357,"HAMILTON MUNICIPAL AIRPORT, TX US",2013-12-31,2018-08-18,Texas,United States,31.666,-98.149,396.2,15,18,"(18, 15)"
|
||||
768,869,WBAN:03908,"HAMMOND MUNICIPAL AIRPORT, LA US",2005-12-31,2018-08-18,Louisiana,United States,30.52083,-90.4175,13.4,16,23,"(23, 16)"
|
||||
769,870,WBAN:00154,"HAMPTON ROADS EXECUTIVE AIRPORT, VA US",2012-12-31,2018-08-18,Virginia,United States,36.783,-76.45,7.0,10,33,"(33, 10)"
|
||||
770,871,WBAN:14858,"HANCOCK HOUGHTON CO AIRPORT, MI US",1956-12-31,2018-08-18,Michigan,United States,47.16861,-88.48889,333.8,1,25,"(25, 1)"
|
||||
771,872,WBAN:53119,"HANFORD MUNICIPAL AIRPORT, CA US",2005-12-31,2018-08-18,California,United States,36.31889,-119.62888999999998,75.9,11,3,"(3, 11)"
|
||||
772,874,WBAN:23170,"HANKSVILLE AIRPORT, UT US",1972-12-31,2018-08-18,Utah,United States,38.41667,-110.7,1355.1,9,9,"(9, 9)"
|
||||
773,875,WBAN:00455,"HANNIBAL REGIONAL AIRPORT, MO US",2013-12-31,2018-08-18,Missouri,United States,39.72516,-91.44386,234.7,8,23,"(23, 8)"
|
||||
774,876,WBAN:04884,"HARBOR SPRINGS AIRPORT, MI US",2005-12-31,2018-08-18,Michigan,United States,45.42556,-84.91333,206.3,3,27,"(27, 3)"
|
||||
775,877,WBAN:04936,"HARLAN MUNICIPAL AIRPORT, IA US",2005-12-31,2018-08-18,Iowa,United States,41.58417,-95.33944,375.2,6,20,"(20, 6)"
|
||||
777,879,WBAN:00159,"HARRIET ALEXANDER FIELD AIRPORT, CO US",2013-12-31,2018-08-18,Colorado,United States,38.533,-106.05,2294.2000000000007,9,12,"(12, 9)"
|
||||
778,880,WBAN:14751,"HARRISBURG CAPITAL CITY AIRPORT, PA US",1956-12-31,2018-08-18,Pennsylvania,United States,40.217220000000005,-76.85139000000001,103.6,7,33,"(33, 7)"
|
||||
780,883,WBAN:13971,"HARRISON BOONE CO AIRPORT, AR US",1946-07-31,2018-08-18,Arkansas,United States,36.2668,-93.1566,418.8,11,22,"(22, 11)"
|
||||
781,884,WBAN:00431,"HARRISON CO AIRPORT, TX US",2012-12-31,2018-08-18,Texas,United States,32.5205,-94.3077,108.8,14,21,"(21, 14)"
|
||||
783,886,WBAN:14752,"HARTFORD BRAINARD FIELD, CT US",1973-12-31,2018-08-18,Connecticut,United States,41.73611,-72.65056,5.8,6,36,"(36, 6)"
|
||||
784,887,WBAN:00219,"HARTSVILLE REGIONAL AIRPORT, SC US",2012-12-31,2018-08-18,South Carolina,United States,34.4,-80.117,111.3,12,31,"(31, 12)"
|
||||
785,888,WBAN:00322,"HARVEY MUNICIPAL AIRPORT, ND US",2014-07-30,2018-08-18,North Dakota,United States,47.783,-99.93299999999999,489.2,1,17,"(17, 1)"
|
||||
787,890,WBAN:13833,"HATTIESBURG CHAIN MUNICIPAL AIRPORT, MS US",1942-08-19,2018-08-18,Mississippi,United States,31.281940000000002,-89.25305999999998,46.0,15,24,"(24, 15)"
|
||||
789,892,WBAN:94012,"HAVRE AIRPORT ASOS, MT US",1961-01-31,2018-08-18,Montana,United States,48.5428,-109.7633,787.9,0,10,"(10, 0)"
|
||||
790,893,WBAN:03167,"HAWTHORNE MUNICIPAL AIRPORT, CA US",2005-12-31,2018-08-18,California,United States,33.92278,-118.33417,19.2,13,4,"(4, 13)"
|
||||
791,894,WBAN:94025,"HAYDEN YAMPA VALLEY AIRPORT, CO US",2005-12-31,2018-08-18,Colorado,United States,40.48111,-107.2175,2011.7,7,12,"(12, 7)"
|
||||
792,895,WBAN:03968,"HAYS MUNICIPAL AIRPORT, KS US",2005-12-31,2018-08-18,Kansas,United States,38.85,-99.26667,609.0,9,17,"(17, 9)"
|
||||
793,896,WBAN:93228,"HAYWARD AIR TERMINAL, CA US",1999-12-31,2018-08-18,California,United States,37.6542,-122.115,13.1,10,1,"(1, 10)"
|
||||
794,897,WBAN:94973,"HAYWARD MUNICIPAL AIRPORT, WI US",2005-12-31,2018-08-18,Wisconsin,United States,46.02611,-91.44417,367.0,2,23,"(23, 2)"
|
||||
796,899,WBAN:53973,"HEARNE MUNICIPAL AIRPORT, TX US",2005-12-31,2018-08-18,Texas,United States,30.871940000000002,-96.62222,86.9,16,19,"(19, 16)"
|
||||
797,900,WBAN:00337,"HEART OF GEORGIA REGIONAL AIRPORT, GA US",2012-12-31,2018-08-18,Georgia,United States,32.214,-83.12799999999999,93.3,14,29,"(29, 14)"
|
||||
798,903,WBAN:04998,"HEBRON MUNICIPAL AIRPORT, NE US",2005-12-31,2018-08-18,Nebraska,United States,40.14917,-97.58667,449.0,7,18,"(18, 7)"
|
||||
799,904,WBAN:24144,"HELENA AIRPORT ASOS, MT US",1956-12-31,2018-08-18,Montana,United States,46.6056,-111.9636,1166.8,2,8,"(8, 2)"
|
||||
800,905,WBAN:53886,"HENDERSON CITY CO AIRPORT, KY US",2005-12-31,2018-08-18,Kentucky,United States,37.8,-87.68333,118.0,10,25,"(25, 10)"
|
||||
801,906,WBAN:03711,"HENDERSON OXFORD AIRPORT, NC US",2005-12-31,2018-08-18,North Carolina,United States,36.36139,-78.52888999999998,160.6,11,32,"(32, 11)"
|
||||
802,907,WBAN:00250,"HENRY TIFT MYERS AIRPORT, GA US",2012-12-31,2018-08-18,Georgia,United States,31.429,-83.48899999999999,108.2,15,28,"(28, 15)"
|
||||
803,908,WBAN:00129,"HEREFORD MUNICIPAL AIRPORT, TX US",2013-12-31,2018-08-18,Texas,United States,34.85,-102.333,1153.1,12,15,"(15, 12)"
|
||||
804,909,WBAN:04113,"HERMISTON MUNICIPAL AIRPORT, OR US",2005-12-31,2018-08-18,Oregon,United States,45.82583,-119.26111000000002,195.4,3,3,"(3, 3)"
|
||||
805,910,WBAN:94038,"HETTINGER MUNICIPAL AIRPORT, ND US",2005-12-31,2018-08-18,North Dakota,United States,46.01389,-102.65472,824.5,2,15,"(15, 2)"
|
||||
806,911,WBAN:94931,"HIBBING CHISHOLM HIBBING AIRPORT, MN US",1971-12-31,2018-08-18,Minnesota,United States,47.386390000000006,-92.83889,412.1,1,22,"(22, 1)"
|
||||
807,912,WBAN:03810,"HICKORY FAA AIRPORT, NC US",1972-12-31,2018-08-18,North Carolina,United States,35.74207,-81.38229,358.1,11,30,"(30, 11)"
|
||||
808,913,WBAN:00306,HIGH ISLAND 179 OIL PLATFORM,2012-12-31,2018-08-18,,,29.183000000000003,-94.517,75.3,17,21,"(21, 17)"
|
||||
809,914,WBAN:00260,HIGH ISLAND 376,2013-12-31,2018-08-18,,,27.962,-93.671,0.3,18,21,"(21, 18)"
|
||||
810,916,WBAN:93990,"HILL CITY MUNICIPAL AIRPORT, KS US",1972-12-31,2018-08-18,Kansas,United States,39.37556,-99.82972,666.9,8,17,"(17, 8)"
|
||||
811,917,WBAN:53972,"HILLSBORO MUNICIPAL AIRPORT, TX US",2005-12-31,2018-08-18,Texas,United States,32.08361,-97.09722,208.8,15,19,"(19, 15)"
|
||||
813,919,WBAN:63837,"HILTON HEAD ISLAND AIRPORT, SC US",2005-12-31,2018-08-18,South Carolina,United States,32.21667,-80.7,7.3,14,30,"(30, 14)"
|
||||
814,920,WBAN:93986,"HOBART MUNICIPAL AIRPORT, OK US",1972-12-31,2018-08-18,Oklahoma,United States,34.9894,-99.0525,474.3,12,17,"(17, 12)"
|
||||
815,921,WBAN:93034,"HOBBS LEA CO AIRPORT, NM US",1942-10-31,2018-08-18,New Mexico,United States,32.6933,-103.2125,1114.0,14,14,"(14, 14)"
|
||||
816,923,WBAN:04935,"HOLDREGE BREWSTER FIELD AIRPORT, NE US",2005-12-31,2018-08-18,Nebraska,United States,40.45,-99.33917,702.3000000000002,7,17,"(17, 7)"
|
||||
818,925,WBAN:00392,"HOLLISTER MUNICIPAL AIRPORT, CA US",1945-02-28,2018-08-18,California,United States,36.9,-121.417,72.2,10,2,"(2, 10)"
|
||||
819,926,WBAN:23002,"HOLLOMAN AFB, NM US",2006-01-02,2018-08-18,New Mexico,United States,32.85,-106.1,1267.4,14,12,"(12, 14)"
|
||||
820,927,WBAN:23803,"HOLLY SPRINGS 4 N, MS US",2008-01-31,2018-08-16,Mississippi,United States,34.8223,-89.43480000000002,147.5,12,24,"(24, 12)"
|
||||
821,928,WBAN:00163,"HOLYOKE AIRPORT, CO US",2013-12-31,2018-08-18,Colorado,United States,40.567,-102.267,1137.2,7,15,"(15, 7)"
|
||||
822,929,WBAN:00128,"HOMERVILLE AIRPORT, GA US",2012-12-31,2018-08-18,Georgia,United States,31.055999999999997,-82.76700000000002,57.3,15,29,"(29, 15)"
|
||||
823,930,WBAN:12962,"HONDO MUNICIPAL AIRPORT, TX US",1942-09-14,2018-08-18,Texas,United States,29.360100000000006,-99.1742,280.40000000000003,17,17,"(17, 17)"
|
||||
824,931,WBAN:00429,"HOPKINS FIELD AIRPORT, CO US",2012-12-31,2018-08-18,Colorado,United States,38.23875,-108.56326999999999,1810.5,9,11,"(11, 9)"
|
||||
825,932,WBAN:94225,"HOQUIAM BOWERMAN AIRPORT, WA US",1956-12-31,2018-08-18,Washington,United States,46.9727,-123.9302,3.7,2,0,"(0, 2)"
|
||||
826,933,WBAN:00225,"HORSESHOE BAY RESORT AIRPORT, TX US",2012-12-31,2018-08-18,Texas,United States,30.533,-98.367,333.1,16,18,"(18, 16)"
|
||||
827,934,WBAN:03962,"HOT SPRINGS ASOS, AR US",2005-12-31,2018-08-18,Arkansas,United States,34.29,-93.06,163.1,13,22,"(22, 13)"
|
||||
828,935,WBAN:93757,"HOT SPRINGS INGALLS FIELD, VA US",2005-12-31,2018-08-18,Virginia,United States,37.95,-79.81667,1156.1,9,31,"(31, 9)"
|
||||
830,937,WBAN:14609,"HOULTON AIRPORT, ME US",1999-12-31,2018-08-18,Maine,United States,46.1185,-67.7928,145.1,2,39,"(39, 2)"
|
||||
831,938,WBAN:12927,"HOUMA TERREBONNE AIRPORT, LA US",2005-12-31,2018-08-18,Louisiana,United States,29.566390000000002,-90.66028,4.0,17,23,"(23, 17)"
|
||||
840,948,WBAN:12918,"HOUSTON WILLIAM P HOBBY AIRPORT, TX US",1946-07-27,2018-08-18,Texas,United States,29.63806,-95.28194,13.4,17,20,"(20, 17)"
|
||||
841,949,WBAN:04887,"HOWELL LIVINGSTON CO AIRPORT, MI US",2005-12-31,2018-08-18,Michigan,United States,42.62944,-83.98416999999998,287.7,5,28,"(28, 5)"
|
||||
842,950,WBAN:00484,"HULETT MUNICIPAL AIRPORT, WY US",2014-07-30,2018-08-18,Wyoming,United States,44.66286,-104.56783,1300.0,4,13,"(13, 4)"
|
||||
843,951,WBAN:53896,"HUNTINGBURG AIRPORT, IN US",2005-12-31,2018-08-18,Indiana,United States,38.24889,-86.95361,161.20000000000005,9,26,"(26, 9)"
|
||||
844,952,WBAN:03860,"HUNTINGTON TRI STATE AIRPORT, WV US",1961-11-30,2018-08-18,West Virginia,United States,38.365,-82.555,251.2,9,29,"(29, 9)"
|
||||
846,954,WBAN:63804,"HUNTSVILLE MADISON CO EXECUTIVE AIRPORT, AL US",2005-12-31,2018-08-18,Alabama,United States,34.86139,-86.55722,230.1,12,26,"(26, 12)"
|
||||
847,955,WBAN:53903,"HUNTSVILLE MUNICIPAL AIRPORT, TX US",2005-12-31,2018-08-18,Texas,United States,30.743890000000004,-95.58611,111.6,16,20,"(20, 16)"
|
||||
848,956,WBAN:14936,"HURON REGIONAL AIRPORT, SD US",1956-12-31,2018-08-18,South Dakota,United States,44.3981,-98.2231,390.1,4,18,"(18, 4)"
|
||||
849,957,WBAN:13986,"HUTCHINSON MUNICIPAL AIRPORT, KS US",1945-01-31,2018-08-18,Kansas,United States,38.06528,-97.86056,470.3,9,18,"(18, 9)"
|
||||
850,958,WBAN:04933,"HUTCHINSON MUNICIPAL BUTLER FIELD AIRPORT, MN US",2005-12-31,2018-08-18,Minnesota,United States,44.85889,-94.38167,323.1,3,21,"(21, 3)"
|
||||
851,959,WBAN:00291,"HUTSON FIELD AIRPORT, ND US",2013-12-31,2018-08-18,North Dakota,United States,48.405,-97.371,251.2,0,19,"(19, 0)"
|
||||
852,960,WBAN:94720,"HYANNIS BARNSTABLE MUNICIPAL AIRPORT, MA US",2005-12-31,2018-08-18,Massachusetts,United States,41.66861,-70.28,16.8,6,38,"(38, 6)"
|
||||
853,961,WBAN:53990,"IDABEL MCCURTAIN CO REGIONAL AIRPORT, OK US",2005-12-31,2018-08-18,Oklahoma,United States,33.909440000000004,-94.85944,143.9,13,20,"(20, 13)"
|
||||
854,962,WBAN:00452,"IDAHO CO AIRPORT, ID US",2013-12-31,2018-08-18,Idaho,United States,45.94255,-116.12341,1010.1,2,5,"(5, 2)"
|
||||
855,963,WBAN:24145,"IDAHO FALLS FANNING FIELD, ID US",1956-12-31,2018-08-18,Idaho,United States,43.51639,-112.06722,1441.4,5,8,"(8, 5)"
|
||||
857,965,WBAN:93115,"IMPERIAL BEACH REAM FIELD NAS, CA US",1956-12-31,2018-08-18,California,United States,32.56667,-117.11667,7.3,14,5,"(5, 14)"
|
||||
858,966,WBAN:03144,"IMPERIAL CO AIRPORT, CA US",2005-12-31,2018-08-18,California,United States,32.83417,-115.57861000000001,-17.7,14,6,"(6, 14)"
|
||||
859,967,WBAN:24091,"IMPERIAL MUNICIPAL AIRPORT, NE US",1973-03-31,2018-08-18,Nebraska,United States,40.51,-101.62,996.1,7,16,"(16, 7)"
|
||||
861,969,WBAN:00141,"INDEPENDENCE MUNICIPAL AIRPORT, KS US",1944-01-31,2018-08-18,Kansas,United States,37.158,-95.77799999999999,251.2,10,20,"(20, 10)"
|
||||
862,971,WBAN:23141,"INDIAN SPRINGS, NV US",1963-09-02,2018-08-18,Nevada,United States,36.58333,-115.68333,951.9,11,6,"(6, 11)"
|
||||
863,972,WBAN:64706,"INDIANA J STEWART, PA US",2005-12-31,2018-08-18,Pennsylvania,United States,40.63333,-79.10000000000002,428.2,7,31,"(31, 7)"
|
||||
866,975,WBAN:93819,"INDIANAPOLIS INTERNATIONAL AIRPORT, IN US",1942-10-05,2018-08-18,Indiana,United States,39.72517,-86.28168000000001,241.1,8,26,"(26, 8)"
|
||||
867,977,WBAN:14918,"INTERNATIONAL FALLS INTERNATIONAL AIRPORT, MN US",1956-12-31,2018-08-18,Minnesota,United States,48.5614,-93.3981,360.6,0,21,"(21, 0)"
|
||||
868,978,WBAN:00377,"INVERNESS AIRPORT, FL US",2012-12-31,2018-08-18,Florida,United States,28.816999999999997,-82.31700000000002,15.2,17,29,"(29, 17)"
|
||||
869,980,WBAN:00240,"IONIA CO AIRPORT, MI US",2014-07-30,2018-08-18,Michigan,United States,42.938,-85.061,249.0,5,27,"(27, 5)"
|
||||
870,981,WBAN:14937,"IOWA CITY MUNICIPAL AIRPORT, IA US",1997-01-22,2018-08-18,Iowa,United States,41.63278,-91.54306,198.1,6,23,"(23, 6)"
|
||||
871,982,WBAN:54941,"IOWA FALLS MUNICIPAL AIRPORT, IA US",2013-06-20,2018-08-18,Iowa,United States,42.47138,-93.20707,346.6,5,22,"(22, 5)"
|
||||
873,984,WBAN:94926,"IRONWOOD, MI US",2005-12-31,2018-08-18,Michigan,United States,46.53333,-90.13333,374.9,2,24,"(24, 2)"
|
||||
874,985,WBAN:04997,"ISEDOR IVERSON AIRPORT, MN US",2005-12-31,2018-08-18,Minnesota,United States,46.61889,-93.30972,374.3,2,21,"(21, 2)"
|
||||
875,986,WBAN:04781,"ISLIP LI MACARTHUR AIRPORT, NY US",1972-12-31,2018-08-18,New York,United States,40.7939,-73.10170000000002,25.6,7,36,"(36, 7)"
|
||||
876,988,WBAN:94761,"ITHACA TOMPKINS CNTY, NY US",2005-12-31,2018-08-18,New York,United States,42.48333,-76.46667,335.0,5,33,"(33, 5)"
|
||||
877,989,WBAN:00464,"J DOUGLAS BAKE MEMORIAL AIRPORT, WI US",2014-07-30,2018-08-18,Wisconsin,United States,44.87405,-87.90977,184.4,3,25,"(25, 3)"
|
||||
878,990,WBAN:00216,"JACK BARSTOW AIRPORT, MI US",2013-12-31,2018-08-18,Michigan,United States,43.663,-84.26100000000002,194.2,4,28,"(28, 4)"
|
||||
879,991,WBAN:00394,"JACKSON CO AIRPORT, GA US",2013-12-31,2018-08-17,Georgia,United States,34.147,-83.561,290.2,13,28,"(28, 13)"
|
||||
881,993,WBAN:24166,"JACKSON HOLE AIRPORT, WY US",2005-12-31,2018-08-17,Wyoming,United States,43.6,-110.73333000000001,1956.5,4,9,"(9, 4)"
|
||||
882,994,WBAN:03940,"JACKSON INTERNATIONAL AIRPORT, MS US",1942-08-31,2018-08-18,Mississippi,United States,32.3205,-90.0777,100.6,14,24,"(24, 14)"
|
||||
883,995,WBAN:03889,"JACKSON JULIAN CARROLL AIRPORT, KY US",1973-01-01,2018-08-18,Kentucky,United States,37.591390000000004,-83.31443999999998,416.1,10,29,"(29, 10)"
|
||||
884,996,WBAN:03811,"JACKSON MCKELLAR SIPES AIRPORT, TN US",1972-12-31,2018-08-18,Tennessee,United States,35.593,-88.9167,132.0,11,25,"(25, 11)"
|
||||
885,997,WBAN:04946,"JACKSON MUNICIPAL AIRPORT, MN US",2005-12-31,2018-08-18,Minnesota,United States,43.65,-94.98639,440.7,4,20,"(20, 4)"
|
||||
886,998,WBAN:14833,"JACKSON REYNOLDS FIELD, MI US",1972-12-31,2018-08-18,Michigan,United States,42.2667,-84.4667,304.2,6,28,"(28, 6)"
|
||||
887,999,WBAN:93753,"JACKSONVILLE ALBERT ELLIS AIRPORT, NC US",2005-12-31,2018-08-18,North Carolina,United States,34.83333,-77.61667,29.3,12,33,"(33, 12)"
|
||||
|
@@ -1,532 +0,0 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Project Notebook\n",
|
||||
"This is the full and complete notebook that takes in the data from NOAA and processes it into frames to be used in the PredNet architecture and produce a resulting prediction."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import pandas as pd\n",
|
||||
"import numpy as np\n",
|
||||
"import os\n",
|
||||
"from tqdm import tqdm"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"#Getting a list of files in raw data folder\n",
|
||||
"filenames = os.listdir('D:/Nico/Desktop/processed_data')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"header_wanted = [\n",
|
||||
" 'HOURLYVISIBILITY',\n",
|
||||
" 'HOURLYDRYBULBTEMPC',\n",
|
||||
" 'HOURLYWETBULBTEMPC',\n",
|
||||
" 'HOURLYDewPointTempC',\n",
|
||||
" 'HOURLYRelativeHumidity',\n",
|
||||
" 'HOURLYWindSpeed',\n",
|
||||
" 'HOURLYWindGustSpeed',\n",
|
||||
" 'HOURLYStationPressure',\n",
|
||||
" 'HOURLYPressureTendency',\n",
|
||||
" 'HOURLYPressureChange',\n",
|
||||
" 'HOURLYSeaLevelPressure',\n",
|
||||
" 'HOURLYPrecip',\n",
|
||||
" 'HOURLYAltimeterSetting']"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"usecols = ['DATE','STATION'] + header_wanted"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"#Loading all files into a pandas Dataframe\n",
|
||||
"tqdm.pandas()\n",
|
||||
"df = pd.concat([pd.read_csv('D:/Nico/Desktop/processed_data/{}'.format(x), usecols=usecols, low_memory=False) for x in tqdm(filenames)])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"At this point all the data has been loaded into a single dataframe and any data changes have been made. The next step is to break the data up by WBAN and place in a 2D array at the appropriate grid cell. "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"stations = pd.read_csv(\"../Playground/stations_unique.csv\", usecols = ['STATION_ID', 'LON_SCALED', 'LAT_SCALED'])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"height = 20\n",
|
||||
"width = 40"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"mask = [([0] * width) for i in range(height)]\n",
|
||||
"\n",
|
||||
"wban_loc = dict(zip(stations.STATION_ID,zip(stations.LON_SCALED,stations.LAT_SCALED)))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"grid = [([pd.DataFrame()] * width) for i in range(height)]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"for key, value in tqdm(wban_loc.items()):\n",
|
||||
" mask[value[1]][value[0]] = 1\n",
|
||||
" grid[value[1]][value[0]] = df.loc[df.STATION == key]"
|
||||
]
|
||||
},
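For readers reconstructing this step outside the notebook, here is a minimal, self-contained sketch of how the station lookup is used to stamp each station's records into the 20x40 grid. The grid indices come straight from the LON_SCALED/LAT_SCALED columns in the station list above (e.g. WBAN:24213 at (0, 7) and WBAN:04111 at (9, 7)); the temperature values are made up for illustration.

```python
import pandas as pd

height, width = 20, 40

# Toy stand-ins for the full NOAA dataframe and the stations_unique.csv lookup.
df = pd.DataFrame({'STATION': ['WBAN:24213', 'WBAN:04111'],
                   'HOURLYDRYBULBTEMPC': [6.1, -2.0]})
stations = pd.DataFrame({'STATION_ID': ['WBAN:24213', 'WBAN:04111'],
                         'LON_SCALED': [0, 9],    # x (column) index into the grid
                         'LAT_SCALED': [7, 7]})   # y (row) index into the grid

wban_loc = dict(zip(stations.STATION_ID,
                    zip(stations.LON_SCALED, stations.LAT_SCALED)))

mask = [[0] * width for _ in range(height)]
grid = [[pd.DataFrame() for _ in range(width)] for _ in range(height)]

for station_id, (x, y) in wban_loc.items():
    mask[y][x] = 1                                  # this cell has a reporting station
    grid[y][x] = df.loc[df.STATION == station_id]   # that station's hourly records
```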
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"%matplotlib inline"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"plt.imshow(mask)"
|
||||
]
|
||||
},
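The mask image shows how sparsely the grid is populated. An optional one-liner to quantify that coverage, assuming `mask` is the nested list built above:

```python
import numpy as np

coverage = np.mean(np.array(mask))
print('{:.1%} of the {}x{} grid cells contain a station'.format(coverage, height, width))
```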
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"#TODO Handle different sized data some stacks too short\n",
|
||||
"def create_frames(data,height, width, depth):\n",
|
||||
" days = []\n",
|
||||
" frames = []\n",
|
||||
" for i in tqdm(range(depth)):\n",
|
||||
" frame = np.zeros((height,width,12))\n",
|
||||
" for y in range(height):\n",
|
||||
" for x in range(width):\n",
|
||||
" if(not data[y][x].empty):\n",
|
||||
" frame[y][x] = data[y][x].iloc[[i],1:13].values.flatten()\n",
|
||||
" if((i+1)%24 != 0):\n",
|
||||
" frames.append(frame)\n",
|
||||
" else:\n",
|
||||
" frames.append(frame)\n",
|
||||
" days.append(frames)\n",
|
||||
" frames = []\n",
|
||||
" return days"
|
||||
]
|
||||
},
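Both branches of the if/else above append the frame; only the day-boundary handling differs. Below is an equivalent, slightly more compact sketch of the same frame-stacking step. Like the cell, it assumes every non-empty grid cell has at least `depth` hourly rows and that columns 1-12 of each DataFrame hold the hourly weather features.

```python
import numpy as np

def create_frames(data, height, width, depth, hours_per_day=24, n_features=12):
    """Stack hourly (height, width, n_features) frames and group them into days."""
    days, frames = [], []
    for i in range(depth):
        frame = np.zeros((height, width, n_features))
        for y in range(height):
            for x in range(width):
                if not data[y][x].empty:
                    # columns 1:13 hold the hourly weather features for hour i
                    frame[y][x] = data[y][x].iloc[i, 1:13].values
        frames.append(frame)
        if (i + 1) % hours_per_day == 0:   # close out a day every 24 frames
            days.append(frames)
            frames = []
    return days
```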
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def average_grid_fill(mask,data, height, width):\n",
|
||||
" \n",
|
||||
" for i in range(height):\n",
|
||||
" for j in range(width):\n",
|
||||
" if(mask[i][j] != 1):\n",
|
||||
" neighbors = get_neighbors(j,i,data)\n",
|
||||
" data[i][j] = np.mean(neighbors)\n",
|
||||
" \n",
|
||||
" return data"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def get_neighbors(x,y,g):\n",
|
||||
" neighbors = []\n",
|
||||
" for i in [y-1,y,y+1]:\n",
|
||||
" for j in [x-1,x,x+1]:\n",
|
||||
" if(i >= 0 and j >= 0):\n",
|
||||
" if(i != y or j != x ):\n",
|
||||
" try:\n",
|
||||
" neighbors.append(g[i][j])\n",
|
||||
" except:\n",
|
||||
" pass\n",
|
||||
" return neighbors"
|
||||
]
|
||||
},
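Together, average_grid_fill and get_neighbors fill cells without a station using the mean of their (up to eight) neighbouring cells. A NumPy sketch of the same idea for a single frame is below; it clips the 3x3 window at the grid edges instead of relying on a bare except, and, like the original, it averages in neighbours that are themselves empty.

```python
import numpy as np

def average_fill(frame, mask):
    """Fill cells where mask == 0 with the mean of their 3x3 neighbourhood."""
    frame = np.asarray(frame, dtype=float)
    mask = np.asarray(mask, dtype=bool)
    h, w = mask.shape
    filled = frame.copy()
    for y in range(h):
        for x in range(w):
            if not mask[y, x]:
                y0, y1 = max(y - 1, 0), min(y + 2, h)   # clip window to grid bounds
                x0, x1 = max(x - 1, 0), min(x + 2, w)
                filled[y, x] = frame[y0:y1, x0:x1].mean(axis=(0, 1))
    return filled
```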
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def store_sequence(frames):\n",
|
||||
" import hickle as hkl\n",
|
||||
" source_list = []\n",
|
||||
" \n",
|
||||
" for days in range(len(frames)):\n",
|
||||
" for day in range(len(frames[days])):\n",
|
||||
" source_list += '{}'.format(days)\n",
|
||||
" \n",
|
||||
" hkl.dump(frames, './data/train/x_train.hkl')\n",
|
||||
" hkl.dump(source_list, './data/train/x_sources.hkl')\n",
|
||||
" "
|
||||
]
|
||||
},
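One thing to watch in the cell above: `source_list += '{}'.format(days)` extends the list one character at a time, because `+=` on a list iterates over the string. The intended behaviour appears to be one source label per frame; a corrected sketch that writes the same hickle files is below (the hickle import and the ./data/train paths are taken from the cell).

```python
import numpy as np
import hickle as hkl

def store_sequence(days):
    """Flatten day-grouped frames and record which day each frame came from."""
    frames = [frame for day in days for frame in day]                  # all hourly frames
    source_list = [str(d) for d, day in enumerate(days) for _ in day]  # one day id per frame

    hkl.dump(np.array(frames), './data/train/x_train.hkl')
    hkl.dump(source_list, './data/train/x_sources.hkl')
```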
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Splits is a dictionary holding train, test, val\n",
|
||||
"the values for train, test, and val are lists of tuples holding category and folder name\n",
|
||||
"in the end each image gets a source associated with it\n",
|
||||
"there is only one data and one source hickle dump for each of train test and val"
|
||||
]
|
||||
},
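Concretely, the splits structure described above could look like the following; the category and folder names here are placeholders, not paths from this project.

```python
splits = {
    'train': [('weather', 'day_000'), ('weather', 'day_001')],
    'val':   [('weather', 'day_002')],
    'test':  [('weather', 'day_003')],
}
# Every frame inside a folder later gets that folder recorded as its source,
# and exactly one data hickle and one source hickle are written per split.
```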
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"frames = create_frames(grid, height, width,504)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"#TODO use loop to average each frame\n",
|
||||
"for x in tqdm(range(len(frames))):\n",
|
||||
" for y in range(len(frames[0])):\n",
|
||||
" frames[x][y] = average_grid_fill(mask, frames[x][y], height, width )"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"store_sequence(frames)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"np_frames = np.array(frames)\n",
|
||||
"np_frames.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"store_sequence(np_frames)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"At this point I have processed the data and made it into discrete frames of data and it is time to run it through the PredNet architecture for training."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"Using TensorFlow backend.\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"np.random.seed(123)\n",
|
||||
"from six.moves import cPickle\n",
|
||||
"\n",
|
||||
"from keras import backend as K\n",
|
||||
"from keras.models import Model\n",
|
||||
"from keras.layers import Input, Dense, Flatten\n",
|
||||
"from keras.layers import LSTM\n",
|
||||
"from keras.layers import TimeDistributed\n",
|
||||
"from keras.callbacks import LearningRateScheduler, ModelCheckpoint\n",
|
||||
"from keras.optimizers import Adam\n",
|
||||
"\n",
|
||||
"from prednet import PredNet\n",
|
||||
"from data_utils import SequenceGenerator"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"WEIGHTS_DIR = './weights/'\n",
|
||||
"DATA_DIR = './data/'"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"save_model = True # if weights will be saved\n",
|
||||
"weights_file = os.path.join(WEIGHTS_DIR, 'prednet_weather_weights.hdf5') # where weights will be saved\n",
|
||||
"json_file = os.path.join(WEIGHTS_DIR, 'prednet_weather_model.json')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Data files\n",
|
||||
"#TODO: Use the files from NOAA and process them into proper frames\n",
|
||||
"train_file = os.path.join(DATA_DIR,'train/', 'x_train.hkl')\n",
|
||||
"train_sources = os.path.join(DATA_DIR, 'train/', 'x_sources.hkl')\n",
|
||||
"#val_file = os.path.join(DATA_DIR, 'X_val.hkl')\n",
|
||||
"#val_sources = os.path.join(DATA_DIR, 'sources_val.hkl')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 7,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Training parameters\n",
|
||||
"nb_epoch = 1\n",
|
||||
"batch_size = 4\n",
|
||||
"samples_per_epoch = 500\n",
|
||||
"N_seq_val = 100 # number of sequences to use for validation"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 8,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Model parameters\n",
|
||||
"n_channels, im_height, im_width = (12, 20, 40)\n",
|
||||
"input_shape = (n_channels, im_height, im_width) if K.image_data_format() == 'channels_first' else (im_height, im_width, n_channels)\n",
|
||||
"stack_sizes = (n_channels, 48, 96)\n",
|
||||
"R_stack_sizes = stack_sizes\n",
|
||||
"A_filt_sizes = (3, 3)\n",
|
||||
"Ahat_filt_sizes = (3, 3, 3)\n",
|
||||
"R_filt_sizes = (3, 3, 3)\n",
|
||||
"layer_loss_weights = np.array([1., 0., 0.]) # weighting for each layer in final loss; \"L_0\" model: [1, 0, 0, 0], \"L_all\": [1, 0.1, 0.1, 0.1]\n",
|
||||
"layer_loss_weights = np.expand_dims(layer_loss_weights, 1)\n",
|
||||
"nt = 24 # number of timesteps used for sequences in training\n",
|
||||
"time_loss_weights = 1./ (nt - 1) * np.ones((nt,1)) # equally weight all timesteps except the first\n",
|
||||
"time_loss_weights[0] = 0"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 9,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"prednet = PredNet(stack_sizes, R_stack_sizes,\n",
|
||||
" A_filt_sizes, Ahat_filt_sizes, R_filt_sizes,\n",
|
||||
" output_mode='error', return_sequences=True)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 10,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"inputs = Input(shape=(nt,) + input_shape)\n",
|
||||
"errors = prednet(inputs) # errors will be (batch_size, nt, nb_layers)\n",
|
||||
"errors_by_time = TimeDistributed(Dense(1, trainable=False), weights=[layer_loss_weights, np.zeros(1)], trainable=False)(errors) # calculate weighted error by layer\n",
|
||||
"errors_by_time = Flatten()(errors_by_time) # will be (batch_size, nt)\n",
|
||||
"final_errors = Dense(1, weights=[time_loss_weights, np.zeros(1)], trainable=False)(errors_by_time) # weight errors by time\n",
|
||||
"model = Model(inputs=inputs, outputs=final_errors)\n",
|
||||
"model.compile(loss='mean_absolute_error', optimizer='adam')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 11,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"_________________________________________________________________\n",
|
||||
"Layer (type) Output Shape Param # \n",
|
||||
"=================================================================\n",
|
||||
"input_1 (InputLayer) (None, 24, 20, 40, 12) 0 \n",
|
||||
"_________________________________________________________________\n",
|
||||
"pred_net_1 (PredNet) (None, 24, 3) 1645548 \n",
|
||||
"_________________________________________________________________\n",
|
||||
"time_distributed_1 (TimeDist (None, 24, 1) 4 \n",
|
||||
"_________________________________________________________________\n",
|
||||
"flatten_1 (Flatten) (None, 24) 0 \n",
|
||||
"_________________________________________________________________\n",
|
||||
"dense_2 (Dense) (None, 1) 25 \n",
|
||||
"=================================================================\n",
|
||||
"Total params: 1,645,577\n",
|
||||
"Trainable params: 1,645,548\n",
|
||||
"Non-trainable params: 29\n",
|
||||
"_________________________________________________________________\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"model.summary()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 12,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"truth = []\n",
|
||||
"for i in range(20):\n",
|
||||
" truth.append(np.random.randint(255,size=(1)))\n",
|
||||
"output = np.array(truth)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 13,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"train_generator = SequenceGenerator(train_file, train_sources, nt, batch_size=batch_size, shuffle=True)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"lr_schedule = lambda epoch: 0.001 if epoch < 75 else 0.0001 # start with lr of 0.001 and then drop to 0.0001 after 75 epochs\n",
|
||||
"callbacks = [LearningRateScheduler(lr_schedule)]\n",
|
||||
"#history = model.fit(np_frames, output ,batch_size, nb_epoch, callbacks=callbacks)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"Epoch 1/1\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"history = model.fit_generator(train_generator, samples_per_epoch / batch_size, nb_epoch, callbacks=callbacks)"
|
||||
]
|
||||
},
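`samples_per_epoch / batch_size` is a float in Python 3; fit_generator treats its second argument as a step count, so integer division makes the intent explicit (whether a float is accepted varies with the Keras version). A sketch using the same variables:

```python
steps_per_epoch = samples_per_epoch // batch_size   # 500 // 4 = 125 generator batches per epoch
history = model.fit_generator(train_generator, steps_per_epoch, nb_epoch, callbacks=callbacks)
```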
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.6.4"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
544
Project Final/Training/Training.ipynb
Normal file
@@ -0,0 +1,544 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"Using TensorFlow backend.\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"import numpy as np\n",
|
||||
"import os\n",
|
||||
"np.random.seed(123)\n",
|
||||
"from six.moves import cPickle\n",
|
||||
"\n",
|
||||
"from keras import backend as K\n",
|
||||
"from keras.models import Model\n",
|
||||
"from keras.layers import Input, Dense, Flatten\n",
|
||||
"from keras.layers import LSTM\n",
|
||||
"from keras.layers import TimeDistributed\n",
|
||||
"from keras.callbacks import LearningRateScheduler, ModelCheckpoint\n",
|
||||
"from keras.optimizers import Adam\n",
|
||||
"\n",
|
||||
"from prednet import PredNet\n",
|
||||
"from data_utils import SequenceGenerator"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"WEIGHTS_DIR = './weights/'\n",
|
||||
"DATA_DIR = '../data/'"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"save_model = True # if weights will be saved\n",
|
||||
"weights_file = os.path.join(WEIGHTS_DIR, 'prednet_weather_weights.hdf5') # where weights will be saved\n",
|
||||
"json_file = os.path.join(WEIGHTS_DIR, 'prednet_weather_model.json')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Data files\n",
|
||||
"train_file = os.path.join(DATA_DIR, 'x_train.hkl')\n",
|
||||
"train_sources = os.path.join(DATA_DIR, 'sources_train.hkl')\n",
|
||||
"val_file = os.path.join(DATA_DIR, 'x_val.hkl')\n",
|
||||
"val_sources = os.path.join(DATA_DIR, 'sources_val.hkl')"
|
||||
]
|
||||
},
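The hickle files are expected to match what the preprocessing notebook writes: an array of frames plus one source label per frame. An optional sanity check, assuming hickle is installed and the files above exist:

```python
import hickle as hkl

X = hkl.load(train_file)            # expected shape: (n_frames, 20, 40, 7)
sources = hkl.load(train_sources)   # one source id per frame
assert len(X) == len(sources), 'every frame needs a source label'
print(len(X), 'frames,', len(set(sources)), 'distinct sources')
```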
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Training parameters\n",
|
||||
"nb_epoch = 150\n",
|
||||
"batch_size = 24\n",
|
||||
"samples_per_epoch = 500\n",
|
||||
"N_seq_val = 140 # number of sequences to use for validation"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Model parameters\n",
|
||||
"n_channels, im_height, im_width = (7, 20, 40)\n",
|
||||
"input_shape = (n_channels, im_height, im_width) if K.image_data_format() == 'channels_first' else (im_height, im_width, n_channels)\n",
|
||||
"stack_sizes = (n_channels, 48, 96)\n",
|
||||
"R_stack_sizes = stack_sizes\n",
|
||||
"A_filt_sizes = (3, 3)\n",
|
||||
"Ahat_filt_sizes = (3, 3, 3)\n",
|
||||
"R_filt_sizes = (3, 3, 3)\n",
|
||||
"layer_loss_weights = np.array([1., 0., 0.]) # weighting for each layer in final loss; \"L_0\" model: [1, 0, 0, 0], \"L_all\": [1, 0.1, 0.1, 0.1]\n",
|
||||
"layer_loss_weights = np.expand_dims(layer_loss_weights, 1)\n",
|
||||
"nt = 24 # number of timesteps used for sequences in training\n",
|
||||
"time_loss_weights = 1./ (nt - 1) * np.ones((nt,1)) # equally weight all timesteps except the first\n",
|
||||
"time_loss_weights[0] = 0"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 7,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"prednet = PredNet(stack_sizes, R_stack_sizes,\n",
|
||||
" A_filt_sizes, Ahat_filt_sizes, R_filt_sizes,\n",
|
||||
" output_mode='error', return_sequences=True)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 8,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"inputs = Input(shape=(nt,) + input_shape)\n",
|
||||
"errors = prednet(inputs) # errors will be (batch_size, nt, nb_layers)\n",
|
||||
"errors_by_time = TimeDistributed(Dense(1, trainable=False), weights=[layer_loss_weights, np.zeros(1)], trainable=False)(errors) # calculate weighted error by layer\n",
|
||||
"errors_by_time = Flatten()(errors_by_time) # will be (batch_size, nt)\n",
|
||||
"final_errors = Dense(1, weights=[time_loss_weights, np.zeros(1)], trainable=False)(errors_by_time) # weight errors by time\n",
|
||||
"model = Model(inputs=inputs, outputs=final_errors)\n",
|
||||
"model.compile(loss='mean_absolute_error', optimizer='adam')"
|
||||
]
|
||||
},
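The two frozen Dense layers implement the PredNet loss weighting as plain dot products. The same reduction can be checked by hand with NumPy, using the layer and time weights defined in the parameter cell above:

```python
import numpy as np

batch_demo, nt_demo, nb_layers = 2, 24, 3
errors = np.random.rand(batch_demo, nt_demo, nb_layers)  # what prednet(inputs) produces

layer_w = np.array([1., 0., 0.])            # "L_0" weighting: only the bottom layer counts
time_w = np.ones(nt_demo) / (nt_demo - 1)   # equal weight for every timestep...
time_w[0] = 0                               # ...except the first, which is ignored

errors_by_time = errors @ layer_w       # (batch, nt): weighted error per timestep
final_errors = errors_by_time @ time_w  # (batch,): the scalar the model trains against
print(final_errors.shape)
```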
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 9,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"_________________________________________________________________\n",
|
||||
"Layer (type) Output Shape Param # \n",
|
||||
"=================================================================\n",
|
||||
"input_1 (InputLayer) (None, 24, 20, 40, 7) 0 \n",
|
||||
"_________________________________________________________________\n",
|
||||
"pred_net_1 (PredNet) (None, 24, 3) 1621448 \n",
|
||||
"_________________________________________________________________\n",
|
||||
"time_distributed_1 (TimeDist (None, 24, 1) 4 \n",
|
||||
"_________________________________________________________________\n",
|
||||
"flatten_1 (Flatten) (None, 24) 0 \n",
|
||||
"_________________________________________________________________\n",
|
||||
"dense_2 (Dense) (None, 1) 25 \n",
|
||||
"=================================================================\n",
|
||||
"Total params: 1,621,477\n",
|
||||
"Trainable params: 1,621,448\n",
|
||||
"Non-trainable params: 29\n",
|
||||
"_________________________________________________________________\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"model.summary()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 10,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"train_generator = SequenceGenerator(train_file, train_sources, nt, batch_size=batch_size, shuffle=False)\n",
|
||||
"val_generator = SequenceGenerator(val_file, val_sources, nt, batch_size=batch_size, N_seq=N_seq_val)"
|
||||
]
|
||||
},
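SequenceGenerator comes from the PredNet reference code (data_utils); each batch it yields pairs nt consecutive frames from the same source with dummy targets, since the model's own output is the error being minimised. A quick shape check, assuming the generator follows the usual Keras generator protocol as the reference implementation does:

```python
X_batch, y_batch = next(train_generator)
print(X_batch.shape)   # expected: (batch_size, nt, 20, 40, 7)
print(y_batch.shape)   # dummy targets, one scalar per sequence
```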
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 11,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"lr_schedule = lambda epoch: 0.001 if epoch < 75 else 0.0001 # start with lr of 0.001 and then drop to 0.0001 after 75 epochs\n",
|
||||
"callbacks = [LearningRateScheduler(lr_schedule)]\n",
|
||||
"if save_model:\n",
|
||||
" if not os.path.exists(WEIGHTS_DIR): os.mkdir(WEIGHTS_DIR)\n",
|
||||
" callbacks.append(ModelCheckpoint(filepath=weights_file, monitor='val_loss', save_best_only=True))"
|
||||
]
|
||||
},
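`json_file` is defined above but never written in the cells shown. If the intent is to mirror the PredNet reference training script, the architecture can be saved next to the checkpointed weights once training finishes, for example:

```python
if save_model:
    with open(json_file, 'w') as f:
        f.write(model.to_json())   # ModelCheckpoint above already saves the best weights
```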
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 12,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"Epoch 1/150\n",
|
||||
" - 21s - loss: 0.9718 - val_loss: 0.9142\n",
|
||||
"Epoch 2/150\n",
|
||||
" - 15s - loss: 0.9089 - val_loss: 0.8895\n",
|
||||
"Epoch 3/150\n",
|
||||
" - 15s - loss: 0.9057 - val_loss: 0.8826\n",
|
||||
"Epoch 4/150\n",
|
||||
" - 15s - loss: 0.8905 - val_loss: 0.8765\n",
|
||||
"Epoch 5/150\n",
|
||||
" - 15s - loss: 0.9106 - val_loss: 0.8840\n",
|
||||
"Epoch 6/150\n",
|
||||
" - 15s - loss: 0.8812 - val_loss: 0.8749\n",
|
||||
"Epoch 7/150\n",
|
||||
" - 15s - loss: 0.9099 - val_loss: 0.8897\n",
|
||||
"Epoch 8/150\n",
|
||||
" - 15s - loss: 0.8829 - val_loss: 0.8711\n",
|
||||
"Epoch 9/150\n",
|
||||
" - 15s - loss: 0.9039 - val_loss: 0.8843\n",
|
||||
"Epoch 10/150\n",
|
||||
" - 15s - loss: 0.8814 - val_loss: 0.8696\n",
|
||||
"Epoch 11/150\n",
|
||||
" - 15s - loss: 0.8897 - val_loss: 0.8881\n",
|
||||
"Epoch 12/150\n",
|
||||
" - 15s - loss: 0.8803 - val_loss: 0.8696\n",
|
||||
"Epoch 13/150\n",
|
||||
" - 15s - loss: 0.8819 - val_loss: 0.8836\n",
|
||||
"Epoch 14/150\n",
|
||||
" - 15s - loss: 0.8786 - val_loss: 0.8672\n",
|
||||
"Epoch 15/150\n",
|
||||
" - 15s - loss: 0.8732 - val_loss: 0.8671\n",
|
||||
"Epoch 16/150\n",
|
||||
" - 15s - loss: 0.8805 - val_loss: 0.8698\n",
|
||||
"Epoch 17/150\n",
|
||||
" - 15s - loss: 0.8643 - val_loss: 0.8600\n",
|
||||
"Epoch 18/150\n",
|
||||
" - 15s - loss: 0.8845 - val_loss: 0.8574\n",
|
||||
"Epoch 19/150\n",
|
||||
" - 16s - loss: 0.8581 - val_loss: 0.8568\n",
|
||||
"Epoch 20/150\n",
|
||||
" - 15s - loss: 0.8815 - val_loss: 0.8588\n",
|
||||
"Epoch 21/150\n",
|
||||
" - 15s - loss: 0.8622 - val_loss: 0.8575\n",
|
||||
"Epoch 22/150\n",
|
||||
" - 15s - loss: 0.8737 - val_loss: 0.8604\n",
|
||||
"Epoch 23/150\n",
|
||||
" - 15s - loss: 0.8619 - val_loss: 0.8568\n",
|
||||
"Epoch 24/150\n",
|
||||
" - 15s - loss: 0.8694 - val_loss: 0.8606\n",
|
||||
"Epoch 25/150\n",
|
||||
" - 15s - loss: 0.8638 - val_loss: 0.8542\n",
|
||||
"Epoch 26/150\n",
|
||||
" - 15s - loss: 0.8664 - val_loss: 0.8591\n",
|
||||
"Epoch 27/150\n",
|
||||
" - 15s - loss: 0.8700 - val_loss: 0.8551\n",
|
||||
"Epoch 28/150\n",
|
||||
" - 15s - loss: 0.8576 - val_loss: 0.8534\n",
|
||||
"Epoch 29/150\n",
|
||||
" - 15s - loss: 0.8751 - val_loss: 0.8512\n",
|
||||
"Epoch 30/150\n",
|
||||
" - 15s - loss: 0.8532 - val_loss: 0.8520\n",
|
||||
"Epoch 31/150\n",
|
||||
" - 15s - loss: 0.8758 - val_loss: 0.8518\n",
|
||||
"Epoch 32/150\n",
|
||||
" - 15s - loss: 0.8565 - val_loss: 0.8514\n",
|
||||
"Epoch 33/150\n",
|
||||
" - 15s - loss: 0.8688 - val_loss: 0.8500\n",
|
||||
"Epoch 34/150\n",
|
||||
" - 15s - loss: 0.8571 - val_loss: 0.8521\n",
|
||||
"Epoch 35/150\n",
|
||||
" - 15s - loss: 0.8659 - val_loss: 0.8501\n",
|
||||
"Epoch 36/150\n",
|
||||
" - 15s - loss: 0.8595 - val_loss: 0.8508\n",
|
||||
"Epoch 37/150\n",
|
||||
" - 15s - loss: 0.8634 - val_loss: 0.8512\n",
|
||||
"Epoch 38/150\n",
|
||||
" - 15s - loss: 0.8642 - val_loss: 0.8505\n",
|
||||
"Epoch 39/150\n",
|
||||
" - 15s - loss: 0.8562 - val_loss: 0.8490\n",
|
||||
"Epoch 40/150\n",
|
||||
" - 15s - loss: 0.8706 - val_loss: 0.8491\n",
|
||||
"Epoch 41/150\n",
|
||||
" - 15s - loss: 0.8498 - val_loss: 0.8490\n",
|
||||
"Epoch 42/150\n",
|
||||
" - 15s - loss: 0.8727 - val_loss: 0.8479\n",
|
||||
"Epoch 43/150\n",
|
||||
" - 15s - loss: 0.8519 - val_loss: 0.8473\n",
|
||||
"Epoch 44/150\n",
|
||||
" - 15s - loss: 0.8671 - val_loss: 0.8474\n",
|
||||
"Epoch 45/150\n",
|
||||
" - 15s - loss: 0.8534 - val_loss: 0.8485\n",
|
||||
"Epoch 46/150\n",
|
||||
" - 15s - loss: 0.8624 - val_loss: 0.8466\n",
|
||||
"Epoch 47/150\n",
|
||||
" - 15s - loss: 0.8563 - val_loss: 0.8467\n",
|
||||
"Epoch 48/150\n",
|
||||
" - 15s - loss: 0.8607 - val_loss: 0.8468\n",
|
||||
"Epoch 49/150\n",
|
||||
" - 15s - loss: 0.8594 - val_loss: 0.8475\n",
|
||||
"Epoch 50/150\n",
|
||||
" - 15s - loss: 0.8548 - val_loss: 0.8457\n",
|
||||
"Epoch 51/150\n",
|
||||
" - 15s - loss: 0.8671 - val_loss: 0.8456\n",
|
||||
"Epoch 52/150\n",
|
||||
" - 15s - loss: 0.8476 - val_loss: 0.8453\n",
|
||||
"Epoch 53/150\n",
|
||||
" - 15s - loss: 0.8701 - val_loss: 0.8452\n",
|
||||
"Epoch 54/150\n",
|
||||
" - 15s - loss: 0.8470 - val_loss: 0.8455\n",
|
||||
"Epoch 55/150\n",
|
||||
" - 15s - loss: 0.8674 - val_loss: 0.8447\n",
|
||||
"Epoch 56/150\n",
|
||||
" - 15s - loss: 0.8502 - val_loss: 0.8457\n",
|
||||
"Epoch 57/150\n",
|
||||
" - 15s - loss: 0.8614 - val_loss: 0.8443\n",
|
||||
"Epoch 58/150\n",
|
||||
" - 15s - loss: 0.8532 - val_loss: 0.8448\n",
|
||||
"Epoch 59/150\n",
|
||||
" - 15s - loss: 0.8590 - val_loss: 0.8443\n",
|
||||
"Epoch 60/150\n",
|
||||
" - 15s - loss: 0.8561 - val_loss: 0.8439\n",
|
||||
"Epoch 61/150\n",
|
||||
" - 15s - loss: 0.8543 - val_loss: 0.8441\n",
|
||||
"Epoch 62/150\n",
|
||||
" - 15s - loss: 0.8630 - val_loss: 0.8444\n",
|
||||
"Epoch 63/150\n",
|
||||
" - 15s - loss: 0.8472 - val_loss: 0.8437\n",
|
||||
"Epoch 64/150\n",
|
||||
" - 15s - loss: 0.8688 - val_loss: 0.8435\n",
|
||||
"Epoch 65/150\n",
|
||||
" - 15s - loss: 0.8435 - val_loss: 0.8439\n",
|
||||
"Epoch 66/150\n",
|
||||
" - 15s - loss: 0.8675 - val_loss: 0.8439\n",
|
||||
"Epoch 67/150\n",
|
||||
" - 15s - loss: 0.8484 - val_loss: 0.8432\n",
|
||||
"Epoch 68/150\n",
|
||||
" - 15s - loss: 0.8605 - val_loss: 0.8433\n",
|
||||
"Epoch 69/150\n",
|
||||
" - 15s - loss: 0.8508 - val_loss: 0.8438\n",
|
||||
"Epoch 70/150\n",
|
||||
" - 15s - loss: 0.8582 - val_loss: 0.8433\n",
|
||||
"Epoch 71/150\n",
|
||||
" - 15s - loss: 0.8531 - val_loss: 0.8426\n",
|
||||
"Epoch 72/150\n",
|
||||
" - 15s - loss: 0.8553 - val_loss: 0.8435\n",
|
||||
"Epoch 73/150\n",
|
||||
" - 15s - loss: 0.8601 - val_loss: 0.8425\n",
|
||||
"Epoch 74/150\n",
|
||||
" - 15s - loss: 0.8472 - val_loss: 0.8424\n",
|
||||
"Epoch 75/150\n",
|
||||
" - 15s - loss: 0.8659 - val_loss: 0.8425\n",
|
||||
"Epoch 76/150\n",
|
||||
" - 15s - loss: 0.8433 - val_loss: 0.8422\n",
|
||||
"Epoch 77/150\n",
|
||||
" - 15s - loss: 0.8670 - val_loss: 0.8423\n",
|
||||
"Epoch 78/150\n",
|
||||
" - 15s - loss: 0.8471 - val_loss: 0.8426\n",
|
||||
"Epoch 79/150\n",
|
||||
" - 15s - loss: 0.8607 - val_loss: 0.8426\n",
|
||||
"Epoch 80/150\n",
|
||||
" - 15s - loss: 0.8487 - val_loss: 0.8426\n",
|
||||
"Epoch 81/150\n",
|
||||
" - 15s - loss: 0.8582 - val_loss: 0.8426\n",
|
||||
"Epoch 82/150\n",
|
||||
" - 15s - loss: 0.8521 - val_loss: 0.8424\n",
|
||||
"Epoch 83/150\n",
|
||||
" - 15s - loss: 0.8558 - val_loss: 0.8424\n",
|
||||
"Epoch 84/150\n",
|
||||
" - 15s - loss: 0.8576 - val_loss: 0.8421\n",
|
||||
"Epoch 85/150\n",
|
||||
" - 15s - loss: 0.8489 - val_loss: 0.8421\n",
|
||||
"Epoch 86/150\n",
|
||||
" - 15s - loss: 0.8648 - val_loss: 0.8421\n",
|
||||
"Epoch 87/150\n",
|
||||
" - 15s - loss: 0.8431 - val_loss: 0.8421\n",
|
||||
"Epoch 88/150\n",
|
||||
" - 15s - loss: 0.8673 - val_loss: 0.8422\n",
|
||||
"Epoch 89/150\n",
|
||||
" - 15s - loss: 0.8460 - val_loss: 0.8424\n",
|
||||
"Epoch 90/150\n",
|
||||
" - 15s - loss: 0.8621 - val_loss: 0.8425\n",
|
||||
"Epoch 91/150\n",
|
||||
" - 15s - loss: 0.8481 - val_loss: 0.8425\n",
|
||||
"Epoch 92/150\n",
|
||||
" - 15s - loss: 0.8578 - val_loss: 0.8426\n",
|
||||
"Epoch 93/150\n",
|
||||
" - 15s - loss: 0.8518 - val_loss: 0.8424\n",
|
||||
"Epoch 94/150\n",
|
||||
" - 15s - loss: 0.8564 - val_loss: 0.8424\n",
|
||||
"Epoch 95/150\n",
|
||||
" - 15s - loss: 0.8556 - val_loss: 0.8421\n",
|
||||
"Epoch 96/150\n",
|
||||
" - 15s - loss: 0.8506 - val_loss: 0.8421\n",
|
||||
"Epoch 97/150\n",
|
||||
" - 15s - loss: 0.8637 - val_loss: 0.8419\n",
|
||||
"Epoch 98/150\n",
|
||||
" - 15s - loss: 0.8438 - val_loss: 0.8420\n",
|
||||
"Epoch 99/150\n",
|
||||
" - 15s - loss: 0.8672 - val_loss: 0.8421\n",
|
||||
"Epoch 100/150\n",
|
||||
" - 15s - loss: 0.8437 - val_loss: 0.8422\n",
|
||||
"Epoch 101/150\n",
|
||||
" - 15s - loss: 0.8645 - val_loss: 0.8423\n",
|
||||
"Epoch 102/150\n",
|
||||
" - 15s - loss: 0.8472 - val_loss: 0.8424\n",
|
||||
"Epoch 103/150\n",
|
||||
" - 15s - loss: 0.8589 - val_loss: 0.8425\n",
|
||||
"Epoch 104/150\n",
|
||||
" - 15s - loss: 0.8508 - val_loss: 0.8424\n",
|
||||
"Epoch 105/150\n",
|
||||
" - 15s - loss: 0.8567 - val_loss: 0.8425\n",
|
||||
"Epoch 106/150\n",
|
||||
" - 15s - loss: 0.8542 - val_loss: 0.8421\n",
|
||||
"Epoch 107/150\n",
|
||||
" - 15s - loss: 0.8522 - val_loss: 0.8421\n",
|
||||
"Epoch 108/150\n",
|
||||
" - 15s - loss: 0.8613 - val_loss: 0.8419\n",
|
||||
"Epoch 109/150\n",
|
||||
" - 15s - loss: 0.8454 - val_loss: 0.8419\n",
|
||||
"Epoch 110/150\n",
|
||||
" - 15s - loss: 0.8672 - val_loss: 0.8420\n",
|
||||
"Epoch 111/150\n",
|
||||
" - 15s - loss: 0.8420 - val_loss: 0.8420\n",
|
||||
"Epoch 112/150\n",
|
||||
" - 15s - loss: 0.8660 - val_loss: 0.8422\n",
|
||||
"Epoch 113/150\n",
|
||||
" - 15s - loss: 0.8470 - val_loss: 0.8423\n",
|
||||
"Epoch 114/150\n",
|
||||
" - 15s - loss: 0.8593 - val_loss: 0.8424\n",
|
||||
"Epoch 115/150\n",
|
||||
" - 15s - loss: 0.8498 - val_loss: 0.8423\n",
|
||||
"Epoch 116/150\n",
|
||||
" - 15s - loss: 0.8572 - val_loss: 0.8425\n",
|
||||
"Epoch 117/150\n",
|
||||
" - 15s - loss: 0.8522 - val_loss: 0.8421\n",
|
||||
"Epoch 118/150\n",
|
||||
" - 15s - loss: 0.8545 - val_loss: 0.8421\n",
|
||||
"Epoch 119/150\n",
|
||||
" - 15s - loss: 0.8593 - val_loss: 0.8418\n",
|
||||
"Epoch 120/150\n",
|
||||
" - 15s - loss: 0.8466 - val_loss: 0.8418\n",
|
||||
"Epoch 121/150\n",
|
||||
" - 15s - loss: 0.8653 - val_loss: 0.8418\n",
|
||||
"Epoch 122/150\n",
|
||||
" - 15s - loss: 0.8429 - val_loss: 0.8418\n",
|
||||
"Epoch 123/150\n",
|
||||
" - 15s - loss: 0.8667 - val_loss: 0.8420\n",
|
||||
"Epoch 124/150\n",
|
||||
" - 15s - loss: 0.8467 - val_loss: 0.8422\n",
|
||||
"Epoch 125/150\n",
|
||||
" - 15s - loss: 0.8603 - val_loss: 0.8422\n",
|
||||
"Epoch 126/150\n",
|
||||
" - 15s - loss: 0.8483 - val_loss: 0.8422\n",
|
||||
"Epoch 127/150\n",
|
||||
" - 15s - loss: 0.8578 - val_loss: 0.8424\n",
|
||||
"Epoch 128/150\n",
|
||||
" - 15s - loss: 0.8517 - val_loss: 0.8421\n",
|
||||
"Epoch 129/150\n",
|
||||
" - 15s - loss: 0.8554 - val_loss: 0.8422\n",
|
||||
"Epoch 130/150\n",
|
||||
" - 15s - loss: 0.8572 - val_loss: 0.8418\n",
|
||||
"Epoch 131/150\n",
|
||||
" - 15s - loss: 0.8484 - val_loss: 0.8418\n",
|
||||
"Epoch 132/150\n",
|
||||
" - 15s - loss: 0.8645 - val_loss: 0.8417\n",
|
||||
"Epoch 133/150\n",
|
||||
" - 15s - loss: 0.8427 - val_loss: 0.8417\n",
|
||||
"Epoch 134/150\n",
|
||||
" - 15s - loss: 0.8669 - val_loss: 0.8419\n",
|
||||
"Epoch 135/150\n",
|
||||
" - 15s - loss: 0.8456 - val_loss: 0.8420\n",
|
||||
"Epoch 136/150\n",
|
||||
" - 15s - loss: 0.8617 - val_loss: 0.8421\n",
|
||||
"Epoch 137/150\n",
|
||||
" - 15s - loss: 0.8477 - val_loss: 0.8422\n",
|
||||
"Epoch 138/150\n",
|
||||
" - 15s - loss: 0.8574 - val_loss: 0.8423\n",
|
||||
"Epoch 139/150\n",
|
||||
" - 15s - loss: 0.8515 - val_loss: 0.8421\n",
|
||||
"Epoch 140/150\n",
|
||||
" - 15s - loss: 0.8560 - val_loss: 0.8422\n",
|
||||
"Epoch 141/150\n",
|
||||
" - 15s - loss: 0.8553 - val_loss: 0.8419\n",
|
||||
"Epoch 142/150\n",
|
||||
" - 15s - loss: 0.8502 - val_loss: 0.8418\n",
|
||||
"Epoch 143/150\n",
|
||||
" - 15s - loss: 0.8633 - val_loss: 0.8416\n",
|
||||
"Epoch 144/150\n",
|
||||
" - 15s - loss: 0.8434 - val_loss: 0.8416\n",
|
||||
"Epoch 145/150\n",
|
||||
" - 15s - loss: 0.8668 - val_loss: 0.8417\n",
|
||||
"Epoch 146/150\n",
|
||||
" - 15s - loss: 0.8433 - val_loss: 0.8418\n",
|
||||
"Epoch 147/150\n",
|
||||
" - 15s - loss: 0.8642 - val_loss: 0.8419\n",
|
||||
"Epoch 148/150\n",
|
||||
" - 15s - loss: 0.8468 - val_loss: 0.8421\n",
|
||||
"Epoch 149/150\n",
|
||||
" - 15s - loss: 0.8585 - val_loss: 0.8421\n",
|
||||
"Epoch 150/150\n",
|
||||
" - 15s - loss: 0.8504 - val_loss: 0.8420\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"history = model.fit_generator(train_generator, steps_per_epoch=(samples_per_epoch / batch_size), \n",
|
||||
" epochs=nb_epoch, callbacks=callbacks,\n",
|
||||
" validation_data=val_generator, validation_steps=N_seq_val / batch_size,\n",
|
||||
" verbose=2, workers=0)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 13,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"if save_model:\n",
|
||||
" json_string = model.to_json()\n",
|
||||
" with open(json_file, \"w\") as f:\n",
|
||||
" f.write(json_string)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.6.4"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
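A minimal sketch (not part of this commit) of reloading the architecture saved by the cell above, assuming the weights were also written with model.save_weights(); the json path is a placeholder for whatever json_file points to, while the weights path mirrors the file added under Training/weights below.

from keras.models import model_from_json
from prednet import PredNet

with open('prednet_weather_model.json', 'r') as f:  # placeholder for json_file
    model = model_from_json(f.read(), custom_objects={'PredNet': PredNet})
model.load_weights('weights/prednet_weather_weights.hdf5')  # weights file added in this commit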
68
Project Final/Training/data_utils.py
Normal file
@@ -0,0 +1,68 @@
import hickle as hkl
import numpy as np

from keras import backend as K
from keras.preprocessing.image import Iterator


# Data generator that creates sequences for input into PredNet.
class SequenceGenerator(Iterator):
    def __init__(self, data_file, source_file, nt,
                 batch_size=8, shuffle=False, seed=None,
                 output_mode='error', sequence_start_mode='all', N_seq=None,
                 data_format=K.image_data_format()):
        self.X = hkl.load(data_file)  # X will be like (n_images, nb_cols, nb_rows, nb_channels)
        self.sources = hkl.load(source_file)  # source for each image so when creating sequences can assure that consecutive frames are from same video
        self.nt = nt
        self.batch_size = batch_size
        self.data_format = data_format
        assert sequence_start_mode in {'all', 'unique'}, 'sequence_start_mode must be in {all, unique}'
        self.sequence_start_mode = sequence_start_mode
        assert output_mode in {'error', 'prediction'}, 'output_mode must be in {error, prediction}'
        self.output_mode = output_mode

        if self.data_format == 'channels_first':
            self.X = np.transpose(self.X, (0, 3, 1, 2))
        self.im_shape = self.X[0].shape

        if self.sequence_start_mode == 'all':  # allow for any possible sequence, starting from any frame
            self.possible_starts = np.array([i for i in range(self.X.shape[0] - self.nt) if self.sources[i] == self.sources[i + self.nt - 1]])
        elif self.sequence_start_mode == 'unique':  # create sequences where each unique frame is in at most one sequence
            curr_location = 0
            possible_starts = []
            while curr_location < self.X.shape[0] - self.nt + 1:
                if self.sources[curr_location] == self.sources[curr_location + self.nt - 1]:
                    possible_starts.append(curr_location)
                    curr_location += self.nt
                else:
                    curr_location += 1
            self.possible_starts = possible_starts

        if shuffle:
            self.possible_starts = np.random.permutation(self.possible_starts)
        if N_seq is not None and len(self.possible_starts) > N_seq:  # select a subset of sequences if want to
            self.possible_starts = self.possible_starts[:N_seq]
        self.N_sequences = len(self.possible_starts)
        super(SequenceGenerator, self).__init__(len(self.possible_starts), batch_size, shuffle, seed)

    def next(self):
        with self.lock:
            index_array = next(self.index_generator)
            current_index = index_array[0]
            current_batch_size = len(index_array)
        batch_x = np.zeros((current_batch_size, self.nt) + self.im_shape, np.float32)
        for i, idx in enumerate(index_array):
            idx = self.possible_starts[idx]
            batch_x[i] = self.preprocess(self.X[idx:idx+self.nt])
        if self.output_mode == 'error':  # model outputs errors, so y should be zeros
            batch_y = np.zeros(current_batch_size, np.float32)
        elif self.output_mode == 'prediction':  # output actual pixels
            batch_y = batch_x
        return batch_x, batch_y

    def preprocess(self, X):
        return X.astype(np.float32) / 255

    def create_all(self):
        X_all = np.zeros((self.N_sequences, self.nt) + self.im_shape, np.float32)
        for i, idx in enumerate(self.possible_starts):
            X_all[i] = self.preprocess(self.X[idx:idx+self.nt])
        return X_all
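A minimal usage sketch for SequenceGenerator (not part of the commit); the hickle file names and hyperparameters are placeholders, with nt=24 assumed only because the model json later in this commit records a 24-step input.

from data_utils import SequenceGenerator

nt = 24          # assumed sequence length
batch_size = 4   # assumed

train_generator = SequenceGenerator('X_train.hkl', 'sources_train.hkl', nt,
                                    batch_size=batch_size, shuffle=True)
val_generator = SequenceGenerator('X_val.hkl', 'sources_val.hkl', nt,
                                  batch_size=batch_size, N_seq=100)

batch_x, batch_y = train_generator.next()
# batch_x: (batch_size, nt) + frame shape, float32, divided by 255 in preprocess()
# batch_y: zeros, because output_mode='error' trains the network to drive its error units to zero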
58
Project Final/Training/keras_utils.py
Normal file
@@ -0,0 +1,58 @@
import os
import numpy as np

from importlib import reload  # reload() is not a builtin under Python 3

from keras import backend as K
from keras.legacy.interfaces import generate_legacy_interface, recurrent_args_preprocessor
from keras.models import model_from_json

legacy_prednet_support = generate_legacy_interface(
    allowed_positional_args=['stack_sizes', 'R_stack_sizes',
                             'A_filt_sizes', 'Ahat_filt_sizes', 'R_filt_sizes'],
    conversions=[('dim_ordering', 'data_format'),
                 ('consume_less', 'implementation')],
    value_conversions={'dim_ordering': {'tf': 'channels_last',
                                        'th': 'channels_first',
                                        'default': None},
                       'consume_less': {'cpu': 0,
                                        'mem': 1,
                                        'gpu': 2}},
    preprocessor=recurrent_args_preprocessor)


# Convert old Keras (1.2) json models and weights to Keras 2.0
def convert_model_to_keras2(old_json_file, old_weights_file, new_json_file, new_weights_file):
    from prednet import PredNet
    # If using tensorflow, it doesn't allow you to load the old weights.
    if K.backend() != 'theano':
        os.environ['KERAS_BACKEND'] = 'theano'  # the original line referenced an undefined name `backend`; switch to Theano before reloading
        reload(K)

    f = open(old_json_file, 'r')
    json_string = f.read()
    f.close()
    model = model_from_json(json_string, custom_objects = {'PredNet': PredNet})
    model.load_weights(old_weights_file)

    weights = model.layers[1].get_weights()
    if weights[0].shape[0] == model.layers[1].stack_sizes[1]:
        for i, w in enumerate(weights):
            if w.ndim == 4:
                weights[i] = np.transpose(w, (2, 3, 1, 0))
        model.set_weights(weights)

    model.save_weights(new_weights_file)
    json_string = model.to_json()
    with open(new_json_file, "w") as f:
        f.write(json_string)


if __name__ == '__main__':
    old_dir = './model_data/'
    new_dir = './model_data_keras2/'
    if not os.path.exists(new_dir):
        os.mkdir(new_dir)
    for w_tag in ['', '-Lall', '-extrapfinetuned']:
        m_tag = '' if w_tag == '-Lall' else w_tag
        convert_model_to_keras2(old_dir + 'prednet_kitti_model' + m_tag + '.json',
                                old_dir + 'prednet_kitti_weights' + w_tag + '.hdf5',
                                new_dir + 'prednet_kitti_model' + m_tag + '.json',
                                new_dir + 'prednet_kitti_weights' + w_tag + '.hdf5')
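For reference, an illustrative note (not from the commit) on what the legacy decorator above does once applied to PredNet's constructor:

# Keras-1 style keyword arguments are rewritten by @legacy_prednet_support, so
#   PredNet(stack_sizes, R_stack_sizes, A_filt_sizes, Ahat_filt_sizes, R_filt_sizes,
#           dim_ordering='tf', consume_less='gpu')
# is interpreted as
#   PredNet(stack_sizes, R_stack_sizes, A_filt_sizes, Ahat_filt_sizes, R_filt_sizes,
#           data_format='channels_last', implementation=2)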
311
Project Final/Training/prednet.py
Normal file
@@ -0,0 +1,311 @@
import numpy as np

from keras import backend as K
from keras import activations
from keras.layers import Recurrent
from keras.layers import Conv2D, UpSampling2D, MaxPooling2D
from keras.engine import InputSpec
from keras_utils import legacy_prednet_support


class PredNet(Recurrent):
    '''PredNet architecture - Lotter 2016.
    Stacked convolutional LSTM inspired by predictive coding principles.

    # Arguments
        stack_sizes: number of channels in targets (A) and predictions (Ahat) in each layer of the architecture.
            Length is the number of layers in the architecture.
            First element is the number of channels in the input.
            Ex. (3, 16, 32) would correspond to a 3 layer architecture that takes in RGB images and has 16 and 32
                channels in the second and third layers, respectively.
        R_stack_sizes: number of channels in the representation (R) modules.
            Length must equal length of stack_sizes, but the number of channels per layer can be different.
        A_filt_sizes: filter sizes for the target (A) modules.
            Has length of len(stack_sizes) - 1.
            Ex. (3, 3) would mean that targets for layers 2 and 3 are computed by a 3x3 convolution of the errors (E)
                from the layer below (followed by max-pooling).
        Ahat_filt_sizes: filter sizes for the prediction (Ahat) modules.
            Has length equal to length of stack_sizes.
            Ex. (3, 3, 3) would mean that the predictions for each layer are computed by a 3x3 convolution of the
                representation (R) modules at each layer.
        R_filt_sizes: filter sizes for the representation (R) modules.
            Has length equal to length of stack_sizes.
            Corresponds to the filter sizes for all convolutions in the LSTM.
        pixel_max: the maximum pixel value.
            Used to clip the pixel-layer prediction.
        error_activation: activation function for the error (E) units.
        A_activation: activation function for the target (A) and prediction (A_hat) units.
        LSTM_activation: activation function for the cell and hidden states of the LSTM.
        LSTM_inner_activation: activation function for the gates in the LSTM.
        output_mode: either 'error', 'prediction', 'all' or layer specification (ex. R2, see below).
            Controls what is outputted by the PredNet.
            If 'error', the mean response of the error (E) units of each layer will be outputted.
                That is, the output shape will be (batch_size, nb_layers).
            If 'prediction', the frame prediction will be outputted.
            If 'all', the output will be the frame prediction concatenated with the mean layer errors.
                The frame prediction is flattened before concatenation.
                Nomenclature of 'all' is kept for backwards compatibility, but should not be confused with returning all of the layers of the model.
            For returning the features of a particular layer, output_mode should be of the form unit_type + layer_number.
                For instance, to return the features of the LSTM "representational" units in the lowest layer, output_mode should be specified as 'R0'.
                The possible unit types are 'R', 'Ahat', 'A', and 'E' corresponding to the 'representation', 'prediction', 'target', and 'error' units respectively.
        extrap_start_time: time step for which model will start extrapolating.
            Starting at this time step, the prediction from the previous time step will be treated as the "actual" input.
        data_format: 'channels_first' or 'channels_last'.
            It defaults to the `image_data_format` value found in your
            Keras config file at `~/.keras/keras.json`.

    # References
        - [Deep predictive coding networks for video prediction and unsupervised learning](https://arxiv.org/abs/1605.08104)
        - [Long short-term memory](http://deeplearning.cs.cmu.edu/pdfs/Hochreiter97_lstm.pdf)
        - [Convolutional LSTM network: a machine learning approach for precipitation nowcasting](http://arxiv.org/abs/1506.04214)
        - [Predictive coding in the visual cortex: a functional interpretation of some extra-classical receptive-field effects](http://www.nature.com/neuro/journal/v2/n1/pdf/nn0199_79.pdf)
    '''
    @legacy_prednet_support
    def __init__(self, stack_sizes, R_stack_sizes,
                 A_filt_sizes, Ahat_filt_sizes, R_filt_sizes,
                 pixel_max=1., error_activation='relu', A_activation='relu',
                 LSTM_activation='tanh', LSTM_inner_activation='hard_sigmoid',
                 output_mode='error', extrap_start_time=None,
                 data_format=K.image_data_format(), **kwargs):
        self.stack_sizes = stack_sizes
        self.nb_layers = len(stack_sizes)
        assert len(R_stack_sizes) == self.nb_layers, 'len(R_stack_sizes) must equal len(stack_sizes)'
        self.R_stack_sizes = R_stack_sizes
        assert len(A_filt_sizes) == (self.nb_layers - 1), 'len(A_filt_sizes) must equal len(stack_sizes) - 1'
        self.A_filt_sizes = A_filt_sizes
        assert len(Ahat_filt_sizes) == self.nb_layers, 'len(Ahat_filt_sizes) must equal len(stack_sizes)'
        self.Ahat_filt_sizes = Ahat_filt_sizes
        assert len(R_filt_sizes) == (self.nb_layers), 'len(R_filt_sizes) must equal len(stack_sizes)'
        self.R_filt_sizes = R_filt_sizes

        self.pixel_max = pixel_max
        self.error_activation = activations.get(error_activation)
        self.A_activation = activations.get(A_activation)
        self.LSTM_activation = activations.get(LSTM_activation)
        self.LSTM_inner_activation = activations.get(LSTM_inner_activation)

        default_output_modes = ['prediction', 'error', 'all']
        layer_output_modes = [layer + str(n) for n in range(self.nb_layers) for layer in ['R', 'E', 'A', 'Ahat']]
        assert output_mode in default_output_modes + layer_output_modes, 'Invalid output_mode: ' + str(output_mode)
        self.output_mode = output_mode
        if self.output_mode in layer_output_modes:
            self.output_layer_type = self.output_mode[:-1]
            self.output_layer_num = int(self.output_mode[-1])
        else:
            self.output_layer_type = None
            self.output_layer_num = None
        self.extrap_start_time = extrap_start_time

        assert data_format in {'channels_last', 'channels_first'}, 'data_format must be in {channels_last, channels_first}'
        self.data_format = data_format
        self.channel_axis = -3 if data_format == 'channels_first' else -1
        self.row_axis = -2 if data_format == 'channels_first' else -3
        self.column_axis = -1 if data_format == 'channels_first' else -2
        super(PredNet, self).__init__(**kwargs)
        self.input_spec = [InputSpec(ndim=5)]

    def compute_output_shape(self, input_shape):
        if self.output_mode == 'prediction':
            out_shape = input_shape[2:]
        elif self.output_mode == 'error':
            out_shape = (self.nb_layers,)
        elif self.output_mode == 'all':
            out_shape = (np.prod(input_shape[2:]) + self.nb_layers,)
        else:
            stack_str = 'R_stack_sizes' if self.output_layer_type == 'R' else 'stack_sizes'
            stack_mult = 2 if self.output_layer_type == 'E' else 1
            out_stack_size = stack_mult * getattr(self, stack_str)[self.output_layer_num]
            out_nb_row = input_shape[self.row_axis] / 2**self.output_layer_num
            out_nb_col = input_shape[self.column_axis] / 2**self.output_layer_num
            if self.data_format == 'channels_first':
                out_shape = (out_stack_size, out_nb_row, out_nb_col)
            else:
                out_shape = (out_nb_row, out_nb_col, out_stack_size)

        if self.return_sequences:
            return (input_shape[0], input_shape[1]) + out_shape
        else:
            return (input_shape[0],) + out_shape

    def get_initial_state(self, x):
        input_shape = self.input_spec[0].shape
        init_nb_row = input_shape[self.row_axis]
        init_nb_col = input_shape[self.column_axis]

        base_initial_state = K.zeros_like(x)  # (samples, timesteps) + image_shape
        non_channel_axis = -1 if self.data_format == 'channels_first' else -2
        for _ in range(2):
            base_initial_state = K.sum(base_initial_state, axis=non_channel_axis)
        base_initial_state = K.sum(base_initial_state, axis=1)  # (samples, nb_channels)

        initial_states = []
        states_to_pass = ['r', 'c', 'e']
        nlayers_to_pass = {u: self.nb_layers for u in states_to_pass}
        if self.extrap_start_time is not None:
            states_to_pass.append('ahat')  # pass prediction in states so can use as actual for t+1 when extrapolating
            nlayers_to_pass['ahat'] = 1
        for u in states_to_pass:
            for l in range(nlayers_to_pass[u]):
                ds_factor = 2 ** l
                nb_row = init_nb_row // ds_factor
                nb_col = init_nb_col // ds_factor
                if u in ['r', 'c']:
                    stack_size = self.R_stack_sizes[l]
                elif u == 'e':
                    stack_size = 2 * self.stack_sizes[l]
                elif u == 'ahat':
                    stack_size = self.stack_sizes[l]
                output_size = stack_size * nb_row * nb_col  # flattened size

                reducer = K.zeros((input_shape[self.channel_axis], output_size))  # (nb_channels, output_size)
                initial_state = K.dot(base_initial_state, reducer)  # (samples, output_size)
                if self.data_format == 'channels_first':
                    output_shp = (-1, stack_size, nb_row, nb_col)
                else:
                    output_shp = (-1, nb_row, nb_col, stack_size)
                initial_state = K.reshape(initial_state, output_shp)
                initial_states += [initial_state]

        if K._BACKEND == 'theano':
            from theano import tensor as T
            # There is a known issue in the Theano scan op when dealing with inputs whose shape is 1 along a dimension.
            # In our case, this is a problem when training on grayscale images, and the below line fixes it.
            initial_states = [T.unbroadcast(init_state, 0, 1) for init_state in initial_states]

        if self.extrap_start_time is not None:
            initial_states += [K.variable(0, int if K.backend() != 'tensorflow' else 'int32')]  # the last state will correspond to the current timestep
        return initial_states

    def build(self, input_shape):
        self.input_spec = [InputSpec(shape=input_shape)]
        self.conv_layers = {c: [] for c in ['i', 'f', 'c', 'o', 'a', 'ahat']}

        for l in range(self.nb_layers):
            for c in ['i', 'f', 'c', 'o']:
                act = self.LSTM_activation if c == 'c' else self.LSTM_inner_activation
                self.conv_layers[c].append(Conv2D(self.R_stack_sizes[l], self.R_filt_sizes[l], padding='same', activation=act, data_format=self.data_format))

            act = 'relu' if l == 0 else self.A_activation
            self.conv_layers['ahat'].append(Conv2D(self.stack_sizes[l], self.Ahat_filt_sizes[l], padding='same', activation=act, data_format=self.data_format))

            if l < self.nb_layers - 1:
                self.conv_layers['a'].append(Conv2D(self.stack_sizes[l+1], self.A_filt_sizes[l], padding='same', activation=self.A_activation, data_format=self.data_format))

        self.upsample = UpSampling2D(data_format=self.data_format)
        self.pool = MaxPooling2D(data_format=self.data_format)

        self.trainable_weights = []
        nb_row, nb_col = (input_shape[-2], input_shape[-1]) if self.data_format == 'channels_first' else (input_shape[-3], input_shape[-2])
        for c in sorted(self.conv_layers.keys()):
            for l in range(len(self.conv_layers[c])):
                ds_factor = 2 ** l
                if c == 'ahat':
                    nb_channels = self.R_stack_sizes[l]
                elif c == 'a':
                    nb_channels = 2 * self.R_stack_sizes[l]
                else:
                    nb_channels = self.stack_sizes[l] * 2 + self.R_stack_sizes[l]
                    if l < self.nb_layers - 1:
                        nb_channels += self.R_stack_sizes[l+1]
                in_shape = (input_shape[0], nb_channels, nb_row // ds_factor, nb_col // ds_factor)
                if self.data_format == 'channels_last': in_shape = (in_shape[0], in_shape[2], in_shape[3], in_shape[1])
                with K.name_scope('layer_' + c + '_' + str(l)):
                    self.conv_layers[c][l].build(in_shape)
                self.trainable_weights += self.conv_layers[c][l].trainable_weights

        self.states = [None] * self.nb_layers*3

        if self.extrap_start_time is not None:
            self.t_extrap = K.variable(self.extrap_start_time, int if K.backend() != 'tensorflow' else 'int32')
            self.states += [None] * 2  # [previous frame prediction, timestep]

    def step(self, a, states):
        r_tm1 = states[:self.nb_layers]
        c_tm1 = states[self.nb_layers:2*self.nb_layers]
        e_tm1 = states[2*self.nb_layers:3*self.nb_layers]

        if self.extrap_start_time is not None:
            t = states[-1]
            a = K.switch(t >= self.t_extrap, states[-2], a)  # if past self.extrap_start_time, the previous prediction will be treated as the actual

        c = []
        r = []
        e = []

        # Update R units starting from the top
        for l in reversed(range(self.nb_layers)):
            inputs = [r_tm1[l], e_tm1[l]]
            if l < self.nb_layers - 1:
                inputs.append(r_up)

            inputs = K.concatenate(inputs, axis=self.channel_axis)
            i = self.conv_layers['i'][l].call(inputs)
            f = self.conv_layers['f'][l].call(inputs)
            o = self.conv_layers['o'][l].call(inputs)
            _c = f * c_tm1[l] + i * self.conv_layers['c'][l].call(inputs)
            _r = o * self.LSTM_activation(_c)
            c.insert(0, _c)
            r.insert(0, _r)

            if l > 0:
                r_up = self.upsample.call(_r)

        # Update feedforward path starting from the bottom
        for l in range(self.nb_layers):
            ahat = self.conv_layers['ahat'][l].call(r[l])
            if l == 0:
                ahat = K.minimum(ahat, self.pixel_max)
                frame_prediction = ahat

            # compute errors
            e_up = self.error_activation(ahat - a)
            e_down = self.error_activation(a - ahat)

            e.append(K.concatenate((e_up, e_down), axis=self.channel_axis))

            if self.output_layer_num == l:
                if self.output_layer_type == 'A':
                    output = a
                elif self.output_layer_type == 'Ahat':
                    output = ahat
                elif self.output_layer_type == 'R':
                    output = r[l]
                elif self.output_layer_type == 'E':
                    output = e[l]

            if l < self.nb_layers - 1:
                a = self.conv_layers['a'][l].call(e[l])
                a = self.pool.call(a)  # target for next layer

        if self.output_layer_type is None:
            if self.output_mode == 'prediction':
                output = frame_prediction
            else:
                for l in range(self.nb_layers):
                    layer_error = K.mean(K.batch_flatten(e[l]), axis=-1, keepdims=True)
                    all_error = layer_error if l == 0 else K.concatenate((all_error, layer_error), axis=-1)
                if self.output_mode == 'error':
                    output = all_error
                else:
                    output = K.concatenate((K.batch_flatten(frame_prediction), all_error), axis=-1)

        states = r + c + e
        if self.extrap_start_time is not None:
            states += [frame_prediction, t + 1]
        return output, states

    def get_config(self):
        config = {'stack_sizes': self.stack_sizes,
                  'R_stack_sizes': self.R_stack_sizes,
                  'A_filt_sizes': self.A_filt_sizes,
                  'Ahat_filt_sizes': self.Ahat_filt_sizes,
                  'R_filt_sizes': self.R_filt_sizes,
                  'pixel_max': self.pixel_max,
                  'error_activation': self.error_activation.__name__,
                  'A_activation': self.A_activation.__name__,
                  'LSTM_activation': self.LSTM_activation.__name__,
                  'LSTM_inner_activation': self.LSTM_inner_activation.__name__,
                  'data_format': self.data_format,
                  'extrap_start_time': self.extrap_start_time,
                  'output_mode': self.output_mode}
        base_config = super(PredNet, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
@@ -0,0 +1 @@
{"class_name": "Model", "config": {"name": "model_1", "layers": [{"name": "input_1", "class_name": "InputLayer", "config": {"batch_input_shape": [null, 24, 20, 40, 12], "dtype": "float32", "sparse": false, "name": "input_1"}, "inbound_nodes": []}, {"name": "pred_net_1", "class_name": "PredNet", "config": {"name": "pred_net_1", "trainable": true, "return_sequences": true, "return_state": false, "go_backwards": false, "stateful": false, "unroll": false, "implementation": 0, "stack_sizes": [12, 48, 96], "R_stack_sizes": [12, 48, 96], "A_filt_sizes": [3, 3], "Ahat_filt_sizes": [3, 3, 3], "R_filt_sizes": [3, 3, 3], "pixel_max": 1.0, "error_activation": "relu", "A_activation": "relu", "LSTM_activation": "tanh", "LSTM_inner_activation": "hard_sigmoid", "data_format": "channels_last", "extrap_start_time": null, "output_mode": "error"}, "inbound_nodes": [[["input_1", 0, 0, {}]]]}, {"name": "time_distributed_1", "class_name": "TimeDistributed", "config": {"name": "time_distributed_1", "trainable": false, "layer": {"class_name": "Dense", "config": {"name": "dense_1", "trainable": false, "units": 1, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}}, "inbound_nodes": [[["pred_net_1", 0, 0, {}]]]}, {"name": "flatten_1", "class_name": "Flatten", "config": {"name": "flatten_1", "trainable": true, "data_format": "channels_last"}, "inbound_nodes": [[["time_distributed_1", 0, 0, {}]]]}, {"name": "dense_2", "class_name": "Dense", "config": {"name": "dense_2", "trainable": false, "units": 1, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["flatten_1", 0, 0, {}]]]}], "input_layers": [["input_1", 0, 0]], "output_layers": [["dense_2", 0, 0]]}, "keras_version": "2.2.0", "backend": "tensorflow"}
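A sketch of rebuilding the training graph that the model json above records: PredNet in 'error' mode wrapped by a frozen TimeDistributed Dense (per-layer error weighting), a Flatten, and a frozen Dense (per-timestep weighting). Only the layer stack and shapes come from the json; the weight values of the two frozen Dense layers, the loss, and the optimizer are assumptions here.

from keras.models import Model
from keras.layers import Input, Dense, Flatten, TimeDistributed
from prednet import PredNet

nt = 24
input_shape = (nt, 20, 40, 12)               # batch_input_shape from the json
stack_sizes = (12, 48, 96)                   # from the json
R_stack_sizes = (12, 48, 96)
A_filt_sizes = (3, 3)
Ahat_filt_sizes = (3, 3, 3)
R_filt_sizes = (3, 3, 3)

prednet = PredNet(stack_sizes, R_stack_sizes, A_filt_sizes, Ahat_filt_sizes, R_filt_sizes,
                  output_mode='error', return_sequences=True)

inputs = Input(shape=input_shape)
errors = prednet(inputs)                                               # (batch, nt, nb_layers)
errors_by_time = TimeDistributed(Dense(1, trainable=False), trainable=False)(errors)
errors_by_time = Flatten()(errors_by_time)                             # (batch, nt)
final_errors = Dense(1, trainable=False)(errors_by_time)               # scalar error per sample
model = Model(inputs=inputs, outputs=final_errors)
model.compile(loss='mean_absolute_error', optimizer='adam')            # assumed training settings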
BIN
Project Final/Training/weights/old/prednet_weather_weights.hdf5
Normal file
Binary file not shown.
@@ -0,0 +1 @@
{"class_name": "Model", "config": {"name": "model_1", "layers": [{"name": "input_1", "class_name": "InputLayer", "config": {"batch_input_shape": [null, 24, 20, 40, 7], "dtype": "float32", "sparse": false, "name": "input_1"}, "inbound_nodes": []}, {"name": "pred_net_1", "class_name": "PredNet", "config": {"name": "pred_net_1", "trainable": true, "return_sequences": true, "return_state": false, "go_backwards": false, "stateful": false, "unroll": false, "implementation": 0, "stack_sizes": [7, 48, 96], "R_stack_sizes": [7, 48, 96], "A_filt_sizes": [3, 3], "Ahat_filt_sizes": [3, 3, 3], "R_filt_sizes": [3, 3, 3], "pixel_max": 1.0, "error_activation": "relu", "A_activation": "relu", "LSTM_activation": "tanh", "LSTM_inner_activation": "hard_sigmoid", "data_format": "channels_last", "extrap_start_time": null, "output_mode": "error"}, "inbound_nodes": [[["input_1", 0, 0, {}]]]}, {"name": "time_distributed_1", "class_name": "TimeDistributed", "config": {"name": "time_distributed_1", "trainable": false, "layer": {"class_name": "Dense", "config": {"name": "dense_1", "trainable": false, "units": 1, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}}, "inbound_nodes": [[["pred_net_1", 0, 0, {}]]]}, {"name": "flatten_1", "class_name": "Flatten", "config": {"name": "flatten_1", "trainable": true, "data_format": "channels_last"}, "inbound_nodes": [[["time_distributed_1", 0, 0, {}]]]}, {"name": "dense_2", "class_name": "Dense", "config": {"name": "dense_2", "trainable": false, "units": 1, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["flatten_1", 0, 0, {}]]]}], "input_layers": [["input_1", 0, 0]], "output_layers": [["dense_2", 0, 0]]}, "keras_version": "2.2.0", "backend": "tensorflow"}
BIN
Project Final/Training/weights/prednet_weather_weights.hdf5
Normal file
Binary file not shown.
@@ -0,0 +1 @@
{"class_name": "Model", "config": {"name": "model_1", "layers": [{"name": "input_1", "class_name": "InputLayer", "config": {"batch_input_shape": [null, 24, 20, 40, 12], "dtype": "float32", "sparse": false, "name": "input_1"}, "inbound_nodes": []}, {"name": "pred_net_1", "class_name": "PredNet", "config": {"name": "pred_net_1", "trainable": true, "return_sequences": true, "return_state": false, "go_backwards": false, "stateful": false, "unroll": false, "implementation": 0, "stack_sizes": [12, 48, 96], "R_stack_sizes": [12, 48, 96], "A_filt_sizes": [3, 3], "Ahat_filt_sizes": [3, 3, 3], "R_filt_sizes": [3, 3, 3], "pixel_max": 1.0, "error_activation": "relu", "A_activation": "relu", "LSTM_activation": "tanh", "LSTM_inner_activation": "hard_sigmoid", "data_format": "channels_last", "extrap_start_time": null, "output_mode": "error"}, "inbound_nodes": [[["input_1", 0, 0, {}]]]}, {"name": "time_distributed_1", "class_name": "TimeDistributed", "config": {"name": "time_distributed_1", "trainable": false, "layer": {"class_name": "Dense", "config": {"name": "dense_1", "trainable": false, "units": 1, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}}, "inbound_nodes": [[["pred_net_1", 0, 0, {}]]]}, {"name": "flatten_1", "class_name": "Flatten", "config": {"name": "flatten_1", "trainable": true, "data_format": "channels_last"}, "inbound_nodes": [[["time_distributed_1", 0, 0, {}]]]}, {"name": "dense_2", "class_name": "Dense", "config": {"name": "dense_2", "trainable": false, "units": 1, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "inbound_nodes": [[["flatten_1", 0, 0, {}]]]}], "input_layers": [["input_1", 0, 0]], "output_layers": [["dense_2", 0, 0]]}, "keras_version": "2.2.0", "backend": "tensorflow"}
Binary file not shown.
434
Project Final/Transformation/Data_Transformation.ipynb
Normal file
File diff suppressed because one or more lines are too long
373
Project Final/Transformation/stations_mask.csv
Normal file
@@ -0,0 +1,373 @@
,STATION_ID,LAT_SCALED,LON_SCALED
|
||||
0,WBAN:00184,16,22
|
||||
1,WBAN:14929,3,18
|
||||
2,WBAN:13962,14,17
|
||||
3,WBAN:94975,5,17
|
||||
4,WBAN:14813,7,30
|
||||
5,WBAN:53864,14,26
|
||||
6,WBAN:23061,10,13
|
||||
7,WBAN:54921,6,18
|
||||
8,WBAN:00258,12,12
|
||||
9,WBAN:24044,6,15
|
||||
10,WBAN:03049,16,14
|
||||
11,WBAN:94299,6,2
|
||||
12,WBAN:53933,10,18
|
||||
13,WBAN:23047,12,16
|
||||
14,WBAN:63811,12,28
|
||||
15,WBAN:00137,11,13
|
||||
16,WBAN:04864,3,24
|
||||
17,WBAN:12832,17,27
|
||||
18,WBAN:93730,8,35
|
||||
19,WBAN:53932,12,19
|
||||
20,WBAN:14605,4,38
|
||||
21,WBAN:94281,3,1
|
||||
22,WBAN:54817,4,29
|
||||
23,WBAN:53138,8,7
|
||||
24,WBAN:24130,3,4
|
||||
25,WBAN:23155,12,3
|
||||
26,WBAN:14606,3,39
|
||||
27,WBAN:14616,4,39
|
||||
28,WBAN:54833,5,24
|
||||
30,WBAN:00282,2,14
|
||||
31,WBAN:94947,7,19
|
||||
32,WBAN:00127,18,18
|
||||
33,WBAN:00224,4,2
|
||||
34,WBAN:54781,5,36
|
||||
35,WBAN:03044,14,16
|
||||
36,WBAN:24033,3,11
|
||||
37,WBAN:04725,6,34
|
||||
38,WBAN:23157,10,4
|
||||
39,WBAN:24011,2,16
|
||||
40,WBAN:00286,4,23
|
||||
41,WBAN:53881,10,31
|
||||
42,WBAN:03036,10,10
|
||||
43,WBAN:94793,7,37
|
||||
44,WBAN:23225,8,2
|
||||
45,WBAN:03859,10,30
|
||||
46,WBAN:23158,13,6
|
||||
47,WBAN:00263,17,18
|
||||
48,WBAN:24131,5,5
|
||||
49,WBAN:63871,16,27
|
||||
50,WBAN:14739,6,37
|
||||
51,WBAN:00310,0,5
|
||||
52,WBAN:93808,10,26
|
||||
53,WBAN:00353,5,11
|
||||
54,WBAN:24132,3,9
|
||||
55,WBAN:00451,10,21
|
||||
56,WBAN:00435,18,20
|
||||
57,WBAN:00433,9,3
|
||||
58,WBAN:24180,6,8
|
||||
59,WBAN:94946,6,17
|
||||
60,WBAN:94902,4,19
|
||||
61,WBAN:03721,13,32
|
||||
62,WBAN:23159,10,8
|
||||
63,WBAN:94037,3,14
|
||||
64,WBAN:94054,4,12
|
||||
65,WBAN:03068,9,14
|
||||
66,WBAN:04866,5,25
|
||||
67,WBAN:94282,0,1
|
||||
68,WBAN:14742,4,36
|
||||
69,WBAN:14817,4,27
|
||||
70,WBAN:54743,7,35
|
||||
72,WBAN:12986,17,19
|
||||
73,WBAN:23136,13,3
|
||||
74,WBAN:54923,3,19
|
||||
75,WBAN:00285,0,17
|
||||
76,WBAN:93729,12,34
|
||||
77,WBAN:93810,10,24
|
||||
78,WBAN:03177,14,4
|
||||
80,WBAN:24089,5,12
|
||||
81,WBAN:00465,6,32
|
||||
82,WBAN:00283,0,18
|
||||
83,WBAN:53887,9,24
|
||||
84,WBAN:24017,5,15
|
||||
85,WBAN:04114,4,7
|
||||
86,WBAN:94943,4,17
|
||||
87,WBAN:13880,14,31
|
||||
88,WBAN:13866,9,30
|
||||
89,WBAN:00143,8,15
|
||||
90,WBAN:93203,8,1
|
||||
91,WBAN:13301,8,22
|
||||
92,WBAN:53916,8,21
|
||||
93,WBAN:93104,11,4
|
||||
94,WBAN:94605,2,38
|
||||
95,WBAN:23051,11,14
|
||||
97,WBAN:14820,6,30
|
||||
98,WBAN:03027,12,13
|
||||
99,WBAN:00222,9,21
|
||||
100,WBAN:23008,13,14
|
||||
101,WBAN:12867,18,30
|
||||
102,WBAN:24045,4,10
|
||||
103,WBAN:24136,1,5
|
||||
104,WBAN:00276,15,17
|
||||
105,WBAN:53129,10,7
|
||||
107,WBAN:03945,9,22
|
||||
109,WBAN:13812,8,29
|
||||
110,WBAN:13984,8,18
|
||||
111,WBAN:94057,5,13
|
||||
113,WBAN:04141,5,0
|
||||
114,WBAN:00234,11,23
|
||||
115,WBAN:04908,7,20
|
||||
116,WBAN:94977,0,22
|
||||
117,WBAN:24286,6,0
|
||||
118,WBAN:04915,7,21
|
||||
119,WBAN:03073,15,20
|
||||
120,WBAN:00287,0,14
|
||||
121,WBAN:03847,11,27
|
||||
123,WBAN:93798,9,32
|
||||
124,WBAN:00316,3,22
|
||||
125,WBAN:94032,4,14
|
||||
126,WBAN:24137,0,8
|
||||
127,WBAN:23161,12,5
|
||||
128,WBAN:24219,3,2
|
||||
129,WBAN:54734,6,35
|
||||
130,WBAN:94704,5,32
|
||||
131,WBAN:04223,0,2
|
||||
133,WBAN:04871,6,25
|
||||
134,WBAN:53925,13,21
|
||||
135,WBAN:03976,16,21
|
||||
136,WBAN:53964,13,18
|
||||
137,WBAN:04916,5,23
|
||||
138,WBAN:22001,17,16
|
||||
139,WBAN:00315,7,29
|
||||
140,WBAN:23162,8,8
|
||||
141,WBAN:23078,14,11
|
||||
142,WBAN:00445,14,25
|
||||
143,WBAN:04139,6,3
|
||||
144,WBAN:14933,6,21
|
||||
145,WBAN:03104,13,5
|
||||
146,WBAN:53853,16,26
|
||||
147,WBAN:24138,3,8
|
||||
148,WBAN:00444,7,11
|
||||
149,WBAN:04978,4,22
|
||||
150,WBAN:13985,10,17
|
||||
151,WBAN:93026,15,10
|
||||
152,WBAN:13707,8,34
|
||||
153,WBAN:54786,7,34
|
||||
154,WBAN:54844,2,28
|
||||
155,WBAN:04787,7,32
|
||||
156,WBAN:00443,5,10
|
||||
158,WBAN:03070,11,15
|
||||
159,WBAN:14747,5,31
|
||||
160,WBAN:00298,5,29
|
||||
161,WBAN:03809,11,24
|
||||
164,WBAN:00254,18,22
|
||||
165,WBAN:14991,3,23
|
||||
166,WBAN:23114,12,4
|
||||
167,WBAN:93816,8,25
|
||||
168,WBAN:93992,14,22
|
||||
169,WBAN:23044,15,12
|
||||
170,WBAN:00182,2,20
|
||||
171,WBAN:13786,11,34
|
||||
172,WBAN:00210,11,29
|
||||
173,WBAN:93076,10,15
|
||||
174,WBAN:03733,8,31
|
||||
176,WBAN:24220,1,2
|
||||
177,WBAN:24006,4,15
|
||||
178,WBAN:14748,6,33
|
||||
180,WBAN:13989,9,19
|
||||
181,WBAN:53986,11,18
|
||||
182,WBAN:24141,1,3
|
||||
183,WBAN:94853,3,26
|
||||
184,WBAN:94971,5,20
|
||||
185,WBAN:00304,18,23
|
||||
186,WBAN:24221,4,0
|
||||
189,WBAN:04111,7,9
|
||||
190,WBAN:24114,1,4
|
||||
191,WBAN:00220,13,30
|
||||
192,WBAN:04925,7,22
|
||||
193,WBAN:94056,3,15
|
||||
194,WBAN:00270,18,17
|
||||
196,WBAN:94957,8,20
|
||||
197,WBAN:94969,4,21
|
||||
198,WBAN:23090,10,11
|
||||
199,WBAN:93996,10,23
|
||||
200,WBAN:03707,10,32
|
||||
201,WBAN:53922,11,21
|
||||
202,WBAN:13762,10,34
|
||||
203,WBAN:94966,2,19
|
||||
204,WBAN:00326,16,30
|
||||
205,WBAN:00237,2,22
|
||||
206,WBAN:14825,7,28
|
||||
207,WBAN:04780,5,37
|
||||
209,WBAN:53889,9,25
|
||||
210,WBAN:13829,14,27
|
||||
211,WBAN:00449,6,9
|
||||
212,WBAN:13806,11,26
|
||||
214,WBAN:94933,5,21
|
||||
216,WBAN:00162,7,14
|
||||
217,WBAN:63847,12,27
|
||||
218,WBAN:12895,19,31
|
||||
219,WBAN:53988,15,22
|
||||
220,WBAN:13947,8,19
|
||||
221,WBAN:53861,15,27
|
||||
222,WBAN:13964,12,21
|
||||
223,WBAN:03875,15,30
|
||||
224,WBAN:23091,16,15
|
||||
225,WBAN:14827,7,27
|
||||
226,WBAN:04929,1,20
|
||||
227,WBAN:53841,9,27
|
||||
228,WBAN:54818,4,26
|
||||
229,WBAN:00152,3,36
|
||||
230,WBAN:94868,6,31
|
||||
231,WBAN:03981,13,17
|
||||
232,WBAN:93947,16,17
|
||||
233,WBAN:03706,9,33
|
||||
234,WBAN:04876,6,24
|
||||
235,WBAN:04924,6,19
|
||||
236,WBAN:04836,1,39
|
||||
237,WBAN:93193,10,3
|
||||
238,WBAN:94276,0,0
|
||||
240,WBAN:54772,4,37
|
||||
241,WBAN:00265,7,26
|
||||
242,WBAN:03896,13,27
|
||||
243,WBAN:13975,11,17
|
||||
244,WBAN:03056,14,15
|
||||
245,WBAN:93764,8,33
|
||||
246,WBAN:94959,7,23
|
||||
247,WBAN:12993,17,24
|
||||
248,WBAN:23081,12,10
|
||||
249,WBAN:23064,9,16
|
||||
250,WBAN:94041,1,16
|
||||
251,WBAN:04807,6,26
|
||||
252,WBAN:53870,12,30
|
||||
253,WBAN:04854,3,28
|
||||
254,WBAN:00391,13,31
|
||||
255,WBAN:13764,9,34
|
||||
256,WBAN:53982,14,20
|
||||
257,WBAN:94008,0,12
|
||||
258,WBAN:00361,10,27
|
||||
260,WBAN:24087,1,13
|
||||
261,WBAN:14750,5,35
|
||||
262,WBAN:00135,9,1
|
||||
263,WBAN:53893,13,25
|
||||
264,WBAN:03708,12,32
|
||||
265,WBAN:23065,8,16
|
||||
266,WBAN:04994,0,20
|
||||
267,WBAN:14829,6,27
|
||||
268,WBAN:53977,14,18
|
||||
269,WBAN:03195,11,8
|
||||
270,WBAN:14916,1,19
|
||||
271,WBAN:23066,8,11
|
||||
272,WBAN:94992,1,24
|
||||
273,WBAN:94919,1,21
|
||||
274,WBAN:04999,3,20
|
||||
275,WBAN:00481,9,8
|
||||
276,WBAN:00387,4,3
|
||||
277,WBAN:93057,12,11
|
||||
278,WBAN:24201,1,1
|
||||
279,WBAN:53967,13,19
|
||||
280,WBAN:24143,1,9
|
||||
281,WBAN:04880,7,25
|
||||
282,WBAN:24051,7,13
|
||||
283,WBAN:14898,4,25
|
||||
284,WBAN:13723,11,31
|
||||
285,WBAN:13939,13,23
|
||||
286,WBAN:63874,15,26
|
||||
287,WBAN:94626,3,38
|
||||
288,WBAN:13926,14,19
|
||||
289,WBAN:53874,13,29
|
||||
291,WBAN:24048,4,11
|
||||
293,WBAN:14976,6,22
|
||||
294,WBAN:03870,12,29
|
||||
295,WBAN:54819,6,29
|
||||
296,WBAN:53941,11,20
|
||||
298,WBAN:53913,11,19
|
||||
299,WBAN:03030,11,16
|
||||
300,WBAN:94836,2,26
|
||||
301,WBAN:00150,2,18
|
||||
302,WBAN:00221,12,8
|
||||
303,WBAN:00186,16,25
|
||||
304,WBAN:93706,8,32
|
||||
305,WBAN:94161,5,7
|
||||
306,WBAN:00231,11,33
|
||||
307,WBAN:53938,12,18
|
||||
308,WBAN:53855,8,28
|
||||
309,WBAN:00357,15,18
|
||||
310,WBAN:03908,16,23
|
||||
311,WBAN:00154,10,33
|
||||
312,WBAN:14858,1,25
|
||||
313,WBAN:53119,11,3
|
||||
315,WBAN:00455,8,23
|
||||
316,WBAN:04884,3,27
|
||||
317,WBAN:04936,6,20
|
||||
318,WBAN:00159,9,12
|
||||
319,WBAN:14751,7,33
|
||||
320,WBAN:13971,11,22
|
||||
321,WBAN:00431,14,21
|
||||
322,WBAN:14752,6,36
|
||||
323,WBAN:00219,12,31
|
||||
324,WBAN:00322,1,17
|
||||
326,WBAN:94012,0,10
|
||||
327,WBAN:03167,13,4
|
||||
328,WBAN:94025,7,12
|
||||
330,WBAN:93228,10,1
|
||||
331,WBAN:94973,2,23
|
||||
332,WBAN:53973,16,19
|
||||
333,WBAN:00337,14,29
|
||||
334,WBAN:04998,7,18
|
||||
335,WBAN:24144,2,8
|
||||
336,WBAN:53886,10,25
|
||||
337,WBAN:03711,11,32
|
||||
338,WBAN:00250,15,28
|
||||
340,WBAN:04113,3,3
|
||||
341,WBAN:94038,2,15
|
||||
342,WBAN:94931,1,22
|
||||
343,WBAN:03810,11,30
|
||||
344,WBAN:00306,17,21
|
||||
345,WBAN:00260,18,21
|
||||
346,WBAN:93990,8,17
|
||||
347,WBAN:53972,15,19
|
||||
348,WBAN:63837,14,30
|
||||
349,WBAN:93986,12,17
|
||||
350,WBAN:93034,14,14
|
||||
352,WBAN:00392,10,2
|
||||
353,WBAN:23002,14,12
|
||||
355,WBAN:00163,7,15
|
||||
356,WBAN:00128,15,29
|
||||
357,WBAN:12962,17,17
|
||||
358,WBAN:00429,9,11
|
||||
359,WBAN:94225,2,0
|
||||
360,WBAN:00225,16,18
|
||||
361,WBAN:03962,13,22
|
||||
362,WBAN:93757,9,31
|
||||
363,WBAN:14609,2,39
|
||||
364,WBAN:12927,17,23
|
||||
365,WBAN:12918,17,20
|
||||
366,WBAN:04887,5,28
|
||||
367,WBAN:00484,4,13
|
||||
368,WBAN:53896,9,26
|
||||
369,WBAN:03860,9,29
|
||||
370,WBAN:63804,12,26
|
||||
371,WBAN:53903,16,20
|
||||
372,WBAN:14936,4,18
|
||||
373,WBAN:13986,9,18
|
||||
374,WBAN:04933,3,21
|
||||
375,WBAN:00291,0,19
|
||||
376,WBAN:94720,6,38
|
||||
377,WBAN:53990,13,20
|
||||
378,WBAN:00452,2,5
|
||||
379,WBAN:24145,5,8
|
||||
380,WBAN:93115,14,5
|
||||
381,WBAN:03144,14,6
|
||||
382,WBAN:24091,7,16
|
||||
383,WBAN:00141,10,20
|
||||
385,WBAN:64706,7,31
|
||||
386,WBAN:93819,8,26
|
||||
387,WBAN:14918,0,21
|
||||
388,WBAN:00377,17,29
|
||||
389,WBAN:00240,5,27
|
||||
390,WBAN:14937,6,23
|
||||
391,WBAN:54941,5,22
|
||||
392,WBAN:94926,2,24
|
||||
393,WBAN:04997,2,21
|
||||
394,WBAN:04781,7,36
|
||||
395,WBAN:94761,5,33
|
||||
396,WBAN:00464,3,25
|
||||
397,WBAN:00216,4,28
|
||||
399,WBAN:24166,4,9
|
||||
401,WBAN:03889,10,29
|
||||
402,WBAN:03811,11,25
|
||||
403,WBAN:04946,4,20
|
||||
404,WBAN:14833,6,28
|
||||
405,WBAN:93753,12,33
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
File diff suppressed because it is too large
File diff suppressed because it is too large