From dae63118271428a08efc402cb57ffc95c5f0a856 Mon Sep 17 00:00:00 2001
From: hakangunturkun
Date: Tue, 14 Apr 2020 11:35:50 -0500
Subject: change versions of some frameworks

---
 server.py | 28 ++++++++++++----------------
 1 file changed, 12 insertions(+), 16 deletions(-)

diff --git a/server.py b/server.py
index bb1c885..fd1285b 100755
--- a/server.py
+++ b/server.py
@@ -15,10 +15,6 @@ import os
 import re
 import pytz
 
-
-import string
-import re
-import os
 from os import listdir
 import nltk
 from nltk.corpus import stopwords
@@ -28,16 +24,16 @@ import numpy as np
 from numpy import array
 import tensorflow
 import keras
-from tensorflow.keras.models import Model
-from tensorflow.keras.preprocessing.text import Tokenizer
-from tensorflow.keras.preprocessing.sequence import pad_sequences
-from tensorflow.keras.layers import *
-from tensorflow.keras.models import Sequential
-from tensorflow.keras.layers import Dense
-from tensorflow.keras.layers import Flatten
-from tensorflow.keras.layers import Embedding
-from tensorflow.keras import metrics
-from tensorflow.keras import optimizers
+from keras.models import Model
+from keras.preprocessing.text import Tokenizer
+from keras.preprocessing.sequence import pad_sequences
+from keras.layers import *
+from keras.models import Sequential
+from keras.layers import Dense
+from keras.layers import Flatten
+from keras.layers import Embedding
+from keras import metrics
+from keras import optimizers
 import pickle
 
 app=Flask(__name__)
@@ -85,8 +81,8 @@ def create_model(vocab_size, max_length):
     model.add(Flatten())
     model.add(Dense(10, activation='relu'))
     model.add(Dense(1, activation='sigmoid'))
-    opt = tensorflow.keras.optimizers.Adamax(learning_rate=0.002, beta_1=0.9, beta_2=0.999)
-    model.compile(loss='binary_crossentropy', optimizer=opt, metrics=[tensorflow.keras.metrics.AUC()])
+    opt = keras.optimizers.Adamax(learning_rate=0.002, beta_1=0.9, beta_2=0.999)
+    model.compile(loss='binary_crossentropy', optimizer=opt, metrics=[keras.metrics.AUC()])
     return model
 
 @app.route("/")
-- 
cgit v1.2.3