Commit 7b07b412 authored by Danial Hezarkhani

Added new messages for showing metrics. Brought training into the model class. Added comments.

parent 7fa34af3
1 merge request: !10 Updated Sentiment_Analysis NN and trained Model
Showing with 365 additions and 2967 deletions
@@ -120,14 +120,12 @@ python3 -m pip install grpcio-tools googleapis-common-protos
* Now, run this command inside the csv_databroker folder:
```commandline
python3 -m grpc_tools.protoc -I. --python_out=. --grpc_python_out=. databroker.proto
```
* Now, run this command inside the Sentiment_Analysis folder:
```commandline
python3 -m grpc_tools.protoc -I. --python_out=. --grpc_python_out=. model.proto
```
This command uses the model.proto file to generate the stubs needed to create the client/server.
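As a quick check that generation worked, the stubs can be imported directly. A minimal client sketch follows (the channel address localhost:8061 is an assumption, not taken from the README, and a running server is required; stub and message names come from the generated model_pb2/model_pb2_grpc modules):
```python
# Minimal client sketch using the generated stubs.
# The address localhost:8061 is an assumption, not from the README.
import grpc

import model_pb2
import model_pb2_grpc

with grpc.insecure_channel('localhost:8061') as channel:
    stub = model_pb2_grpc.DatabrokerStub(channel)
    response = stub.sadatabroker(model_pb2.Empty())  # unary RPC defined in the proto
    print(response.query)  # the Text response carries a single 'query' field
```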
...
@@ -10,7 +10,13 @@ message Text {
    string query = 1;
}

// Message sent to extract dataset metadata
message DatasetMetaData {
    string datasetName = 1;
}

//Define the service
service Databroker {
    rpc sadatabroker(Empty) returns (Text);
    rpc log_dataset_metadata(DatasetMetaData) returns (Empty);
}
\ No newline at end of file
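Once the stubs are regenerated, the new RPC can be exercised from Python. A hedged sketch (the address is an assumption; the dataset name mirrors the test value used by the servicer further below):
```python
# Sketch: send dataset metadata to the databroker over the new RPC.
# The address localhost:8061 is an assumption for illustration.
import grpc

import model_pb2
import model_pb2_grpc

with grpc.insecure_channel('localhost:8061') as channel:
    stub = model_pb2_grpc.DatabrokerStub(channel)
    request = model_pb2.DatasetMetaData(datasetName='test Dataset')
    stub.log_dataset_metadata(request)  # the response is an Empty message
```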
@@ -19,7 +19,7 @@ DESCRIPTOR = _descriptor.FileDescriptor(
  syntax='proto3',
  serialized_options=None,
  create_key=_descriptor._internal_create_key,
  serialized_pb=b'\n\x0bmodel.proto\"\x07\n\x05\x45mpty\"\x15\n\x04Text\x12\r\n\x05query\x18\x01 \x01(\t\"&\n\x0f\x44\x61tasetMetaData\x12\x13\n\x0b\x64\x61tasetName\x18\x01 \x01(\t2]\n\nDatabroker\x12\x1d\n\x0csadatabroker\x12\x06.Empty\x1a\x05.Text\x12\x30\n\x14log_dataset_metadata\x12\x10.DatasetMetaData\x1a\x06.Emptyb\x06proto3'
)
@@ -81,8 +81,41 @@ _TEXT = _descriptor.Descriptor(
  serialized_end=45,
)
_DATASETMETADATA = _descriptor.Descriptor(
name='DatasetMetaData',
full_name='DatasetMetaData',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='datasetName', full_name='DatasetMetaData.datasetName', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=47,
serialized_end=85,
)
DESCRIPTOR.message_types_by_name['Empty'] = _EMPTY
DESCRIPTOR.message_types_by_name['Text'] = _TEXT
DESCRIPTOR.message_types_by_name['DatasetMetaData'] = _DATASETMETADATA
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

Empty = _reflection.GeneratedProtocolMessageType('Empty', (_message.Message,), {
@@ -99,6 +132,13 @@ Text = _reflection.GeneratedProtocolMessageType('Text', (_message.Message,), {
})
_sym_db.RegisterMessage(Text)
DatasetMetaData = _reflection.GeneratedProtocolMessageType('DatasetMetaData', (_message.Message,), {
'DESCRIPTOR' : _DATASETMETADATA,
'__module__' : 'model_pb2'
# @@protoc_insertion_point(class_scope:DatasetMetaData)
})
_sym_db.RegisterMessage(DatasetMetaData)
_DATABROKER = _descriptor.ServiceDescriptor(
@@ -108,8 +148,8 @@ _DATABROKER = _descriptor.ServiceDescriptor(
  index=0,
  serialized_options=None,
  create_key=_descriptor._internal_create_key,
  serialized_start=87,
  serialized_end=180,
  methods=[
  _descriptor.MethodDescriptor(
    name='sadatabroker',
@@ -121,6 +161,16 @@ _DATABROKER = _descriptor.ServiceDescriptor(
    serialized_options=None,
    create_key=_descriptor._internal_create_key,
  ),
_descriptor.MethodDescriptor(
name='log_dataset_metadata',
full_name='Databroker.log_dataset_metadata',
index=1,
containing_service=None,
input_type=_DATASETMETADATA,
output_type=_EMPTY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_DATABROKER)
...
@@ -20,6 +20,11 @@ class DatabrokerStub(object):
                request_serializer=model__pb2.Empty.SerializeToString,
                response_deserializer=model__pb2.Text.FromString,
                )
self.log_dataset_metadata = channel.unary_unary(
'/Databroker/log_dataset_metadata',
request_serializer=model__pb2.DatasetMetaData.SerializeToString,
response_deserializer=model__pb2.Empty.FromString,
)
class DatabrokerServicer(object):
@@ -32,6 +37,12 @@ class DatabrokerServicer(object):
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def log_dataset_metadata(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_DatabrokerServicer_to_server(servicer, server):
    rpc_method_handlers = {
@@ -40,6 +51,11 @@ def add_DatabrokerServicer_to_server(servicer, server):
                    request_deserializer=model__pb2.Empty.FromString,
                    response_serializer=model__pb2.Text.SerializeToString,
            ),
'log_dataset_metadata': grpc.unary_unary_rpc_method_handler(
servicer.log_dataset_metadata,
request_deserializer=model__pb2.DatasetMetaData.FromString,
response_serializer=model__pb2.Empty.SerializeToString,
),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'Databroker', rpc_method_handlers)
@@ -67,3 +83,20 @@ class Databroker(object):
            model__pb2.Text.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def log_dataset_metadata(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/Databroker/log_dataset_metadata',
model__pb2.DatasetMetaData.SerializeToString,
model__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@@ -15,7 +15,6 @@ def run():
        stub = model_pb2_grpc.DatabrokerStub(channel)
        ui_request = model_pb2.Empty()
        response = stub.sadatabroker(ui_request)
        print("Greeter client received: ")
        print(response)
...
@@ -19,7 +19,14 @@ class DatabrokerServicer(model_pb2_grpc.DatabrokerServicer):
    def __init__(self):
        super().__init__()
        self.send_data=True
        # Call the metadata-logging function once, when the container is first created.
        ##########
        # for testing only:
        request = model_pb2.DatasetMetaData()
        request.datasetName = "test Dataset"
        self.log_dataset_metadata(request, None)
        ###########

    def sadatabroker(self, request, context):
        # this simple example without streaming sends only one data record, then the pipeline must be run again
@@ -34,6 +41,13 @@ class DatabrokerServicer(model_pb2_grpc.DatabrokerServicer):
        return response

    # Log the metadata of the given dataset.
    def log_dataset_metadata(self, request, context):
        datasetName = request.datasetName
        logger.debug("test logging metadata")
        logger.debug(datasetName)
        # A unary handler should return its declared response type (Empty).
        return model_pb2.Empty()

def serve():
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    model_pb2_grpc.add_DatabrokerServicer_to_server(DatabrokerServicer(), server)
...
@@ -22,7 +22,6 @@ def hello():
    headers = ['Text', 'Classification']
    return render_template("index.html", headers=headers, results=results)

# if __name__ == '__main__':
def app_run():
    app.secret_key = "samodel"
...
//Define the used version of proto
syntax = "proto3";

message Empty {
}

//Define a message to hold the features input by the client
message Text {
    string query = 1;
@@ -11,7 +15,13 @@ message Review_Classify {
    float review = 1;
}
message predictionMetric {
float loss = 1;
float accuracy = 2;
}
//Define the service
service sentiment_analysis_model {
    rpc classify_review(Text) returns (Review_Classify);
    rpc calculate_metrics(Empty) returns (predictionMetric);
}
\ No newline at end of file
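After regenerating the stubs, the metrics RPC can be called with a client sketch like the following (the address is an assumption; stub and message names match the generated model_pb2/model_pb2_grpc modules):
```python
# Sketch: retrieve evaluation metrics from the model service.
# The address localhost:8061 is an assumption for illustration.
import grpc

import model_pb2
import model_pb2_grpc

with grpc.insecure_channel('localhost:8061') as channel:
    stub = model_pb2_grpc.sentiment_analysis_modelStub(channel)
    metrics = stub.calculate_metrics(model_pb2.Empty())
    print('loss:', metrics.loss)
    print('accuracy:', metrics.accuracy)
```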
@@ -2,20 +2,96 @@ from keras.models import load_model
from keras.datasets import imdb
from keras.preprocessing import sequence
from keras.models import Sequential
from keras.layers import Dense, Embedding, Conv1D, GlobalMaxPooling1D
class SentimentAnalysis():
def __init__(self):
# The maximum length of the input sequences.
self.maxlen = 100
# The maximum number of words in the vocabulary.
self.max_features = 5000
# The dimension of the embedding vectors.
self.embedding_dims = 32
######
# NN settings
# The number of filters to use in the convolutional layer. This is the number of output channels that the convolutional layer will produce.
self.filters = 128
# The size of the filter kernel. This is the number of input elements that the filter will be applied to.
self.kernel_size = 3
        # The stride of the convolutional layer: the step size between successive applications of the filter kernel.
        self.strides = 1
        # The type of padding to use. 'valid' means no padding, so the output is shorter than the input by kernel_size - 1 ('same' would preserve the input length).
self.padding = 'valid'
self.batch_size = 64
self.epochs = 5
self.model_saving_path = "model.h5"
def predict(self,input):
model = load_model(self.model_saving_path)
        # Keras' IMDB word index maps words to integers; the stored indices are offset by 3
        # (0 = padding, 1 = start-of-sequence, 2 = out-of-vocabulary).
        d = imdb.get_word_index()
        words = input.split()
        review = []
        for word in words:
            if word not in d:
                review.append(2)
            else:
                review.append(d[word] + 3)
review = sequence.pad_sequences([review],
truncating='pre', padding='pre', maxlen=self.maxlen)
prediction = model.predict(review)
return prediction[0][0]
def calculate_metrics(self):
model = load_model(self.model_saving_path)
_, (x_test, y_test) = imdb.load_data(num_words=self.max_features)
x_test = sequence.pad_sequences(x_test, maxlen=self.maxlen)
        # evaluate returns the metrics in compile order: [binary cross-entropy loss, accuracy].
        score = model.evaluate(x_test, y_test, verbose=0)
score_dic = {
"loss": score[0],
"accuracy": score[1]
}
return score_dic
def train(self):
(x_train, y_train), _ = imdb.load_data(num_words=self.max_features)
x_train = sequence.pad_sequences(x_train, maxlen=self.maxlen)
model = Sequential()
# The embedding layer will convert each word in the input sequence to a dense vector of fixed size.
# This vector will represent the meaning of the word in a more meaningful way than the one-hot encoded vector.
model.add(Embedding(self.max_features, self.embedding_dims, input_length=self.maxlen))
model.add(Conv1D(self.filters, self.kernel_size, padding=self.padding, activation='relu', strides=self.strides))
# Global max pooling is a type of pooling operation that takes the maximum value over the entire input sequence.
# This means that the output of the global max pooling layer will be a single vector, regardless of the length of the input sequence.
model.add(GlobalMaxPooling1D())
        # Add the output layer: a dense layer with a single neuron and a sigmoid activation function.
        # Dense layers take a vector of inputs and produce a vector of outputs; each neuron is connected to all of the neurons in the previous layer.
        # The sigmoid activation squashes its input to a value between 0 and 1, which suits classification tasks where the output should be a probability.
        ## A Dense layer with 128 neurons after pooling arguably makes no sense, so it is left disabled:
        #model.add(Dense(128, activation='sigmoid'))
        model.add(Dense(1, activation='sigmoid'))
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['acc'])
history = model.fit(x_train, y_train, batch_size=self.batch_size, epochs=self.epochs)
model.save(self.model_saving_path)
        print("Saved model to disk")
        print("Accuracy in training:")
        print(history.history['acc'])
        print("loss in training:")
        print(history.history['loss'])
\ No newline at end of file
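Taken together, the class can be driven end to end. A hedged usage sketch (the module name is inferred from the psp import alias used by the servicer; training downloads the IMDB data through Keras and takes a while):
```python
# Usage sketch for the SentimentAnalysis class above.
# The module name predict_sale_price is inferred from the 'psp' alias.
from predict_sale_price import SentimentAnalysis

sa = SentimentAnalysis()
sa.train()  # fits the Conv1D model and writes model.h5
print(sa.calculate_metrics())  # -> {'loss': ..., 'accuracy': ...} on the IMDB test split
print(sa.predict("this movie was wonderful"))  # probability of a positive review
```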
@@ -19,12 +19,37 @@ DESCRIPTOR = _descriptor.FileDescriptor(
  syntax='proto3',
  serialized_options=None,
  create_key=_descriptor._internal_create_key,
  serialized_pb=b'\n\x0bmodel.proto\"\x07\n\x05\x45mpty\"\x15\n\x04Text\x12\r\n\x05query\x18\x01 \x01(\t\"!\n\x0fReview_Classify\x12\x0e\n\x06review\x18\x01 \x01(\x02\"2\n\x10predictionMetric\x12\x0c\n\x04loss\x18\x01 \x01(\x02\x12\x10\n\x08\x61\x63\x63uracy\x18\x02 \x01(\x02\x32v\n\x18sentiment_analysis_model\x12*\n\x0f\x63lassify_review\x12\x05.Text\x1a\x10.Review_Classify\x12.\n\x11\x63\x61lculate_metrics\x12\x06.Empty\x1a\x11.predictionMetricb\x06proto3'
)
_EMPTY = _descriptor.Descriptor(
name='Empty',
full_name='Empty',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=15,
serialized_end=22,
)
_TEXT = _descriptor.Descriptor(
  name='Text',
  full_name='Text',
@@ -52,8 +77,8 @@ _TEXT = _descriptor.Descriptor(
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=24,
  serialized_end=45,
)
@@ -84,14 +109,62 @@ _REVIEW_CLASSIFY = _descriptor.Descriptor(
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=47,
  serialized_end=80,
)
_PREDICTIONMETRIC = _descriptor.Descriptor(
name='predictionMetric',
full_name='predictionMetric',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='loss', full_name='predictionMetric.loss', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='accuracy', full_name='predictionMetric.accuracy', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=82,
serialized_end=132,
) )
DESCRIPTOR.message_types_by_name['Empty'] = _EMPTY
DESCRIPTOR.message_types_by_name['Text'] = _TEXT
DESCRIPTOR.message_types_by_name['Review_Classify'] = _REVIEW_CLASSIFY
DESCRIPTOR.message_types_by_name['predictionMetric'] = _PREDICTIONMETRIC
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Empty = _reflection.GeneratedProtocolMessageType('Empty', (_message.Message,), {
'DESCRIPTOR' : _EMPTY,
'__module__' : 'model_pb2'
# @@protoc_insertion_point(class_scope:Empty)
})
_sym_db.RegisterMessage(Empty)
Text = _reflection.GeneratedProtocolMessageType('Text', (_message.Message,), {
  'DESCRIPTOR' : _TEXT,
  '__module__' : 'model_pb2'
@@ -106,6 +179,13 @@ Review_Classify = _reflection.GeneratedProtocolMessageType('Review_Classify', (_
  })
_sym_db.RegisterMessage(Review_Classify)
predictionMetric = _reflection.GeneratedProtocolMessageType('predictionMetric', (_message.Message,), {
'DESCRIPTOR' : _PREDICTIONMETRIC,
'__module__' : 'model_pb2'
# @@protoc_insertion_point(class_scope:predictionMetric)
})
_sym_db.RegisterMessage(predictionMetric)
_SENTIMENT_ANALYSIS_MODEL = _descriptor.ServiceDescriptor(
@@ -115,8 +195,8 @@ _SENTIMENT_ANALYSIS_MODEL = _descriptor.ServiceDescriptor(
  index=0,
  serialized_options=None,
  create_key=_descriptor._internal_create_key,
  serialized_start=134,
  serialized_end=252,
  methods=[
  _descriptor.MethodDescriptor(
    name='classify_review',
@@ -128,6 +208,16 @@ _SENTIMENT_ANALYSIS_MODEL = _descriptor.ServiceDescriptor(
    serialized_options=None,
    create_key=_descriptor._internal_create_key,
  ),
_descriptor.MethodDescriptor(
name='calculate_metrics',
full_name='sentiment_analysis_model.calculate_metrics',
index=1,
containing_service=None,
input_type=_EMPTY,
output_type=_PREDICTIONMETRIC,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_SENTIMENT_ANALYSIS_MODEL)
...
@@ -20,6 +20,11 @@ class sentiment_analysis_modelStub(object):
                request_serializer=model__pb2.Text.SerializeToString,
                response_deserializer=model__pb2.Review_Classify.FromString,
                )
self.calculate_metrics = channel.unary_unary(
'/sentiment_analysis_model/calculate_metrics',
request_serializer=model__pb2.Empty.SerializeToString,
response_deserializer=model__pb2.predictionMetric.FromString,
)
class sentiment_analysis_modelServicer(object):
@@ -32,6 +37,12 @@ class sentiment_analysis_modelServicer(object):
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def calculate_metrics(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_sentiment_analysis_modelServicer_to_server(servicer, server):
    rpc_method_handlers = {
@@ -40,6 +51,11 @@ def add_sentiment_analysis_modelServicer_to_server(servicer, server):
                    request_deserializer=model__pb2.Text.FromString,
                    response_serializer=model__pb2.Review_Classify.SerializeToString,
            ),
'calculate_metrics': grpc.unary_unary_rpc_method_handler(
servicer.calculate_metrics,
request_deserializer=model__pb2.Empty.FromString,
response_serializer=model__pb2.predictionMetric.SerializeToString,
),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'sentiment_analysis_model', rpc_method_handlers)
@@ -67,3 +83,20 @@ class sentiment_analysis_model(object):
            model__pb2.Review_Classify.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def calculate_metrics(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/sentiment_analysis_model/calculate_metrics',
model__pb2.Empty.SerializeToString,
model__pb2.predictionMetric.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@@ -16,11 +16,16 @@ results = []
class sentiment_analysis_modelServicer(model_pb2_grpc.sentiment_analysis_modelServicer):
def __init__(self):
self.psp_obj = psp.SentimentAnalysis()
    def classify_review(self, request, context):
        # define the buffer of the response :
        response = model_pb2.Review_Classify()
        # get the value of the response by calling the desired function :
        response.review = self.psp_obj.predict(request.query)
        result = [request.query, response.review]
        with open("results.txt", mode="a+") as f:
            # for e0, e1, e2, e3, e4, e5 in result:
@@ -29,6 +34,22 @@ class sentiment_analysis_modelServicer(model_pb2_grpc.sentiment_analysis_modelSe
        return response

    # This function returns the metrics calculated in predict_sale_price.py.
    def calculate_metrics(self, request, context):
        # get the metrics
        score_dic = self.psp_obj.calculate_metrics()
        # make a gRPC message object from the proto file.
        response = model_pb2.predictionMetric()
        response.loss = score_dic["loss"]
        response.accuracy = score_dic["accuracy"]
        logging.debug('Test loss: %s', score_dic["loss"])
        logging.debug('Test accuracy: %s', score_dic["accuracy"])
        return response
def serve():
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    model_pb2_grpc.add_sentiment_analysis_modelServicer_to_server(sentiment_analysis_modelServicer(), server)
@@ -37,8 +58,9 @@ def serve():
    threading.Thread(target=app_run()).start()
    server.wait_for_termination()

if __name__ == '__main__':
    logging.basicConfig()
    open('results.txt', 'w').close()
    serve()
source diff could not be displayed: it is too large.
MSSubClass,LotArea,YearBuilt,BedroomAbvGr,TotRmsAbvGrd
60,8450,2003,3,8
20,9600,1976,3,6
60,11250,2001,3,6
70,9550,1915,3,7
60,14260,2000,4,9
50,14115,1993,1,5
20,10084,2004,3,7
60,10382,1973,3,7
50,6120,1931,2,8
190,7420,1939,2,5
source diff could not be displayed: it is too large.