IMPORT & DEFINE

In [ ]:
%%capture 

import numpy as np  
import pandas as pd 
import pickle
import re           
import os
from bs4 import BeautifulSoup 
from sklearn.model_selection import train_test_split
from keras.preprocessing.text import Tokenizer 
from keras.preprocessing.sequence import pad_sequences
from nltk.corpus import stopwords   
import tensorflow as tf 
from tensorflow.keras.layers import Input, LSTM, Embedding, Dense, Concatenate, TimeDistributed, Bidirectional
from tensorflow.keras.models import Model
from tensorflow.keras.callbacks import EarlyStopping
import matplotlib.pyplot as plt

from tensorflow.python.keras.layers import Layer
from tensorflow.python.keras import backend as K

import nltk
nltk.download('stopwords')
stop_words = set(stopwords.words('english'))
In [ ]:
dataset_downloaded = False

FUNCTIONS

In [ ]:
%%capture


contraction_mapping = {"ain't": "is not", "aren't": "are not","can't": "cannot", "'cause": "because", "could've": "could have", "couldn't": "could not",

                           "didn't": "did not", "doesn't": "does not", "don't": "do not", "hadn't": "had not", "hasn't": "has not", "haven't": "have not",

                           "he'd": "he would","he'll": "he will", "he's": "he is", "how'd": "how did", "how'd'y": "how do you", "how'll": "how will", "how's": "how is",

                           "I'd": "I would", "I'd've": "I would have", "I'll": "I will", "I'll've": "I will have","I'm": "I am", "I've": "I have", "i'd": "i would",

                           "i'd've": "i would have", "i'll": "i will",  "i'll've": "i will have","i'm": "i am", "i've": "i have", "isn't": "is not", "it'd": "it would",

                           "it'd've": "it would have", "it'll": "it will", "it'll've": "it will have","it's": "it is", "let's": "let us", "ma'am": "madam",

                           "mayn't": "may not", "might've": "might have","mightn't": "might not","mightn't've": "might not have", "must've": "must have",

                           "mustn't": "must not", "mustn't've": "must not have", "needn't": "need not", "needn't've": "need not have","o'clock": "of the clock",

                           "oughtn't": "ought not", "oughtn't've": "ought not have", "shan't": "shall not", "sha'n't": "shall not", "shan't've": "shall not have",

                           "she'd": "she would", "she'd've": "she would have", "she'll": "she will", "she'll've": "she will have", "she's": "she is",

                           "should've": "should have", "shouldn't": "should not", "shouldn't've": "should not have", "so've": "so have","so's": "so as",

                           "this's": "this is","that'd": "that would", "that'd've": "that would have", "that's": "that is", "there'd": "there would",

                           "there'd've": "there would have", "there's": "there is", "here's": "here is","they'd": "they would", "they'd've": "they would have",

                           "they'll": "they will", "they'll've": "they will have", "they're": "they are", "they've": "they have", "to've": "to have",

                           "wasn't": "was not", "we'd": "we would", "we'd've": "we would have", "we'll": "we will", "we'll've": "we will have", "we're": "we are",

                           "we've": "we have", "weren't": "were not", "what'll": "what will", "what'll've": "what will have", "what're": "what are",

                           "what's": "what is", "what've": "what have", "when's": "when is", "when've": "when have", "where'd": "where did", "where's": "where is",

                           "where've": "where have", "who'll": "who will", "who'll've": "who will have", "who's": "who is", "who've": "who have",

                           "why's": "why is", "why've": "why have", "will've": "will have", "won't": "will not", "won't've": "will not have",

                           "would've": "would have", "wouldn't": "would not", "wouldn't've": "would not have", "y'all": "you all",

                           "y'all'd": "you all would","y'all'd've": "you all would have","y'all're": "you all are","y'all've": "you all have",

                           "you'd": "you would", "you'd've": "you would have", "you'll": "you will", "you'll've": "you will have",

                           "you're": "you are", "you've": "you have"}

class AttentionLayer(Layer):
  '''
    This class implements Bahdanau attention
  '''
  def __init__(self, **kwargs):
    super(AttentionLayer, self).__init__(**kwargs) 
  
  def build(self, input_shape):

    assert isinstance(input_shape, list) 

    #create a trainable weight variable for this layer 
    self.w_a = self.add_weight(name='w_a', shape= tf.TensorShape((input_shape[0][2], input_shape[0][2])), initializer= 'uniform', trainable= True)
    self.u_a = self.add_weight(name='u_a', shape= tf.TensorShape((input_shape[1][2], input_shape[0][2])), initializer= 'uniform', trainable= True) 
    self.v_a = self.add_weight(name='v_a', shape= tf.TensorShape((input_shape[0][2], 1)), initializer= 'uniform', trainable = True)

    super(AttentionLayer, self).build(input_shape) 
  
  def call(self, inputs, verbose = False):
    '''
      inputs: [encoder_output_sequence, decoder_output_sequence] 
    ''' 
    assert type(inputs) == list 
    encoder_out_seq, decoder_out_seq = inputs 
    if verbose : 
      print("encoder out seq:", encoder_out_seq.shape) 
      print("decoder out seq:", decoder_out_seq.shape) 

    def energy_step(inputs, states):
      '''
        step function for computing energy for a single decoder state
      '''
      assert_msg = 'states must be a list. However, states {} is of type {}'.format(states, type(states)) 
      assert isinstance(states, list) or isinstance(states, tuple), assert_msg 

      ''' some parameters required for computing energy for as single decoder state '''
      en_seq_len, en_hidden = encoder_out_seq.shape[1], encoder_out_seq.shape[2]
      de_hidden = inputs.shape[-1] 

      ''' computing s.wa where s=[s0, s1, ..., si] '''
      # <= batch_size*en_seq_len, latent_dim 
      reshaped_enc_outputs = K.reshape(encoder_out_seq, (-1, en_hidden)) 
      # <= batch_size*en_seq_len, latent_dim 
      w_a_dot_s = K.reshape(K.dot(reshaped_enc_outputs, self.w_a), (-1, en_seq_len, en_hidden)) 
      if verbose : print("wa.s>", w_a_dot_s.shape) 

      '''computing hj.ua '''
      u_a_dot_h = K.expand_dims(K.dot(inputs, self.u_a), 1) # <= batch_size, 1, latent_dim 
      if verbose: print("ua.h", u_a_dot_h.shape) 

      ''' tanh(s.wa + hj.ua) '''
      # <= batch_size*en_seq_len, latent_dim 
      reshaped_ws_plus_uh = K.tanh(K.reshape(w_a_dot_s + u_a_dot_h, (-1, en_hidden))) 
      if verbose : print("ws+uh>", reshaped_ws_plus_uh.shape) 

      '''softmax(va.tanh(s.wa+hj.ua))'''
      # <= batch_size, en_seq_len 
      e_i = K.reshape(K.dot(reshaped_ws_plus_uh, self.v_a), (-1, en_seq_len)) 
      # <= batch_size, en_seq_len 
      e_i = K.softmax(e_i) 

      if verbose : print("ei>", e_i.shape) 

      return e_i, [e_i] 
    
    def context_step(inputs, states):
      '''step function for computing ci using ei '''
      c_i = K.sum(encoder_out_seq*K.expand_dims(inputs, -1), axis=1) 
      if verbose : print("ci>", c_i.shape) 
      return c_i, [c_i] 

    def create_initial_state(inputs, hidden_size):
      #we are not using initial states, ut need to pass something to K.rnn function 
      fake_state = K.zeros_like(inputs) # <= (batch_size, enc_seq_len, latent_dim) 
      fake_state = K.sum(fake_state, axis=[1,2]) # <= (batch_size) 
      fake_state = K.expand_dims(fake_state) # <= (batch_size, 1) 
      fake_state = K.tile(fake_state, [1, hidden_size]) # <= (batch_size, latent_dim) 
      return fake_state 

    fake_state_c = create_initial_state(encoder_out_seq, encoder_out_seq.shape[-1]) 
    fake_state_e = create_initial_state(encoder_out_seq, encoder_out_seq.shape[1]) # <= (batch_size, enc_seq_len, latent_dim) 

    '''computing energy outputs'''
    # e_outputs => (batch_size, de_seq_len, en_seq_len)  
    last_out, e_outputs, _ = K.rnn(energy_step, decoder_out_seq, [fake_state_e]) 

    '''computing context vectors'''
    last_out, c_outputs, _ = K.rnn(context_step, e_outputs, [fake_state_c])

    return c_outputs, e_outputs 
  
  def compute_output_shape(self, input_shape):
    '''
      outputs produced by the layer
    '''
    return [
            tf.TensorShape((input_shape[1][0], input_shape[1][1], input_shape[1][2])),
            tf.TensorShape((input_shape[1][0], input_shape[1][1], input_shape[0][1]))
    ]

def get_dataset():
  !wget --load-cookies /tmp/cookies.txt\
   "https://docs.google.com/uc?export=download&confirm=$(wget --quiet --save-cookies /tmp/cookies.txt\
    --keep-session-cookies --no-check-certificate 'https://docs.google.com/uc?export=download&id=1EzY1IfN_QGCVp9EUVxZ3dZhRF_EUtyJA' -O- \
    | sed -rn 's/.*confirm=([0-9A-Za-z_]+).*/\1\n/p')&id=1EzY1IfN_QGCVp9EUVxZ3dZhRF_EUtyJA" -O kaggle.json && rm -rf /tmp/cookies.txt

  !pip install kaggle

  !mkdir ~/.kaggle
  !cp kaggle.json ~/.kaggle/
  !chmod 600 ~/.kaggle/kaggle.json

  !kaggle datasets download -d snap/amazon-fine-food-reviews
  !unzip 'amazon-fine-food-reviews.zip'


def get_embeddings_matrix(glove, word2ix, word_embed_size = 200, vocab_size = 5000):
  embeddings_matrix = np.zeros((vocab_size, word_embed_size))

  for word, index in word2ix.items():
    embed_vec = glove.get(word) 
    if embed_vec is not None :
      embeddings_matrix[index] = embed_vec 
  
  return embeddings_matrix 

def get_glove_200():

  def read_pickle(pickle_file):
    with open(pickle_file, 'rb') as f :
      return pickle.load(f)
   
  !wget --load-cookies /tmp/cookies.txt "https://docs.google.com/uc?export=download&confirm=$(wget --quiet --save-cookies /tmp/cookies.txt --keep-session-cookies --no-check-certificate 'https://docs.google.com/uc?export=download&id=10Xgw5e157bD3z7cJG2_0DLmZSwGzXh8c' -O- | sed -rn 's/.*confirm=([0-9A-Za-z_]+).*/\1\n/p')&id=10Xgw5e157bD3z7cJG2_0DLmZSwGzXh8c" -O glove_200.pickle && rm -rf /tmp/cookies.txt
  glove_200 = read_pickle('glove_200.pickle') 
  return glove_200 


def cleaner(data, maxlen = 3, RemoveStopWords = True):
  def clean(text, threshold, RemoveStopWords): 
    newtext = text.lower() 
    newtext = BeautifulSoup(newtext, 'lxml').text 
    newtext = re.sub(r'\([^)]*\)', '', newtext)
    newtext = re.sub('"', '', newtext) 
    newtext = ' '.join([contraction_mapping[t] if t in contraction_mapping else t for t in newtext.split(" ")])
    newtext = re.sub(r"'s\b","",newtext)        #?
    newtext = re.sub("[^a-zA-Z]", " ", newtext) #?
    if RemoveStopWords : tokens = [w for w in newtext.split(" ") if not w in stop_words]
    else : tokens = [w for w in newtext.split(" ")]

    long_tokens = [t for t in tokens if len(t)>=threshold]
    return (" ".join(long_tokens)).strip()
   
  cleaned = [clean(d, maxlen, RemoveStopWords) for d in data] 
  return cleaned

def get_model(embeddings_matrix, max_len_text, x_voc_size, y_voc_size, latent_dim = 500):

  #Encoder
  encoder_inputs = Input(shape=(max_len_text,))
  enc_emb = Embedding(x_voc_size, 200, weights = [embeddings_matrix], input_length = max_len_text, trainable = False)(encoder_inputs)

  #LSTM 1 
  encoder_lstm1 = LSTM(latent_dim, return_sequences = True, return_state = True)
  encoder_output1, state_h1, state_c1 = encoder_lstm1(enc_emb) 

  #LSTM 2
  encoder_lstm2 = LSTM(latent_dim, return_sequences = True, return_state = True) 
  encoder_output2, state_h2, state_c2 = encoder_lstm2(encoder_output1) 

  #LSTM 3 
  encoder_lstm3 = LSTM(latent_dim, return_sequences = True, return_state = True)  
  encoder_outputs, state_h, state_c = encoder_lstm3(encoder_output2) 
  print("encoder outputs shape:", encoder_outputs.shape) 

  #set up the decoder 
  decoder_inputs = Input(shape=(None,))
  dec_emb_layer = Embedding(x_voc_size, 200, weights= [embeddings_matrix], input_length = max_len_text, trainable = False)
  dec_emb = dec_emb_layer(decoder_inputs) 

  #LSTM using encoder_states as initial state 
  decoder_lstm = LSTM(latent_dim, return_sequences = True, return_state = True) 
  decoder_outputs, decoder_fwd_state, decoder_back_state = decoder_lstm(dec_emb, initial_state = [state_h, state_c])
  print("decoder_outputs shape:", decoder_outputs.shape) 

  #attention layer 
  attn_layer = AttentionLayer(name='attention_layer') 
  attn_out, attn_states = attn_layer([encoder_outputs, decoder_outputs]) 

  #concatenate attention output and decoder LSTM output 
  decoder_concat_input = Concatenate(axis=-1, name='concat_layer')([decoder_outputs, attn_out])

  #Dense layer
  decoder_dense = TimeDistributed(Dense(y_voc_size, activation='softmax')) 
  decoder_outputs = decoder_dense(decoder_concat_input) 

  #Define the model 
  model = Model([encoder_inputs, decoder_inputs], decoder_outputs) 
  return model

Download & clean

In [ ]:
%%capture

if not dataset_downloaded :
  dataset_downloaded = True
  get_dataset()
  
data = pd.read_csv("Reviews.csv") 
data.drop_duplicates(subset=['Text'], inplace= True) 
data.dropna(axis=0, inplace=True) 


Text = cleaner(data['Text'], maxlen =3)
Summary = cleaner(data['Summary'], maxlen =1, RemoveStopWords = False)  

Summary = ['_START_ ' + s + ' _END_' for s in Summary]

Explore

In [ ]:
Text[:10]
Out[ ]:
['bought several vitality canned dog food products found good quality product looks like stew processed meat smells better labrador finicky appreciates product better',
 'product arrived labeled jumbo salted peanuts peanuts actually small sized unsalted sure error vendor intended represent product jumbo',
 'confection around centuries light pillowy citrus gelatin nuts case filberts cut tiny squares liberally coated powdered sugar tiny mouthful heaven chewy flavorful highly recommend yummy treat familiar story lewis lion witch wardrobe treat seduces edmund selling brother sisters witch',
 'looking secret ingredient robitussin believe found got addition root beer extract ordered made cherry soda flavor medicinal',
 'great taffy great price wide assortment yummy taffy delivery quick taffy lover deal',
 'got wild hair taffy ordered five pound bag taffy enjoyable many flavors watermelon root beer melon peppermint grape etc complaint bit much red black licorice flavored pieces kids husband lasted two weeks would recommend brand taffy delightful treat',
 'saltwater taffy great flavors soft chewy candy individually wrapped well none candies stuck together happen expensive version fralinger would highly recommend candy served beach themed party everyone loved',
 'taffy good soft chewy flavors amazing would definitely recommend buying satisfying',
 'right mostly sprouting cats eat grass love rotate around wheatgrass rye',
 'healthy dog food good digestion also good small puppies dog eats required amount every feeding']
In [ ]:
Summary[:10]
Out[ ]:
['_START_ good quality dog food _END_',
 '_START_ not as advertised _END_',
 '_START_ delight says it all _END_',
 '_START_ cough medicine _END_',
 '_START_ great taffy _END_',
 '_START_ nice taffy _END_',
 '_START_ great just as good as the expensive brands _END_',
 '_START_ wonderful tasty taffy _END_',
 '_START_ yay barley _END_',
 '_START_ healthy dog food _END_']
In [ ]:
sentence_length_df = pd.DataFrame({'Text sentence length':[len(sent.split(" ")) for sent in Text], 'Summary sentence length':[len(sent.split(" ")) for sent in Summary]})
sentence_length_df.hist(bins = 200, figsize = (20, 10)) 
plt.show()

Tokenize

In [ ]:
TEXT_MAXLEN = 80
SUMMARY_MAXLEN = 10 

x_train, x_val, y_train, y_val = train_test_split(Text, Summary, test_size = 0.1,random_state=0,shuffle=True)
In [ ]:
x_tokenizer = Tokenizer() 
x_tokenizer.fit_on_texts(list(x_train))

#convert text sequence into integer sequence 
x_train = x_tokenizer.texts_to_sequences(x_train) 
x_val = x_tokenizer.texts_to_sequences(x_val) 

#padding with zeros 
x_train = pad_sequences(x_train, maxlen = TEXT_MAXLEN, padding='post') 
x_val = pad_sequences(x_val, maxlen = SUMMARY_MAXLEN, padding='post') 

X_VOCAB_SIZE = len(x_tokenizer.word_index) +1
In [ ]:
y_tokenizer = Tokenizer() 
y_tokenizer.fit_on_texts(list(y_train)) 

y_train = y_tokenizer.texts_to_sequences(y_train) 
y_val = y_tokenizer.texts_to_sequences(y_val) 

y_train = pad_sequences(y_train, maxlen = SUMMARY_MAXLEN, padding='post') 
y_val = pad_sequences(y_val, maxlen = SUMMARY_MAXLEN, padding='post') 

Y_VOCAB_SIZE = len(y_tokenizer.word_index) +1
In [ ]:
%%capture

glove_200 = get_glove_200() 
embeddings_matrix = get_embeddings_matrix(glove=glove_200, word2ix= x_tokenizer.word_index, word_embed_size = 200, vocab_size = X_VOCAB_SIZE)

Model

In [ ]:
K.clear_session()
model = get_model(embeddings_matrix, TEXT_MAXLEN, X_VOCAB_SIZE, Y_VOCAB_SIZE)
model.compile(optimizer = 'rmsprop', loss = 'sparse_categorical_crossentropy') 
es = EarlyStopping(monitor = 'val_loss', mode='min', verbose = 1)
encoder outputs shape: (None, 80, 500)
decoder_outputs shape: (None, None, 500)
In [ ]:
train_in_seq = [x_train, y_train[:, :-1]] 
train_out_seq = y_train.reshape(y_train.shape[0], y_train.shape[1], 1)[:, 1:]
val_in_seq = [x_val, y_val[:, :-1]] 
val_out_seq = y_val.reshape(y_val.shape[0], y_val.shape[1], 1)[:, 1:] 
history = model.fit( train_in_seq, train_out_seq, epochs = 50, callbacks = [es], batch_size = 512, validation_data = (val_in_seq, val_out_seq))
Epoch 1/50
692/692 [==============================] - 376s 544ms/step - loss: 2.8786 - val_loss: 2.5896
Epoch 2/50
692/692 [==============================] - 375s 542ms/step - loss: 2.3599 - val_loss: 2.4943
Epoch 3/50
692/692 [==============================] - 375s 541ms/step - loss: 2.1851 - val_loss: 2.4898
Epoch 4/50
692/692 [==============================] - 374s 541ms/step - loss: 2.0808 - val_loss: 2.5129
Epoch 00004: early stopping
In [ ]:
from matplotlib import pyplot 
pyplot.plot(history.history['loss'], label='train') 
pyplot.plot(history.history['val_loss'], label='test') 
pyplot.legend() 
pyplot.show()

PLAY

In [ ]:
x_i2w = x_tokenizer.index_word 
x_w2i = x_tokenizer.word_index
y_i2w = y_tokenizer.index_word 
y_w2i = y_tokenizer.word_index
In [ ]:
def summarize(sentence, x_len, y_len, x_w2i, y_w2i, model = model):
  def pad_seq(seq, maxlen, pad):
    if len(seq) >= maxlen: return np.array(seq[0:maxlen] )
    else: return np.array(seq + [pad]*(maxlen-len(seq)))

  if isinstance(sentence, str): sentence = sentence.split(' ') 
  encoder_seq = pad_seq([x_w2i[s] for s in sentence], maxlen= x_len, pad=0)
  decoder_seq = pad_seq([y_w2i['start']], maxlen= y_len, pad= 0)  

  for i in range(y_len):
    pred = model.predict([encoder_seq, decoder_seq])
    pred = np.reshape(pred, (pred.shape[0], pred.shape[2]))
    
    word_idx = np.argmax(pred[i,:]) 
    if word_idx==0 : break 
    if word_idx==y_w2i['end'] : break
    decoder_seq[i+1] = word_idx 

  return decoder_seq[1:]
In [ ]:
for i in range(60,100):
  sample_text = ' '.join([x_i2w[x] for x in x_train[i] if x!=0]) 
  sample_summary = ' '.join([y_i2w[x] for x in y_train[i][1:] if x!=0 and y_i2w[x]!='end']) 
  
  seq = summarize(sample_text, TEXT_MAXLEN, TEXT_MAXLEN, x_w2i, y_w2i)
  seq = list(seq)
  seq = ' '.join([y_i2w[y] for y in seq if y!=0]) 
  print("text:", sample_text) 
  print("gt:", sample_summary)
  print("summary:", seq)  
  print()
text: kobe steaks four stars gripe problem size first cuts one inch thick ghastly offensive steak bare minimum two inches thick even associate proletarians imagine horrors people committ decent food cannot people eat sensibly please get started wanted include sterility drugs fast food particularly bargain menu merely hope dream another day secondly law somewhere steak less two pounds heavens fourteen ouncer mere sampler tease suppose cuts intended given away offensive stores yuppies flock lemmings cliff lastly must close review become distraught
gt: okay for a casual get together
summary: great

text: really enjoy coffee nice island coconutty flavor definitely purchase coffee cups
gt: yummy coffee
summary: great

text: using beef broccoli packets years easy mix water soy sauce favorite recipe replace beef shrimp super delicious
gt: a family favorite
summary: great for a great

text: said high hopes whole flavored water thing think energy beverage craze joke ultimately product least comes one thing taste area merely tastes kind like kool aid fruit punch quite good also vaguely medicinal taste mentioned headline lingering aftertaste whether taste good depends much like flavor wishing diet coke around means bad product fairly sure could get effect sugar free kool aid like probably spend lot less money alternatively get good night sleep drink plain water eat orange pretty much outcome
gt: slightly medicinal with a lingering aftertaste
summary: great for the best ever

text: arrived perfect condition hands best tasting honey ever use honey sweeten everything tried lot different ones favorite bottles size little bears perfect squeezing coffee tea pancakes etc
gt: best tasting honey i have ever had
summary: great

text: really impressed durable bags tearing top put around top trash minimal overfill bags problem spillage breakage carrying trash purchase
gt: impressed
summary: great

text: product melted extremely disfigured longer looked like sucker many different forms entire box ruined caramel longer sucker coming plastic beware buying extremely unhappy client party favors useless waste money
gt: wish you could rate a zero
summary: great for a bit the best

text: glad bought tried like target beer bread tasted much better would buy
gt: not as good as i thought
summary: great

text: first learned orzo food network channel tried loved local grocery began carry tri colored orzo bought loved particularly beauty three colors added dish store stopped carrying brand tri color orzo came home got line bought thru amazon happy find one favorite pastas given gifts everyone loves thanks pleased customer
gt: rave review of orzo
summary: great

text: ordered case pumpernickel jalapeno cheddar sandwiches give friend going chemo radiation favorite amazed put order yesterday afternoon arrived morning great however disappointed expiration date bags feb weeks boo quality perspective
gt: disappointed in age freshness of product
summary: great product

text: great candy bar cannot get enough peanuts mouth peel back wrapper see coating peanuts take bite get tasty peanut buttery treat center held together delicious caramel able get enough sweet nutty masterpiece snack craftsmanship run mill chocolate coated candy bar much also salty taste gives makes stand nut lovers nirvana everyone else darn fine snack
gt: strictly for nut lovers
summary: great for the best price

text: yrs ago discovered son peanut allergy introduction soynut butter came curious chocolate flavor would like place could find amazon took plunge glad makes tasty sandwich jelly favorite bananas plain great use substitute recipes call peanut butter wish amazon would offer subscribe option keep stock regularly many months expensive buying anywhere else
gt: love the soynut butter
summary: the best ever

text: order product matcha green tea powder love together order two love starbucks green tea latte taste saves money great buy cuz bottle way bigger looks picture
gt: love
summary: great product

text: meals amazing low calorie delicious require refrigeration cook two microwave one meal less calories find delicious goodness microwave meal less half definately cupbard
gt: delicious and amazing
summary: great

text: rich delicious decaffeinated coffee really hard tell real thing san francisco bay coffee bite real coffee missing decaf eco frendly design resonable price big plus
gt: rich and tasty decaf
summary: the best coffee great

text: nice quality easy cook find better cooked minutes directed like minutes depending quantity cooked one time enough water cook totally dry yet noticeable water remaining bottom pan great breakfast cooked bit lean sausage
gt: better than i expected
summary: great product

text: warning never back nestle chocolate getting used chocolaty sweet costco carries similar ghirardelli double chocolate bittersweet chips way better cost baking season food faddists saying chocolate good enjoy changes
gt: so excellent but shop around
summary: great

text: chiocciole italian means snails like wider mezzi rigatoni curling shell shaped ideal chunky sauce unfortunately pasta shape available neither cecco barilla supermarket brands buy marella brand new happy find made two bags chiocciole great results
gt: hard to find shape excellent texture
summary: great product

text: buying sugar twin years sudden could longer find ounce boxes local grocery stores much convenient small packets make iced tea gallon tried substituting brands sweeteners none sweetened well sugar twin glad find size still available online far best sugar free sweeteners
gt: best sweetener on the market
summary: great taste great

text: coffee taste amazing tolerable perhaps par lower end medium quality espresso coffee high get insane better energy drinks energy drinks really get shaky edge gets close enough edge comfortable control like speed love good job medaglia
gt: ridiculous energy
summary: great taste great for the best price

text: thank wonderful chips great price arrived perfect condition although box little smashed chips pristine condition
gt: and arrived with not a single broken chip
summary: great

text: arrived time dont quite melt advertised required much stiring left something sludge bottom cup
gt: they sort of work
summary: great

text: love love love love cherry flavored anything easy haha candies good love nice cherry favor thanks
gt: great tasty candy
summary: love it is the best ever

text: using coconut oil substitute cooking oils skin hair products years stellar brand organic cold pressed made safe storing lot good info web coconut oil saving health jump band wagon late
gt: best advice i ever received
summary: great for the best price

text: used dove soap many years think good sensitive skin grandchildren enjoy also
gt: lovely dove
summary: great product

text: really enjoyed product tastes good close russian iranian versions also service quick delivery added bonus would highly recommend product
gt: excellent quality and value
summary: great

text: flavor simply delightful funny taste stevia sweet black cherry flavor pleased
gt: delightful
summary: the best i love it is not so good stuff

text: often review things especially groceries oats deserve good word actually good oats good price good packaging agree foks say good mccann half price actually better last mccann got local supermarket gone stale store super fresh nutty fears shipping unfounded mine arrived undamaged days
gt: good good good
summary: great

text: everyone found birthday way think candies exceptional tasting enjoyed seeing friends eyes light saw old favorites generated lot talking laughing reminiscing bunch ladies got candy cigarettes picture taken smoking lounge dad frugal unfortunately price slip included first says kidding bit also showing german roots gayle box candy called several times let know got worth fun consider getting gift box father era gift lot candy figured would enjoy plenty share year old father thought candy selection right target remembered buying kid
gt: a blast at the retirement home
summary: great

text: one flavors keep around extra surprise get using old food favorite like awhile change sure poultry platter means guess anyway smells fresh stink ridiculously palatable recommend
gt: we will eat it once in awhile
summary: great taste great

text: really loved taste energy drink two servings per calories per serving entire calories light slightly fruity taste really enjoyed drinking energy drinks always experienced kick energy get one said feel good crash later certainly get fear super energy
gt: servings per can of no fear energy drink
summary: great

text: used product one purchased weaker flavor one previously
gt: mccormick pure orange
summary: great product

text: lagavulin pronounced lahk voolin one islay distilleries long interesting history regarding region scotch whiskey distilleries interested give google islay whiskeys tried tend smoky peaty taste hint sea lagavulin favorite drinker scotch whiskey promise notice unique distinct difference right away wonderful wife scotch whiskey drinker enjoys lagavulin last decade seemed gained popularity noticed substantial increase local retail prices typically range also experienced supply shortages believe price listed seems good cannot say enough good things whiskey give try glad
gt: nothing like any other scotch whiskey
summary: the best

text: son started reject nursing around wks since exclusively pumping son almost months eating pump due big size lot research formula one seems best smells tastes good vanilla flavor son loves give one bottle day formula rest breast milk worried constipation also add packet baby essentials probiotic bottle day thing like formula gets really foamy try mix shaking use mini wisk works great
gt: my baby loves it
summary: love it is the best i love it is a great

text: best decaf found tried lot great cup coffee
gt: love it
summary: the best price

text: great gluten free product wonderful flavor get mushy like rice flour pasta tends downside really cannot reheat unless fry microwave reheat tends get dry fall apart
gt: great taste
summary: great for a great

text: coffee great bold taste wakens ones taste buds every cup highly recommend brand
gt: great coffee
summary: great

text: count coffee pod packages got machine free result rocky relationship coffee easily bellying bar spending month starbucks bit manufacturer pod machine bought recommends using pod brand pods course trouble trouble finding first batch got machine bought several packs melitta guys amazon use pod coffeemaker since friend melitta machine sprang enough pods qualify free super saver shipping figured would give work turns work fine machine regular flavored coffees though necessarily gourmet stuff really quite tasty would problem serving coffee guest home
gt: swf seeks tall dark warm fast joe
summary: great coffee

text: looking traditional treat family event impressed quality food service shatila pastries fresh delivered quickly dramatic shipping mark reasonably priced placing another order gift friend highly recommend
gt: best service and quality i have seen
summary: great

text: excellent alternative regular pasta product wish came shape varieties even non gluten sensitive friends family member love
gt: love this stuff
summary: great for the best buy it is not so good for a little goes a great

we can learn from the great great great great great results that it is el hamd llah 3la kol 7aga (thank god for everything)