Package featuregenerator :: Package parser :: Package berkeley :: Module berkeleyclient
[hide private]
[frames] | [no frames]

Source Code for Module featuregenerator.parser.berkeley.berkeleyclient

  1  # -*- coding: utf-8 -*- 
  2  import xmlrpclib  
  3  import time 
  4  import sys 
  5  from featuregenerator.languagefeaturegenerator import LanguageFeatureGenerator 
  6  import socket 
  7  from nltk import PunktWordTokenizer, PunktSentenceTokenizer 
  8  from featuregenerator.parser.berkeley.socket.berkeleyparsersocket import BerkeleyParserSocket 
  9   
 10  from py4j.java_gateway import JavaGateway 
 11  from py4j.java_gateway import GatewayClient 
 12  from py4j.java_gateway import java_import 
 13  from py4j.protocol import Py4JError 
 14   
 15   
class BerkeleyFeatureGenerator(LanguageFeatureGenerator):
    """
    Deprecated abstract base for Berkeley-parser feature generators.

    All entry points raise NotImplementedError; use one of the concrete
    subclasses (BerkeleySocketFeatureGenerator or
    BerkeleyXMLRPCFeatureGenerator) instead. The subclasses inherit the
    shared feature-extraction helpers defined here
    (get_features_simplesentence / get_features_string / prepare_sentence)
    and only have to provide parse().
    """

    def __init__(self, *args):
        # Deprecated: construct one of the subclasses instead.
        raise NotImplementedError("BerkeleyFeatureGenerator class has been deprecated. Please use either of the subclasses")

    def parse(self, string):
        # Must be overridden: return a dict with keys 'loglikelihood' and
        # 'nbest' (list of {'confidence': ..., 'tree': ...} entries).
        raise NotImplementedError("BerkeleyFeatureGenerator class has been deprecated. Please use either of the subclasses")

    def add_features_batch(self, parallelsentences):
        raise NotImplementedError("BerkeleyFeatureGenerator class has been deprecated. Please use either of the subclasses")

    # Warning: not language-aware function. Use the ones above.
    def get_features_simplesentence(self, simplesentence, parallelsentence):
        """Tokenize one simple sentence and extract parser features for it."""
        sent_string = self.prepare_sentence(simplesentence)
        return self.get_features_string(sent_string)

    def get_features_string(self, sent_string):
        """
        Parse a sentence string and derive summary statistics from the
        parser's n-best list.

        @param sent_string: the (possibly pre-tokenized) sentence text
        @return: dict of string-valued attributes, or {} if parsing returned
            no result
        """
        results = self.parse(sent_string)
        if results == {}:
            return {}
        loglikelihood = results['loglikelihood']
        nbest_list = results['nbest']
        n = len(nbest_list)

        # Sentinel lower than any realistic log-confidence; if the n-best
        # list is empty, this value is reported unchanged.
        best_confidence = -1e308
        best_parse = ""
        sum_confidence = 0

        for entry in nbest_list:
            # Convert once per entry (was converted up to three times).
            confidence = float(entry["confidence"])
            if confidence > best_confidence:
                best_confidence = confidence
                best_parse = entry["tree"]
            sum_confidence += confidence

        if n != 0:
            avg_confidence = sum_confidence / n
        else:
            avg_confidence = -float('inf')

        attributes = {}
        attributes["berkeley-n"] = str(n)
        # NOTE(review): "berkley" is a typo, but it is an externally visible
        # feature name; kept byte-identical so trained models keep working.
        attributes["berkley-loglikelihood"] = str(loglikelihood)
        attributes["berkeley-best-parse-confidence"] = str(best_confidence)
        attributes["berkeley-avg-confidence"] = str(avg_confidence)
        attributes["berkeley-tree"] = best_parse
        return attributes

    def prepare_sentence(self, simplesentence):
        """
        Return the sentence text, optionally normalized and tokenized
        (sentence-split with Punkt, word-tokenized, trailing period
        separated) when self.tokenize is set.
        """
        string = simplesentence.get_string()
        if self.tokenize:
            string = string.replace(u'“', u'"')
            fixed_tokens = []
            # Renamed loop variable: the original reused 'string' as the
            # per-sentence loop variable, shadowing the outer value.
            for sentence in PunktSentenceTokenizer().tokenize(string):
                tokens = PunktWordTokenizer().tokenize(sentence)
                # Detach the sentence-final period from the last token.
                # NOTE(review): replace() affects every '.' in that token
                # (e.g. abbreviations) — presumably acceptable; confirm.
                tokens[-1] = tokens[-1].replace(".", " .")
                fixed_tokens.extend(tokens)
            string = " ".join(fixed_tokens)
        return string
80 81 82 # batch = [] 83 # preprocessed_batch = [] 84 # for parallelsentence in parallelsentences: 85 # batch.append((parallelsentence.serialize(), parallelsentence.get_attribute("langsrc"), parallelsentence.get_attribute("langtgt"))) 86 # 87 # for (row, langsrc, langtgt) in batch: 88 # preprocessed_row = [] 89 # col_id = 0 90 # for simplesentence in row: 91 # if (col_id == 0 and langsrc == self.lang) or (col_id > 0 and langtgt == self.lang): 92 # simplesentence = simplesentence.get_string() 93 # #simplesentence = self.__prepare_sentence_b64__(simplesentence) 94 # preprocessed_row.append(simplesentence) 95 # else: 96 # simplesentence = "" 97 # preprocessed_row.append(simplesentence) 98 # col_id += 1 99 # preprocessed_batch.append(preprocessed_row) 100 # 101 # socket.setdefaulttimeout(None) 102 # connected = False 103 # while not connected: 104 # #try: 105 # features_batch = self.server.BParser.parse_batch(preprocessed_batch) 106 # connected = True 107 # #except TimeoutException: TODO: find a better way to handle timeouts 108 # # sys.stderr.write("Connection to server %s failed, trying again after a few seconds...\n" % self.url) 109 # # time.sleep(5) 110 # 111 # row_id = 0 112 # 113 # 114 # new_parallelsentences = [] 115 # for row in features_batch: 116 # parallelsentence = parallelsentences[row_id] 117 # src = parallelsentence.get_source() 118 # targets = parallelsentence.get_translations() 119 # 120 # column_id = 0 121 # #dig in the batch to retrieve features 122 # for feature_set in row: 123 # for key in feature_set: 124 # if column_id == 0: 125 # src.add_attribute(key, feature_set[key]) 126 # else: 127 # targets[column_id - 1].add_attribute(key, feature_set[key]) 128 # 129 # 130 # column_id += 1 131 # 132 # parallelsentence.set_source(src) 133 # parallelsentence.set_translations(targets) 134 # new_parallelsentences.append(parallelsentence) 135 # row_id += 1 136 # 137 # return new_parallelsentences 138 139 140 141 # 142 # def parse(self, string): 143 # 144 # 
results = self.server.BParser.parse ( string ) 145 # loglikelihood = results['loglikelihood'] 146 # nbestList = results['nbest'] 147 # n = len(nbestList) 148 # 149 # 150 # 151 # best_confidence = -1e308; 152 # best_parse = "" 153 # sum_confidence = 0 154 # 155 # for entry in nbestList: 156 # confidence = entry["confidence"] 157 # parse = entry["tree"] 158 # if float(confidence) > best_confidence: 159 # best_confidence = float(confidence) 160 # best_parse = parse 161 # sum_confidence += float(confidence) 162 # 163 # avg_confidence = sum_confidence / n 164 # 165 # print "berkeley-n" + str(n) 166 # print "berkley-loglikelihood" + str(results['loglikelihood']) 167 # print "berkeley-best-parse-confidence" , best_confidence 168 # print "berkeley-avg-confidence" , avg_confidence 169 # print "berkeley-best-parse-tree" , best_parse 170 171
class BerkeleySocketFeatureGenerator(BerkeleyFeatureGenerator):
    """
    Class that handles the feature generation functions by calling Berkeley parser
    through a socket connection. This class has the advantage that it gets controlled
    fully by python code. So many parsers can be started and run in parallel, e.g.
    for speeding up parsing process via parallelization.
    This may be a problem when parser is too big and can only be loaded once for many
    experiments. In that case use an XMLRPC server
    """

    def __init__(self, lang, grammarfile, gateway, tokenize=False):
        """
        Start a parser wrapper over an existing py4j gateway.

        @param lang: language code this generator is responsible for
        @param grammarfile: path to the Berkeley grammar file to load
        @param gateway: py4j gateway connected to the running JVM
        @param tokenize: when True, prepare_sentence() tokenizes input first
        """
        self.lang = lang
        self.tokenize = tokenize
        # Progress messages go to stderr, consistent with the message below
        # (was a bare 'print' to stdout).
        sys.stderr.write("berkeleyclient.py: initializing BerkeleyParserSocket\n")
        self.berkeleyparser = BerkeleyParserSocket(grammarfile, gateway)
        sys.stderr.write("got BParser object\n")

    def parse(self, string):
        """Delegate parsing of one sentence string to the wrapped parser."""
        return self.berkeleyparser.parse(string)
209 210 # def __del__(self): 211 # try: 212 # self.berkeleyparser.__del__() 213 # except: 214 # pass 215 216 217 218
class BerkeleyXMLRPCFeatureGenerator(BerkeleyFeatureGenerator):
    """
    Handles the connection with a Berkeley parser running behind an
    XML-RPC server (service name 'BParser').
    """

    def __init__(self, url, lang="", tokenize=False):
        """
        Connect to a Berkeley Server through XML-RPC.

        @param url: XML-RPC endpoint of the parser server
        @param lang: language code this generator is responsible for
        @param tokenize: when True, prepare_sentence() tokenizes input first
        """
        self.server = xmlrpclib.ServerProxy(url)
        self.url = url
        self.lang = lang
        self.tokenize = tokenize

    def parse(self, string):
        """
        Parse one sentence via XML-RPC, retrying every 0.5 s until the
        server answers. Returns the server's result dict.
        """
        connected = False
        failed = 0
        while not connected:
            try:
                results = self.server.BParser.parse(string)
                connected = True
            except Exception as inst:
                # Diagnostics to stderr, consistent with the rest of the
                # module (was a py2 'print' to stdout).
                sys.stderr.write("%s %s\n" % (type(inst), inst))
                time.sleep(0.5)
                failed += 1
        return results

    def add_features_batch(self, parallelsentences):
        """
        Annotate a batch of parallel sentences with parser features,
        either on the source side or the translation side, depending on
        which language this generator was configured for.

        @param parallelsentences: list of parallel sentence objects
        @return: the same list, with attributes added in place
        """
        row_id = 0

        if parallelsentences[0].get_attribute("langsrc") == self.lang:
            batch = [[self.prepare_sentence(parallelsentence.get_source())]
                     for parallelsentence in parallelsentences]

            # BUGFIX: originally called self.xmlrpc_call(batch), a method
            # that does not exist (AttributeError at runtime); the target
            # branch below correctly calls xmlrpc_call_batch.
            features_batch = self.xmlrpc_call_batch(batch)

            for row in features_batch:
                parallelsentence = parallelsentences[row_id]
                src = parallelsentence.get_source()
                # Dig in the batch to retrieve features for the source.
                for feature_set in row:
                    for key in feature_set:
                        src.add_attribute(key, feature_set[key])
                parallelsentence.set_source(src)
                parallelsentences[row_id] = parallelsentence
                row_id += 1

        elif parallelsentences[0].get_attribute("langtgt") == self.lang:
            batch = [[self.prepare_sentence(translation)
                      for translation in parallelsentence.get_translations()]
                     for parallelsentence in parallelsentences]

            features_batch = self.xmlrpc_call_batch(batch)

            for row in features_batch:
                parallelsentence = parallelsentences[row_id]
                targets = parallelsentence.get_translations()
                # One feature set per translation column, in order.
                column_id = 0
                for feature_set in row:
                    for key in feature_set:
                        targets[column_id].add_attribute(key, feature_set[key])
                    column_id += 1
                parallelsentence.set_translations(targets)
                parallelsentences[row_id] = parallelsentence
                row_id += 1

        return parallelsentences

    def xmlrpc_call_batch(self, batch):
        """
        Send a batch to the server's parse_batch endpoint, retrying every
        5 s on faults, timeouts and other connection errors.
        """
        socket.setdefaulttimeout(None)
        connected = False
        features_batch = []
        while not connected:
            try:
                features_batch = self.server.BParser.parse_batch(batch)
                connected = True
            except xmlrpclib.Fault as err:
                sys.stderr.write("Fault code: %d\n" % err.faultCode)
                sys.stderr.write("Fault string: %s\n" % err.faultString)
                sys.stderr.write("\nconnection failed, sleeping for 5 sec\n")
                time.sleep(5)
            except socket.timeout:
                sys.stderr.write("time out, retrying in 5 sec\n")
                time.sleep(5)
            except Exception as err:
                # BUGFIX: the original only slept when err[0] == 111
                # (ECONNREFUSED; the old comment's "10035" was wrong) and
                # silently busy-looped on every other error. Always report
                # and back off before retrying.
                sys.stderr.write("error %s, retrying in 5 sec\n" % err)
                time.sleep(5)
            # TODO: find a better way to handle timeouts / give up after N tries
        return features_batch
315 #b = BerkeleyFeatureGenerator("http://percival.sb.dfki.de:8683", "fr") 316 #b.parse("C' est notre travail pour continuer à soutenir Lettonie avec l' intégration de la population russe.") 317