tvaLib — int.py
#!/usr/bin/env python
# tvaLib Copyright (c) 2012-2016 Paul G. St-Aubin
# Ecole Polytechnique de Montreal, McGill University
# Python 2.7; (dt) Spyder Windows 10 64-bit; ipython Ubuntu 15.04 64-bit

import os, sys
import math as m
from multiprocessing import Pool as multiprocessing_Pool
from random import triangular as random_triangular
from inspect import getfile as inspect_getfile
from inspect import currentframe as inspect_currentframe
import cPickle as pickle

if __name__ == '__main__':
    print('tvaInteraction Library loaded directly.')
    sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(inspect_getfile(inspect_currentframe())))))
import lib.tools as tvaLib
# Disable output
import include.config as tvaConfig; oldstdout = sys.stdout; sys.stdout = tvaConfig.NullWriter()

try:
    # Numpy
    try: import numpy as np
    except Exception: raise Exception, [0101, 'Numpy is not installed.']
    # Traffic Intelligence
    from events import Interaction as TrafIntEvents_Interaction
    from prediction import SafetyPoint as TrafIntPrediction_SafetyPoint
    from indicators import TemporalIndicator as TrafIntIndicators_TemporalIndicator
finally:
    sys.stdout = oldstdout  # Re-enable output


class Behaviour():
    def __init__(self, maxSteering, maxAcceleration, minAcceleration=None):
        self.maxSteering = maxSteering
        self.maxAcceleration = maxAcceleration
        if(not minAcceleration): self.minAcceleration = -maxAcceleration
        else:                    self.minAcceleration = minAcceleration

    def accelerationDistribution(self):
        return random_triangular(self.minAcceleration, self.maxAcceleration, 0.)
    def steeringDistribution(self):
        return random_triangular(-self.maxSteering, self.maxSteering, 0.)

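# Illustrative sketch (not part of the library): Behaviour wraps triangular
# random draws centred on zero, which computeCrossingsCollisionsWorker() feeds
# to a 'ccc'-protocol prediction method as its acceleration and steering
# distributions. The numeric limits below are made-up placeholders.
#
#   behaviour = Behaviour(maxSteering=0.2, maxAcceleration=2.0)
#   a = behaviour.accelerationDistribution()   # random value in [-2.0, 2.0], mode 0
#   s = behaviour.steeringDistribution()       # random value in [-0.2, 0.2], mode 0
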
class InteractionInstantDescriptor():
    ''' Stores static information about an interaction-instant. '''
    def __init__(self, incidence_angle):
        self.incidence_angle = incidence_angle

    def getIncidenceAngle(self): return self.incidence_angle
    def getClassification(self, classMethods):
        for classMethod in classMethods:
            for angleRange in classMethod.incidenceAngleRanges:
                if(self.incidence_angle >= angleRange[0] and self.incidence_angle <= angleRange[1]): return classMethod

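# Illustrative sketch (not part of the library): getClassification() returns the
# first classification method whose incidence-angle ranges contain this
# instant's angle. The ClassMethod container below is a hypothetical stand-in
# for whatever objects the caller supplies.
#
#   class ClassMethod(object):
#       def __init__(self, name, incidenceAngleRanges):
#           self.name = name
#           self.incidenceAngleRanges = incidenceAngleRanges
#
#   crossing = ClassMethod('crossing', [[45, 135], [225, 315]])
#   rearEnd  = ClassMethod('rear-end', [[0, 45], [315, 360]])
#   descriptor = InteractionInstantDescriptor(90.0)
#   descriptor.getClassification([rearEnd, crossing]).name   # -> 'crossing'
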
class SafetyPoint(TrafIntPrediction_SafetyPoint):
    ''' Extended from Traffic Intelligence. '''
    def __init__(self, sampleSize1=None, sampleSize2=None, *args, **kwargs):
        TrafIntPrediction_SafetyPoint.__init__(self, *args, **kwargs)
        self.sampleSize1 = sampleSize1
        self.sampleSize2 = sampleSize2

class Interaction(TrafIntEvents_Interaction):
    ''' This is an upgrade to TI's standard Interaction() class.
        Synonym: userPair. '''

    def __init__(self, *args, **kwargs):
        self.user1Hash = kwargs.pop('user1Hash')
        self.user2Hash = kwargs.pop('user2Hash')
        TrafIntEvents_Interaction.__init__(self, *args, **kwargs)

    def getInstantCount(self):
        return self.getLastInstant()-self.getFirstInstant()+1

    def getInstantWIndicatorCount(self, indicatorName=None):
        if(indicatorName == None): indicatorName = Interaction.indicatorNames[7]
        if(self.getIndicator(indicatorName)): return len(self.getIndicator(indicatorName))
        else:                                 return 0

    def genSafetyIndicators(self):
        ''' Rebuild interaction instants from the list of collision points. '''
        indicators = {}
        for i, cp in self.collisionPoints.iteritems():
            indicators[i] = SafetyPoint.computeExpectedIndicator(cp)
        self.addIndicator(TrafIntIndicators_TemporalIndicator(Interaction.indicatorNames[7], indicators))

    def genInteractionDescriptors(self):
        self.interactionDescriptors = {}
        for t in self.getTimeInterval():
            t1 = t - self.roadUser1.getFirstInstant()
            t2 = t - self.roadUser2.getFirstInstant()
            try: incidence_angle = tvaLib.Geo.vectorsToAngleDegCC(self.roadUser1.velocities.getXCoordinates()[t1], self.roadUser1.velocities.getYCoordinates()[t1], self.roadUser2.velocities.getXCoordinates()[t2], self.roadUser2.velocities.getYCoordinates()[t2])
            except IndexError: incidence_angle = None  # Ignore potential issues if the reported length of an object differs from its stored length (usually a minor bug)
            self.interactionDescriptors[t] = InteractionInstantDescriptor(incidence_angle)

    def getInteractionDescriptors(self):
        try: return self.interactionDescriptors
        except AttributeError:
            self.genInteractionDescriptors()
            return self.interactionDescriptors

    def constrainToZone(self, zone):
        ''' Drop any collision points outside of given zone. '''
        if(len(zone) == 0): return 0
        drop_list = []
        if(self.collisionPoints):
            for frame in self.collisionPoints:
                for i in range(len(self.collisionPoints[frame])):
                    if(True not in [tvaLib.Geo.pip(self.collisionPoints[frame][i].x, self.collisionPoints[frame][i].y, z) for z in zone]):
                        self.collisionPoints[frame][i] = None
                        drop_list.append([frame,i])
                self.collisionPoints[frame] = filter(None, self.collisionPoints[frame])
            self.collisionPoints = dict((k, v) for k, v in self.collisionPoints.items() if v)

        if(drop_list):
            self.indicators.pop(Interaction.indicatorNames[7], None)
            self.genSafetyIndicators()

        return len(drop_list)

    def dropSafetyIndicatorsByValue(self, indicatorLowerBound=0, indicatorUpperBound=sys.maxint):
        ''' Delete indicators with values below indicatorLowerBound
            or greater than indicatorUpperBound. '''
        drop_list = []

        if(self.collisionPoints):
            for frame in self.collisionPoints:
                for i in range(len(self.collisionPoints[frame])):
                    if(self.collisionPoints[frame][i].indicator < indicatorLowerBound or self.collisionPoints[frame][i].indicator > indicatorUpperBound):
                        self.collisionPoints[frame][i] = None
                        drop_list.append([frame,i])
                self.collisionPoints[frame] = filter(None, self.collisionPoints[frame])
            self.collisionPoints = dict((k, v) for k, v in self.collisionPoints.items() if v)

        if(drop_list):
            self.indicators.pop(Interaction.indicatorNames[7], None)
            self.genSafetyIndicators()

        return len(drop_list)

    def getPointList(self, ptype='CP', atInstant=None, alignRestrictions=None, maxIndThreshold=sys.maxint, aggregateInstants=True, max_inst_prob=1.0, verbose=0):
        ''' Input:
            ======
            ptype             -> The type of point: CP|CZ
            alignRestrictions -> [[laneIx,Smin,Smax],...] A series of sections
                                 identified by align index and bounded between
                                 curvilinear distances Smin and Smax. Only
                                 indicators originating from within these
                                 sections will be returned.
            maxIndThreshold   -> Indicator values above this will be ignored.
            aggregateInstants -> For interaction-instants with more than one
                                 indicator of the same type, return a weighted
                                 average according to probability. Defaults to
                                 True.

            Return format:
            ==============
              0      1   2   3     4     5     6     7          8
            [[value1,px1,py1,prob1,time1,num11,num12,descriptor,exposure_list1],[value2,px2,...],...]
        '''

        userNum1 = list(self.roadUserNumbers)[0]
        try: userNum2 = list(self.roadUserNumbers)[1]
        except:
            userNum2 = None
            if(verbose >= 2): tvaLib.printWarning('Interaction has unexpected userNum listing (only one found)', 'Warning')

        if(ptype=='CZ'): pointInstants = self.crossingZones
        else:            pointInstants = self.collisionPoints
        if(atInstant is not None):
            if(int(atInstant) in pointInstants): pointInstants = {int(atInstant): pointInstants[int(atInstant)]}
            else: return []

        point_list = []
        if(pointInstants):
            for pointInstantKey in pointInstants:
                # Verify optional restrictions along alignment sections
                if(alignRestrictions):
                    relative_time1 = pointInstantKey-self.roadUser1.getFirstInstant()
                    relative_time2 = pointInstantKey-self.roadUser2.getFirstInstant()
                    obj1MeetsRestriction = False
                    obj2MeetsRestriction = False
                    for alignRestriction in alignRestrictions:
                        if(self.roadUser1.curvilinearPositions.getLanes()[relative_time1] == alignRestriction[0] and self.roadUser1.curvilinearPositions.getXCoordinates()[relative_time1] >= alignRestriction[1] and self.roadUser1.curvilinearPositions.getXCoordinates()[relative_time1] <= alignRestriction[2]): obj1MeetsRestriction = True
                        if(self.roadUser2.curvilinearPositions.getLanes()[relative_time2] == alignRestriction[0] and self.roadUser2.curvilinearPositions.getXCoordinates()[relative_time2] >= alignRestriction[1] and self.roadUser2.curvilinearPositions.getXCoordinates()[relative_time2] <= alignRestriction[2]): obj2MeetsRestriction = True
                    if(not obj1MeetsRestriction or not obj2MeetsRestriction): continue

                # Collect candidate points at this instant
                instantPoints = [[point.indicator, point.x, point.y, point.probability, pointInstantKey, userNum1, userNum2] for point in pointInstants[pointInstantKey] if point.indicator < maxIndThreshold]

                try: descriptor = self.getInteractionDescriptors()[pointInstantKey]
                except: descriptor = None

                try: exposure = self.exposure
                except: exposure = None

                # Aggregate multiple points at the same instant into a probability-weighted average
                if(aggregateInstants and instantPoints): point_list.append([sum([x[0]*x[3] for x in instantPoints])/sum([x[3] for x in instantPoints]), instantPoints[0][1], instantPoints[0][2], min(sum([x[3] for x in instantPoints]), max_inst_prob), instantPoints[0][4], instantPoints[0][5], instantPoints[0][6], descriptor, exposure])
                else: point_list += instantPoints
        return point_list

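    # Illustrative sketch (not part of the library): with aggregateInstants=True
    # (the default), getPointList() returns one row per interaction-instant in
    # the documented format; the indicator value (column 0) is the
    # probability-weighted mean of that instant's collision points and the
    # probability (column 3) is the summed probability capped at max_inst_prob.
    #
    #   for value, px, py, prob, t, num1, num2, descriptor, exposure in interaction.getPointList(ptype='CP'):
    #       print(t, value, prob)
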
    def getAggregatedPointList(self, method=0, percentile=0.15, minimumProbability=0.0, format='points', **kwargs):
        points = self.getPointList(**kwargs)
        if(not points): return None

        if(method==1):
            pair_indicator = tvaLib.Math.getPercentileKeyFromList(points, percentile, sorting=True, sortingColumnIx=0)
            if(pair_indicator[3] > minimumProbability): return pair_indicator
        elif(method==3):
            pair_indicator = tvaLib.Math.getPercentileKeyFromList(points, percentile, sorting=True, sortingColumnIx=3)
            if(pair_indicator[3] > minimumProbability): return pair_indicator
        else:
            for pair in points:
                if(pair[3] > minimumProbability):
                    return pair

    def getAggregatedInterAngle(self, percentile=0.5, minimumProbability=0.0):
        ''' Pull the list of points, sort by interaction angle, and return the percentile value. '''
        angles = [self.getInteractionDescriptors()[descriptor].getIncidenceAngle() for descriptor in self.getInteractionDescriptors()]
        return tvaLib.Math.getPercentileKeyFromList(angles, percentile, sorting=True)


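# Illustrative sketch (not part of the library): per-pair aggregation of the
# point list above. method=1 takes the given percentile of the indicator value
# (column 0), method=3 the percentile of the probability (column 3); both
# reject the result if its probability does not exceed minimumProbability. A
# low percentile of a TTC-like indicator approximates a "worst observed" value.
#
#   severe = interaction.getAggregatedPointList(method=1, percentile=0.15, minimumProbability=0.1, ptype='CP')
#   if(severe): print(severe[0], severe[3])   # indicator value and probability
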
def computeCrossingsCollisionsWorker(userPairs, objOffset, predictionMethod, triage=False, collisionDistanceThreshold=1.8, timeHorizon=100, verbose=0):
    ''' This is a worker function outside of the UserPairs object used to
        parallelize the computeCrossingsCollisions operations.

        Notes:
        ======
        CPList from a computeCrossingsCollisions should have the format:
        [x,y,prob,TTC]
    '''

    result = []
    skipped_rates = []

    # Arguments may arrive as plain numbers or as multiprocessing.Value proxies
    if(type(timeHorizon) == int or type(timeHorizon) == float): timeHorizon_ = timeHorizon
    else:                                                       timeHorizon_ = timeHorizon.value
    if(type(verbose) == int): verbose_ = verbose
    else:                     verbose_ = verbose.value
    if(type(collisionDistanceThreshold) == float): collisionDistanceThreshold_ = collisionDistanceThreshold
    else:                                          collisionDistanceThreshold_ = collisionDistanceThreshold.value

    if(predictionMethod.protocol == 'ccc'):
        behaviour = Behaviour(predictionMethod.params.getMaxSteering(), predictionMethod.params.getMaxAcceleration(), predictionMethod.params.getMinAcceleration())
        predictionMethod.setAccelerationDistribution(behaviour.accelerationDistribution)
        predictionMethod.setSteeringDistribution(behaviour.steeringDistribution)

        prediction = predictionMethod.getPredictionClass()
        for userPair in range(len(userPairs)):
            CPList, CZList = prediction.computeCrossingsCollisions(userPairs[userPair].roadUser1, userPairs[userPair].roadUser2, collisionDistanceThreshold=collisionDistanceThreshold_, timeHorizon=timeHorizon_, computeCZ=False)
            # Fix bug with probability reporting of Traffic-Intelligence
            # TODO: submit bug report for this in TI
            if(CPList):
                for cpt in CPList:
                    for y in range(len(CPList[cpt])):
                        CPList[cpt][y].probability = CPList[cpt][y].probability/float(len(CPList[cpt]))
            result.append({})
            result[-1]['ix'] = objOffset+userPair
            result[-1]['CP'] = CPList
            result[-1]['CZ'] = CZList

    else:
        prediction = predictionMethod.getPredictionClass()
        for userPair in range(len(userPairs)):
            result.append({})
            result[-1]['ix'] = objOffset+userPair
            result[-1]['CP'] = prediction.computeCrossingsCollisions(obj1=userPairs[userPair].roadUser1, obj2=userPairs[userPair].roadUser2, interval=userPairs[userPair].getTimeInterval(), triage=triage, timeHorizon=timeHorizon_, verbose=verbose_)
            skipped_rates.append(1-len(result[-1]['CP'])/userPairs[userPair].getTimeInterval().length())
            if(verbose_ >= 5): print(' Interaction instance skip rate: {0:.1f}%'.format(skipped_rates[-1]*100))

    return [result, skipped_rates]


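# Illustrative sketch (not part of the library): the worker is chunk-oriented so
# that it can be dispatched through multiprocessing.Pool. It receives a slice of
# user pairs plus the offset of that slice in the full list, and returns
# [results, skipped_rates], where each result carries the global index 'ix' used
# later to reattach collision points ('CP') and crossing zones ('CZ') to the
# right pair. The predictionMethod object below is an assumed stand-in for
# whatever the calling site configures.
#
#   packaged = computeCrossingsCollisionsWorker(pairs[0:100], 0, predictionMethod, timeHorizon=100)
#   results, skipped_rates = packaged
#   results[0]['ix'], results[0]['CP']
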
class UserPairs():
    def __init__(self, objects, methodName='default', **kwargs):
        ''' Create all pairs of two co-existing road users.
            TODO: add test to compute categories?
        '''
        # Storage and bookkeeping (counters start at zero and are updated as chunks are processed)
        self.interactionCount = 0
        self.intWTTCmap = []
        self.skipped_rate_none = 0
        self.skipped_rate_part = 0
        self.skipped_rate_full = 0
        self.skipped_rate_avg = 0
        self.methodName = methodName
        self.detectedSize = 0
        self.usedSize = 0

        self.genPairs(objects, **kwargs)

    def __len__(self): return len(self.data)
    def __getitem__(self, i): return self.data[i]
    def __setitem__(self, i, value): self.data[i] = value; return True
    def __iter__(self): return iter(self.data)
    def __str__(self): return 'List of interactions (currently storing '+str(len(self.data))+')'
    def __add__(self, userPairs):
        self.data += userPairs.data
        self.interactionCount += userPairs.interactionCount
        self.intWTTCmap += userPairs.intWTTCmap
        self.skipped_rate_none += userPairs.skipped_rate_none
        self.skipped_rate_part += userPairs.skipped_rate_part
        self.skipped_rate_full += userPairs.skipped_rate_full
        self.skipped_rate_avg += userPairs.skipped_rate_avg
        self.methodName = userPairs.methodName
        try:
            self.detectedSize += userPairs.detectedSize
            self.usedSize += userPairs.usedSize
        except: pass

    def genPairs(self, objects, maxUserPairs=4000, max_distance=50.0, searchRange=50, verbose=0):
        ''' Generate user pairs. '''
        self.data = []
        num = 0
        distanceSquared = max_distance**2
        for ix in range(len(objects)):
            for jx in range(len(objects))[(ix+1):min(ix+searchRange,len(objects))]:
                commonTimeInterval = objects[ix].commonTimeInterval(objects[jx])
                if(not commonTimeInterval.empty()):
                    # Pair the two users as soon as they come within max_distance of each other
                    for t in commonTimeInterval:
                        t1 = t-objects[ix].getFirstInstant()
                        t2 = t-objects[jx].getFirstInstant()
                        try:
                            if(tvaLib.Geo.ppdSearchSquared(objects[ix].getXCoordinates()[t1], objects[ix].getYCoordinates()[t1], objects[jx].getXCoordinates()[t2], objects[jx].getYCoordinates()[t2], distanceSquared)):
                                self.data.append(Interaction(num, commonTimeInterval, objects[ix].num, objects[jx].num, objects[ix], objects[jx], user1Hash=objects[ix].hash, user2Hash=objects[jx].hash))
                                num += 1
                                break
                        except IndexError: pass
        if(verbose >= 2): print(str(len(self.data))+' candidate user pairs detected')
        self.detectedSize = len(self.data)

        # Prune the list down systematically if the sample is too large
        if(len(self.data) > maxUserPairs):
            if(verbose >= 2): print('Sample is too large. Pruning list of user pairs down systematically to '+str(maxUserPairs)+' user pairs.')
            self.data = self.data[::int(m.ceil(len(self.data)/float(maxUserPairs)))]+self.data[1::int(m.ceil(len(self.data)/float(maxUserPairs)))][:int(maxUserPairs-len(self.data)/int(m.ceil(len(self.data)/float(maxUserPairs))))]
            self.data.sort(key=lambda x: x.num, reverse=False)
        self.usedSize = len(self.data)
        return True

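    # Illustrative sketch (not part of the library): pair generation only
    # compares each object against the next searchRange-1 objects (a rolling
    # window that assumes the object list is roughly ordered by appearance) and
    # keeps a pair as soon as the two users are simultaneously within
    # max_distance of each other. The objects list below is an assumed input
    # prepared elsewhere by tvaLib.
    #
    #   pairs = UserPairs(objects, maxUserPairs=4000, max_distance=50.0, searchRange=50, verbose=2)
    #   len(pairs), pairs.detectedSize, pairs.usedSize
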
    def genExposure(self, objFrameTable, label='300 frames', frames=300):
        for i in range(len(self.data)):
            try: self.data[i].exposure
            except AttributeError: self.data[i].exposure = {}
            self.data[i].exposure[label] = objFrameTable.getExposureAtInstant(self.data[i].getFirstInstant(), frames=frames)
        return

    def importPairs(self, paths, version=None, filename=''):
        ''' Attempt to import user pairs (*.pva) from any of the given paths. '''
        if(not filename): filename = 'userPairs('+self.methodName+').pva'
        if(type(paths) != list): paths = [paths]
        for path in paths:
            if(os.path.exists(path) and os.path.isfile(os.path.join(path, filename))):
                with open(os.path.join(path, filename), 'rb') as input_data:
                    originalVersion = pickle.load(input_data)
                    if(version and originalVersion != version): return False
                    tmp_dict = pickle.load(input_data)
                    self.__dict__.update(tmp_dict)
                return True
        return False

    def exportPairs(self, path, version=None, filename=''):
        ''' Export the contents of this instance. '''
        if(not filename): filename = 'userPairs('+self.methodName+').pva'
        with open(os.path.join(path, filename), 'wb') as output:
            pickle.dump(version, output, protocol=2)
            pickle.dump(self.__dict__, output, protocol=2)
        return True

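    # Illustrative sketch (not part of the library): exportPairs() writes two
    # consecutive pickles (a version stamp, then the instance __dict__) into
    # userPairs(<methodName>).pva, and importPairs() reads them back in the same
    # order, rejecting the file if the stored version differs from the one
    # requested. The directory below is a placeholder path.
    #
    #   pairs.exportPairs('/path/to/analysis', version='1.0')
    #   pairs.importPairs('/path/to/analysis', version='1.0')   # paths may be a single string or a list
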
    def getInstantCount(self):
        ''' Return count of instants. '''
        return sum([userPair.getInstantCount() for userPair in self])

    def getInstantWIndicatorCount(self):
        ''' Return count of instants with indicators. '''
        return sum([userPair.getInstantWIndicatorCount() for userPair in self])

    def getIntWTTCCount(self):
        ''' Return count of pairs with indicators. '''
        return len(self.intWTTCmap)

    def calculateIndicators(self, predictionMethod, triage=False, threads=1, collisionDistanceThreshold=1.5, timeHorizon=100, chunkSize=100, shortcircuit=True, verbose=0):
        ''' Process indicator calculation with support for multithreading. This
            function splits the userPairs load into chunks and multithreads the
            calculation.

            Use this function's shortcircuit flag (usually set as a manual
            override) to disable multithreading entirely if it is causing
            problems or if the callback needs to be debugged.
        '''
        # Enable and ensure pickling support of instance methods for multithreading
        tvaLib.prepareInstanceMethodForPickling()

        self.chunks = int(m.ceil(len(self)/float(chunkSize)))
        self.chunk = 0
        self.data_ = []
        self.verbose = verbose
        if(self.verbose): self.prog = tvaLib.ProgressBar(0, self.chunks+1, 77)
        if(self.verbose == 1): self.prog.updateAmount(self.chunk)
        #verbose = multiprocessing.Value('i', verbose)
        #timeHorizon = multiprocessing.Value('i', timeHorizon)
        #collisionDistanceThreshold = multiprocessing.Value('d', collisionDistanceThreshold)
        #import sys; import dev.memoryUsage as memUs; print('Memory usage in bytes; referential: {0}, real: {1}'.format(sys.getsizeof(predictionMethod['prediction'].data),memUs.asizeof(predictionMethod['prediction'].data)))

        while self.chunk < self.chunks:
            # Manual override to shortcircuit multithreading if it is causing problems
            if(shortcircuit or threads==1):
                if(self.chunk == self.chunks-1): terminatorIx = len(self)
                else:                            terminatorIx = int((self.chunk+1)*chunkSize)
                packagedData = computeCrossingsCollisionsWorker(self[int(self.chunk*chunkSize):terminatorIx], self.chunk*chunkSize, predictionMethod, triage, collisionDistanceThreshold, timeHorizon, verbose=verbose)
                self.computeCrossingsCollisionsWorkerCallback(packagedData)
                self.chunk += 1
            # Multithreading
            else:
                pool = multiprocessing_Pool(threads)
                for thread in range(threads):
                    if(self.chunk >= self.chunks): continue
                    elif(self.chunk == self.chunks-1): terminatorIx = len(self)
                    else:                              terminatorIx = int((self.chunk+1)*chunkSize)
                    pool.apply_async(computeCrossingsCollisionsWorker, args=(self[int(self.chunk*chunkSize):terminatorIx], self.chunk*chunkSize, predictionMethod, triage, collisionDistanceThreshold, timeHorizon, verbose), callback=self.computeCrossingsCollisionsWorkerCallback)
                    self.chunk += 1
                pool.close()
                pool.join()

        # For each unmerged result, merge data
        if(verbose >= 2): print(' Distributing indicator calculations in memory...')
        self.distributeIndicatorWorkerResults()
        return True

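    # Illustrative sketch (not part of the library): typical driving code first
    # generates the pairs, then runs the indicator calculation. shortcircuit=True
    # (the default) keeps everything in the calling process, which is the
    # easiest way to debug the callback; threads only takes effect once
    # shortcircuit is disabled. The predictionMethod object is an assumed
    # stand-in configured elsewhere in tvaLib.
    #
    #   pairs = UserPairs(objects)
    #   pairs.calculateIndicators(predictionMethod, threads=4, shortcircuit=False, chunkSize=100, verbose=2)
    #   pairs.getIntWTTCCount()   # number of pairs that received collision-point indicators
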
    def computeCrossingsCollisionsWorkerCallback(self, packagedData):
        ''' Perform end-of-chunk tasks: data storage, progress updates, and
            compilation of statistics.

            Store indicator calculations temporarily in self.data_ to be
            handled by distributeIndicatorWorkerResults() synchronously when all
            chunks have been computed.

            packagedData contains both results and skipped_rates from
            computeCrossingsCollisionsWorker() (it's a list). '''

        self.data_ += packagedData[0]

        if(len(packagedData[1])):
            oldTotal = sum([self.skipped_rate_none, self.skipped_rate_part, self.skipped_rate_full])
            self.skipped_rate_avg = (self.skipped_rate_avg*oldTotal+sum(packagedData[1]))/(oldTotal+len(packagedData[1]))
            # Classify each pair's skip rate as none (0), full (1), or partial (in between)
            for sk in packagedData[1]:
                if(sk <= 0):   self.skipped_rate_none += 1
                elif(sk >= 1): self.skipped_rate_full += 1
                else:          self.skipped_rate_part += 1

        if(self.verbose >= 1):
            if(self.verbose == 1): self.prog.updateAmount(self.chunk)
            elif(self.verbose >= 2): print(' Chunk '+str(self.chunk)+' of '+str(self.chunks)+' chunks completed.')
        return True

    def distributeIndicatorWorkerResults(self):
        ''' This function is called at the end of calculateIndicators() to
            distribute indicator results stored temporarily by
            computeCrossingsCollisionsWorkerCallback(). It is necessary to
            perform this step synchronously.

            self.data_ is a list of unmerged results, one result per user pair.
            self.data_[i]['ix'] is the corresponding user pair (self.data) ix
            self.data_[i]['CP'] contains the calculated collision points
            self.data_[i]['CZ'] contains the calculated crossing zones
        '''
        if(not self.data_): return False

        for dIx in range(len(self.data_)):
            if('CP' in self.data_[dIx]):
                self[self.data_[dIx]['ix']].collisionPoints = self.data_[dIx]['CP']
                self.intWTTCmap.append(self.data_[dIx]['ix'])
            if('CZ' in self.data_[dIx]):
                self[self.data_[dIx]['ix']].crossingZones = self.data_[dIx]['CZ']
            self[self.data_[dIx]['ix']].genSafetyIndicators()
            self[self.data_[dIx]['ix']].genInteractionDescriptors()
        self.data_ = []
        return True

    def getPointList(self, method=0, percentile=0.15, minimumProbability=0.0, format='points', userType1=None, userType2=None, originLane1=None, originLane2=None, destLane1=None, destLane2=None, **kwargs):
        ''' Returns safetyPoints as a formatted list.

            Input:
            ======
            method=0              Return all indicators
            method=1              Return one indicator per pair according to percentile of indicator
                                  percentile=0 minimum value
                                  percentile=1 maximum value
            method=3              Return one indicator per pair according to percentile of prob
            minimumProbability -> indicators with probability lower than this
                                  will be ignored

            Filters:
            ========
            userType1, userType2     -> Filter based on road user types
            originLane1, originLane2 -> Filter based on road user origin lane
            destLane1, destLane2     -> Filter based on road user destination lane

            kwargs Input:
            =============
            ptype             -> The type of point: CP|CZ
            alignRestrictions -> [[laneIx,Smin,Smax],...] A series of sections
                                 identified by align index and bounded between
                                 curvilinear distances Smin and Smax. Only
                                 indicators originating from within these
                                 sections will be returned.
                                 This mode requires declaration of objects.
            maxIndThreshold   -> Indicator values above this will be ignored.
            aggregateInstants -> For interaction-instants with more than one
                                 indicator of the same type, return a weighted
                                 average according to probability. Defaults to
                                 True.

            Supported return formats:
            =========================
                                  0      1   2   3     4     5     6     7          8
            format='points'  -> [[value1,px1,py1,prob1,time1,num11,num12,descriptor,exposure_list1],[value2,px2,...],...]
            format='columns' -> [[value1,value2,...],[px1,px2,...],...]
        '''

        return_list = []
        for userPair in self:
            try: ut1 = userPair.roadUser1.getUserType()
            except: ut1 = 0
            try: ut2 = userPair.roadUser2.getUserType()
            except: ut2 = 0

            # ...by user type
            if(userType1):
                if(userType2):
                    if(userType1 != ut1 or userType2 != ut2):
                        if(userType1 != ut2 or userType2 != ut1):
                            continue
                else:
                    if(userType1 != ut1):
                        if(userType1 != ut2):
                            continue
            elif(userType2):
                if(userType2 != ut1):
                    if(userType2 != ut2):
                        continue
            # ...by origin
            if(originLane1):
                if(originLane2):
                    if(originLane1 != userPair.roadUser1.getCurvilinearPositions()[0][2] or originLane2 != userPair.roadUser2.getCurvilinearPositions()[0][2]):
                        if(originLane1 != userPair.roadUser2.getCurvilinearPositions()[0][2] or originLane2 != userPair.roadUser1.getCurvilinearPositions()[0][2]):
                            continue
                else:
                    if(originLane1 != userPair.roadUser1.getCurvilinearPositions()[0][2]):
                        if(originLane1 != userPair.roadUser2.getCurvilinearPositions()[0][2]):
                            continue
            elif(originLane2):
                if(originLane2 != userPair.roadUser1.getCurvilinearPositions()[0][2]):
                    if(originLane2 != userPair.roadUser2.getCurvilinearPositions()[0][2]):
                        continue
            # ...by destination
            if(destLane1):
                if(destLane2):
                    if(destLane1 != userPair.roadUser1.getCurvilinearPositions()[-1][2] or destLane2 != userPair.roadUser2.getCurvilinearPositions()[-1][2]):
                        if(destLane1 != userPair.roadUser2.getCurvilinearPositions()[-1][2] or destLane2 != userPair.roadUser1.getCurvilinearPositions()[-1][2]):
                            continue
                else:
                    if(destLane1 != userPair.roadUser1.getCurvilinearPositions()[-1][2]):
                        if(destLane1 != userPair.roadUser2.getCurvilinearPositions()[-1][2]):
                            continue
            elif(destLane2):
                if(destLane2 != userPair.roadUser1.getCurvilinearPositions()[-1][2]):
                    if(destLane2 != userPair.roadUser2.getCurvilinearPositions()[-1][2]):
                        continue

            # Collect and aggregate this pair's points
            pair_point_list = userPair.getPointList(**kwargs)
            if(pair_point_list):
                if(method==1):
                    pair_indicator = tvaLib.Math.getPercentileKeyFromList(pair_point_list, percentile, sorting=True, sortingColumnIx=0)
                    if(pair_indicator[3] > minimumProbability): return_list.append(pair_indicator)
                elif(method==3):
                    pair_indicator = tvaLib.Math.getPercentileKeyFromList(pair_point_list, percentile, sorting=True, sortingColumnIx=3)
                    if(pair_indicator[3] > minimumProbability): return_list.append(pair_indicator)
                else:
                    for pair in pair_point_list:
                        if(pair[3] > minimumProbability):
                            return_list += [pair]

        if(format == 'columns'): return tvaLib.pointList2ColumnList(return_list)
        else:                    return return_list

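    # Illustrative sketch (not part of the library): pull one indicator per pair
    # (15th percentile of the indicator value), keeping only pairs of a given
    # road user type and reasonably probable points, then again as columns ready
    # for plotting. The userType code 1 is assumed here to denote a car as in
    # Traffic Intelligence.
    #
    #   rows = pairs.getPointList(method=1, percentile=0.15, minimumProbability=0.1, userType1=1, userType2=1)
    #   cols = pairs.getPointList(method=1, percentile=0.15, minimumProbability=0.1, format='columns')
    #   values, xs, ys = cols[0], cols[1], cols[2]
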
    def genIndicatorDistribution(self, indicator_list=None, dist_type='pdf', bins=range(0,100,1), normalise=False, **kwargs):
        ''' Generate indicator distribution.

            Options:
            ========
            dist_type='pdf'  #Probability distribution function (default)
            dist_type='cdf'  #Cumulative distribution function
            dist_type='freq' #Frequency (plain histogram)

            Output:
            =======
            Returns two lists: a list of bin coordinates (x-axis) and a list of
            the corresponding pdf/cdf/freq values (y-axis) for the corresponding
            bins.
            [[x_bin_1,x_bin_2,...],[y_bin_value_1,y_bin_value_2,...]]
        '''

        if(not indicator_list): indicator_list = self.getPointList(**kwargs)
        if(not indicator_list): return False

        # Optionally weight each indicator value by its probability
        indicator_value_list = []
        for i in indicator_list:
            if(normalise): indicator_value_list.append(i[0]*i[3])
            else:          indicator_value_list.append(i[0])
        histo = np.histogram(indicator_value_list, bins=bins)
        histo = [histo[1].tolist(), histo[0].tolist()]

        try:
            if(dist_type=='freq'): return histo
            else:
                histo[1] = [x/float(np.sum(histo[1])) for x in histo[1]]
                if(dist_type=='cdf'):
                    for histoIx in range(1, len(histo[1])):
                        histo[1][histoIx] += histo[1][histoIx-1]
                return histo
        except ZeroDivisionError: return [[],[]]

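    # Illustrative sketch (not part of the library): build a cumulative
    # distribution of the aggregated indicator values in 1-unit bins, e.g. to
    # read off the share of returned points that fall below a given indicator
    # value.
    #
    #   x_bins, cdf = pairs.genIndicatorDistribution(dist_type='cdf', bins=range(0, 100, 1), method=1)
    #   share_below_20 = cdf[19]   # cumulative share over the first 20 bins
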
    def dropSafetyIndicatorsByValue(self, **kwargs):
        ''' Delete indicators of any user pairs with values below
            indicatorLowerBound or greater than indicatorUpperBound. '''
        drop_length = 0
        for userPair in self:
            drop_length += userPair.dropSafetyIndicatorsByValue(**kwargs)
        return drop_length

    def constrainToZone(self, zone):
        ''' Drop any collision points of any user pairs outside of given zone. '''
        drop_length = 0
        for userPair in self:
            drop_length += userPair.constrainToZone(zone)
        return drop_length

    def garbageCollectObjectData(self):
        ''' Use repopulateObjectData() to rebuild object data, i.e. after
            pickling. '''
        for i in range(len(self)):
            #try: self[i].sequenceTrace = self[i].roadUser1.file
            #except: self[i].sequenceTrace = None
            self[i].roadUser1 = None
            self[i].roadUser2 = None
        return True

    def repopulateObjectData(self, objects, graceful=True):
        ''' Restore each interaction's embedded object data if the objects'
            hashes match any existing stored hashes. Alternatively, attempt with
            the road user number (as reported by Traffic Intelligence).

            Output:
            =======
            Returns the percentage of successful repopulations.
        '''
        hashMap = [obj.hash for obj in objects]

        original_data_length = len(self)
        failed_repopulations = 0

        for i in range(len(self)):
            try:
                if(hasattr(self[i], 'user1Hash') and hasattr(self[i], 'user2Hash')):
                    self[i].roadUser1 = objects[hashMap.index(self[i].user1Hash)]
                    self[i].roadUser2 = objects[hashMap.index(self[i].user2Hash)]
                else:
                    self[i].roadUser1 = objects[tvaLib.Obj.num2ind(objects, list(self[i].roadUserNumbers)[0])]
                    self[i].roadUser2 = objects[tvaLib.Obj.num2ind(objects, list(self[i].roadUserNumbers)[1])]
            except Exception:
                if(graceful):
                    self.data[i] = None
                    failed_repopulations += 1
                    continue
                else: return 0.0

        if(failed_repopulations > 0): self.data = filter(None, self.data)

        return 1 - failed_repopulations/float(original_data_length)

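    # Illustrative sketch (not part of the library): the intended round trip is
    # to strip the heavy embedded trajectory objects before pickling and to
    # re-attach them afterwards (by hash, or by road user number as a fallback).
    # The objects list is an assumed input loaded elsewhere by tvaLib and the
    # path is a placeholder.
    #
    #   pairs.garbageCollectObjectData()
    #   pairs.exportPairs('/path/to/analysis', version='1.0')
    #   ...
    #   pairs.importPairs('/path/to/analysis', version='1.0')
    #   success_rate = pairs.repopulateObjectData(objects)
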
    def merge(self, source):
        ''' Merge this set of interactions with another set. '''
        self.intWTTCmap = self.intWTTCmap + [x+len(self.data) for x in source.intWTTCmap]
        self.data += source.data
        return True


# NOTE: the original class statement for the following run-off-the-road variant
# is not preserved in this listing; the name UserRunOffs and the UserPairs base
# class are assumed from the genPairs() docstring below.
class UserRunOffs(UserPairs):
    def genPairs(self, objects, searchRange=50):
        ''' Extend UserPairs to generate interactions with sorties de route (run off the road). '''
        self.data = []
        num = 0
        for ix in range(len(objects)):
            commonTimeInterval = objects[ix].getTimeInterval()  # Assumed: the single user's own time interval (not defined in the original listing)
            self.data.append(Interaction(num, commonTimeInterval, objects[ix].num, 0, objects[ix], 0))
            num += 1
        return True