# camera-region-capture.py
  1. #!/usr/bin/python3
  2. # should be used in docker which has jetson package installed.
  3. #
  4. #
  5. #
  6. #
  7. import cv2
  8. import jetson.inference
  9. import jetson.utils
  10. import argparse
  11. import sys
  12. import http.client, urllib.parse
  13. import json
  14. import numpy as np
  15. from tkinter import *
  16. from tkinter import simpledialog
  17. import threading,queue
  18. # parse the command line
  19. parser = argparse.ArgumentParser(description="Draw rectangle areas and set names for them, and use DNN to recognize and send result to remote peer",
  20. formatter_class=argparse.RawTextHelpFormatter, epilog=jetson.inference.detectNet.Usage() +
  21. jetson.utils.videoSource.Usage() + jetson.utils.videoOutput.Usage() + jetson.utils.logUsage())
  22. parser.add_argument("input_URI", type=str, default="/dev/video0", nargs='?', help="URI of the input stream")
  23. parser.add_argument("output_URI", type=str, default="", nargs='?', help="URI of the output stream")
  24. parser.add_argument("--network", type=str, default="ssd-mobilenet-v2", help="pre-trained model to load (see below for options)")
  25. parser.add_argument("--overlay", type=str, default="box,labels,conf", help="detection overlay flags (e.g. --overlay=box,labels,conf)\nvalid combinations are: 'box', 'labels', 'conf', 'none'")
  26. parser.add_argument("--threshold", type=float, default=0.65, help="minimum detection threshold to use")
  27. parser.add_argument("--fcc_Ip", type=str, default="192.168.1.100:8384", help="fusion ip address and port No..")
  28. is_headless = ["--headless"] if sys.argv[0].find('console.py') != -1 else [""]
  29. try:
  30. opt = parser.parse_known_args()[0]
  31. except:
  32. parser.print_help()
  33. sys.exit(0)
  34. is_process_exiting = False
  35. def TimelyReportToRemoteApp():
  36. if is_process_exiting:
  37. return
  38. timer_ReportToRemoteApp = threading.Timer(0.5, TimelyReportToRemoteApp)
  39. batch = []
  40. #print(' 1')
  41. while True:
  42. try:
  43. nxtData = queuePendingToSendToRemote.get(block=False)
  44. #print(' stage 2: batch length: '+str(len(batch)))
  45. batch.append(nxtData)
  46. if len(batch)>=30:
  47. #don't want batch size too large
  48. break
  49. #print('report detection to remote app...')
  50. except queue.Empty:
  51. #print(' get exception with Empty')
  52. if len(batch)==0:
  53. #print(' empty batch in TimelyReportToRemoteApp')
  54. timer_ReportToRemoteApp.start()
  55. return
  56. else:
  57. break
  58. try:
  59. if len(reportServicePath)>1:
  60. #print(' doing http posting to TimelyReportToRemoteApp')
  61. conn = http.client.HTTPConnection(opt.fcc_Ip,timeout=15)
  62. headers = {'Content-type': 'application/json'}
  63. #detectionData = {"AreaName":rec_name,"ClassID":det.ClassID,"ClassName":net.GetClassDesc(det.ClassID),"Confidence":det.Confidence}
  64. json_data = json.dumps([batch])
  65. print('posting detections with length '+str(len(batch))+' to remoteApp')
  66. conn.request('POST', reportServicePath, json_data, headers)
  67. response = conn.getresponse()
  68. #print('accessing service: ShowMeApi with responseCode: ' + str(response.status), response.reason)
  69. #if str(response.status) != '200':
  70. print('Accessed Report service with responseCode: ' + str(response.status), response.reason)
  71. #sys.exit(0)
  72. #else:
  73. #print('Succeed send one batch detections(count: '+len(batch)+') to remote App.')
  74. except Exception as e:
  75. print('Failed for fatal error when accessing remote App.')
  76. print(e)
  77. timer_ReportToRemoteApp.start()
# One-shot timer that kicks off the report loop 3s after startup; it is only
# started if the Report API is discovered below.
timer_ReportToRemoteApp = threading.Timer(3.0, TimelyReportToRemoteApp)
# Detections pending delivery to the remote app (produced by the main loop,
# consumed by TimelyReportToRemoteApp).
queuePendingToSendToRemote = queue.Queue()
reportServicePath = ""
# A plausible "ip:port" is at least 7 chars; an empty --fcc_Ip disables reporting.
if len(opt.fcc_Ip)>=7:
    print('connecting to FCC at '+opt.fcc_Ip+' (for disable then set param --fcc_Ip to empty)')
    # try read persist Areas from remote service
    conn = http.client.HTTPConnection(opt.fcc_Ip,timeout=15)
    headers = {'Content-type': 'application/json'}
    serviceTag = ["webapi",["Camera"]]
    json_data = json.dumps(serviceTag)
    # query the dispatcher's ShowMeApi for the list of available service APIs
    conn.request('POST', '/u/?apitype=service&an=ShowMeApi&pn=ProcessorsDispatcher&en=Edge.Core.Processor.Dispatcher.DefaultDispatcher',
    json_data, headers)
    response = conn.getresponse()
    print('accessing service: ShowMeApi with responseCode: ' + str(response.status), response.reason)
    if response.status != 200:
        print("Failed for accessing ShowMeApi service, fatal error.")
        sys.exit(0)
    # Convert bytes to string type and string type to dict
    showmeapidata_raw = response.read().decode('utf-8')
    showmeapidata = json.loads(showmeapidata_raw)
    # find the Report endpoint of the area-intrude detecter app; only then is
    # the reporting timer started
    for api in showmeapidata:
        if api['ProviderType']== 'AreaIntrudeDetecterServer.App' and api['ApiName']=='Report':
            print('Found AreaIntrudeDetecterServerApp and its Report Api at: '+api['Path'])
            reportServicePath = api['Path']
            timer_ReportToRemoteApp.start()
else:
    print('Skipped conn to: AreaIntrudeDetecterServer.App')
  105. #rectA.x0,y0 rectB.x1,y1
  106. def AreTwoRectAreaIntersect(rectA,rectB):
  107. # rectA is samller
  108. if rectA['x1']>=rectB['x0'] and rectA['x1']<=rectB['x1'] and rectA['y1']>=rectB['y0'] and rectA['y1']<=rectB['y1']:
  109. return True
  110. if rectB['x1']>=rectA['x0'] and rectB['x1']<=rectA['x1'] and rectB['y1']<=rectA['y0'] and rectB['y1']>=rectA['y1']:
  111. return True
  112. if rectA['x0']>=rectB['x0'] and rectA['x0']<=rectB['x1'] and rectA['y1']>=rectB['y0'] and rectA['y1']<=rectB['y1']:
  113. return True
  114. if rectB['x0']>=rectA['x0'] and rectB['x0']<=rectA['x1'] and rectB['y1']>=rectA['y0'] and rectB['y1']<=rectA['y1']:
  115. return True
  116. return False
# Hidden Tk root window: only used as the parent of the area-name input dialog.
root = Tk()
# hide this base window
root.withdraw()
# True while the left mouse button is held down and a rectangle is being dragged
onDrawingRect = False
# areaName -> list of (x, y) points; the '' entry is a placeholder that every
# consumer skips
rectangles = {'':[]}
# points of the rectangle currently being drawn ([] when none in progress)
ref_point = []
def shape_selection(event, x, y, flags, param):
    """OpenCV mouse callback: drag out a rectangle, then name it.

    Left-button down starts a drag, mouse-move extends it, and button-up
    prompts for an area name (Tk dialog) and stores the collected points
    into the module-level `rectangles` dict.
    """
    # grab references to the global variables
    global ref_point,onDrawingRect
    # if the left mouse button was clicked, record the starting
    # (x, y) coordinates and indicate that cropping is being performed
    if event == cv2.EVENT_LBUTTONDOWN:
        onDrawingRect = True
        ref_point = [(x, y)]
    # check to see if the left mouse button was released
    elif event == cv2.EVENT_LBUTTONUP:
        onDrawingRect = False
        # reject drags narrower than 20px.  NOTE(review): a right-to-left
        # drag yields a negative width and is always rejected here — confirm
        # whether that is intended.
        if x - ref_point[0][0] <= 20:
            print('area is too small')
            ref_point.clear()
            return
        print('mouse is up, wait for input areaName')
        areaName = simpledialog.askstring("Input","pls input the area name",parent=root)
        if not areaName:
            print('cancelled input areaName')
            ref_point.clear()
            return
        ref_point.append((x, y))
        # store the whole point list; consumers read only [0] and [-1].  The
        # next LBUTTONDOWN rebinds ref_point to a fresh list, so the stored
        # list is not mutated afterwards.
        rectangles[areaName] = ref_point
        print('areaName collected: '+str(areaName))
    elif event == cv2.EVENT_MOUSEMOVE:
        # record the ending (x, y) coordinates while dragging so the main
        # loop can render the in-progress rectangle
        if onDrawingRect:
            ref_point.append((x, y))
# load the object detection network
net = jetson.inference.detectNet(opt.network, sys.argv, opt.threshold)
# create video sources & outputs
# NOTE(review): `input` shadows the builtin of the same name; safe here since
# the script never calls builtin input(), but worth renaming eventually.
input = jetson.utils.videoSource(opt.input_URI, argv=sys.argv)
output = jetson.utils.videoOutput(opt.output_URI, argv=sys.argv+is_headless)
# one OpenCV window shows the annotated frames and receives the mouse events
cv2WindowName = 'Camera: '+opt.input_URI+' | '+opt.network
cv2.namedWindow(cv2WindowName)
  162. # process frames until the user exits
  163. while True:
  164. cv2.setMouseCallback(cv2WindowName, shape_selection)
  165. cuda_img = input.Capture()
  166. # detect objects in the image (with overlay)
  167. detections = net.Detect(cuda_img, overlay=opt.overlay)
  168. jetson.utils.cudaDeviceSynchronize()
  169. #print(cuda_img)
  170. cv_img_rgb = jetson.utils.cudaToNumpy(cuda_img)
  171. cv_img_bgr = cv2.cvtColor(cv_img_rgb, cv2.COLOR_RGB2BGR)
  172. for det in detections:
  173. #print(det)
  174. for rec_name, rec_points in rectangles.items():
  175. if rec_name =='':
  176. continue
  177. rectA = {'x0':det.Left,'y0':det.Top,'x1':det.Left+det.Width,'y1':det.Top+det.Height}
  178. rectB = {'x0':rec_points[0][0],'y0':rec_points[0][1],'x1':rec_points[-1][0],'y1':rec_points[-1][1]}
  179. if AreTwoRectAreaIntersect(rectA,rectB):
  180. print('Area: '+rec_name+' has Intruder: '+net.GetClassDesc(det.ClassID)+' with confidence: '+str(det.Confidence))#.ClassID)+' at area: '+rec_name)
  181. if len(reportServicePath)>1:
  182. detectionData = {"AreaName":rec_name,"ClassID":det.ClassID,"ClassName":net.GetClassDesc(det.ClassID),"Confidence":det.Confidence}
  183. queuePendingToSendToRemote.put(detectionData)
  184. if len(ref_point)>=2:
  185. #print('drawing rect...')
  186. cv2.rectangle(cv_img_bgr, ref_point[0], ref_point[-1], (255, 0, 255), 2)
  187. #print(' left-top: '+str(ref_point[0])+', right-bot: '+str(ref_point[-1]))
  188. for rec_name, rec_points in rectangles.items():
  189. if rec_name =='':
  190. continue
  191. cv2.rectangle(cv_img_bgr, rec_points[0], rec_points[-1], (255,0,0), 2)
  192. cv2.putText(cv_img_bgr, rec_name,(rec_points[0][0]+5,rec_points[0][1]+25),cv2.FONT_HERSHEY_SIMPLEX,1,(255,155,155),2)
  193. try:
  194. #show perf counters
  195. cv2.putText(cv_img_bgr, "{:.0f} FPS".format(net.GetNetworkFPS()),(5,18),cv2.FONT_HERSHEY_SIMPLEX,0.6,(51,255,255),1)
  196. cv2.imshow(cv2WindowName,cv_img_bgr)
  197. except:
  198. print('cv2 imshow got an error.')
  199. key = cv2.waitKey(1) & 0xFF
  200. if key == ord('q'):
  201. print('user input key: q which will terminate the whole process...')
  202. break
  203. # exit on input/output EOS
  204. if not input.IsStreaming() or not output.IsStreaming():
  205. break
  206. is_process_exiting = True
  207. cv2.destroyAllWindows()
  208. timer_ReportToRemoteApp.cancel()